Mirror of https://github.com/ershisan99/coolify.git (synced 2025-12-18 12:33:06 +00:00)
Compare commits
374 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 986c5b7133 | |
| | 4e334d4fff | |
| | dcf7f92aab | |
| | f6a91cb53c | |
| | 726fbbb52a | |
| | 29d2278579 | |
| | 72ceeff022 | |
| | 54d65ec011 | |
| | 96aef5c4a6 | |
| | 7b64166fb0 | |
| | 1f5908e0b8 | |
| | a4562d18b6 | |
| | 875e232199 | |
| | 80f95a4674 | |
| | 17d56aa972 | |
| | f4ba60cf8f | |
| | 0b8a648f13 | |
| | 2576a3af2c | |
| | 2e6c73fa3c | |
| | b8d8ee4560 | |
| | d9b74ada84 | |
| | 01b058151b | |
| | 989d952f35 | |
| | 908af3e024 | |
| | 819157fda1 | |
| | 5a4458e93f | |
| | 1fbd403f34 | |
| | 098e519c55 | |
| | 3ef4a242f9 | |
| | ad3044dce1 | |
| | e40541d831 | |
| | 2786e7dbaf | |
| | 196d681a63 | |
| | d2353e3c35 | |
| | 2475031f88 | |
| | cd15e68adc | |
| | 27431f779d | |
| | b9b5a2faeb | |
| | e471b11d3b | |
| | a742a3d2e3 | |
| | c615f6c07e | |
| | a6ebfb08f7 | |
| | 2b0d162226 | |
| | 2c5f09a8bb | |
| | ef073e586b | |
| | 82bfdb87e3 | |
| | 767e7b80cb | |
| | 8d26ea9063 | |
| | 1a7c4310d0 | |
| | 4e8fe79e2b | |
| | a8c5551292 | |
| | 2bf73109b2 | |
| | f0ab3750bd | |
| | 58a11e37fe | |
| | 927bf46304 | |
| | 6b89857697 | |
| | b72e5ccef6 | |
| | 6617b7811b | |
| | e1c1988db4 | |
| | af99ea4678 | |
| | a6d5316090 | |
| | f5e7a84fa6 | |
| | c013764b61 | |
| | 2320ab0dfc | |
| | 1281a0f7e4 | |
| | d8350cd4ee | |
| | e3b7c23ed9 | |
| | eae1ea21d6 | |
| | 541aa76b64 | |
| | 7b8555d524 | |
| | fdf998c181 | |
| | 3d6b343adc | |
| | e338cecc14 | |
| | e5537a33fb | |
| | 35384deb68 | |
| | 547ca60c2a | |
| | 376f6f7455 | |
| | abe92dedff | |
| | 4b521ceedc | |
| | 6dfcb9e52b | |
| | 335e3216e2 | |
| | 5b22bb4818 | |
| | 0097004882 | |
| | 1bc9e4c2d3 | |
| | 36c7e1a3c3 | |
| | c6b4d04e26 | |
| | fa6cf068c7 | |
| | 7c273a3a48 | |
| | 3de2ea1523 | |
| | c5c9f84503 | |
| | 16ea9a3e07 | |
| | 48f952c798 | |
| | f78ea5de07 | |
| | 5adbd5e784 | |
| | 5b2afa79d7 | |
| | dc4e6d02b7 | |
| | 8ae61c8f78 | |
| | 684b8e0914 | |
| | 7c3314abae | |
| | ab9f8ff356 | |
| | 892d8cd5c1 | |
| | 8b8b45778d | |
| | 6655fb182c | |
| | 0926d40247 | |
| | ddc4d36688 | |
| | 53e1f22eb1 | |
| | 3d2a34737b | |
| | ebde77008c | |
| | 3d27fd04ba | |
| | d9fcaf3473 | |
| | d266f761aa | |
| | 1d01405412 | |
| | 7c62eb5bd6 | |
| | 4dcc76d366 | |
| | d2fad19a11 | |
| | 7c92c4c964 | |
| | 5a71d33236 | |
| | 1b4db4f793 | |
| | c084b22815 | |
| | acacef95cd | |
| | 5d722183d3 | |
| | ac19ea5407 | |
| | d19b05b970 | |
| | a0795136ac | |
| | d2566e345a | |
| | 66cd7cf90e | |
| | 9a599981ef | |
| | f51f7bc82a | |
| | dbcbac0137 | |
| | e722f8a87c | |
| | 61679749eb | |
| | 23e12c9c44 | |
| | 6da78cd3e5 | |
| | 78ce8100a3 | |
| | 76ba338b45 | |
| | 823fe2deb2 | |
| | cb90f692f2 | |
| | 0325343ede | |
| | 69d1556a1d | |
| | 2daa043840 | |
| | f340ca9d05 | |
| | 02abd038fa | |
| | b9da68ec28 | |
| | 88b3910d80 | |
| | 160412f6e4 | |
| | 59a86b25fc | |
| | 49e58b39f5 | |
| | 58e0757bbd | |
| | 5ff4197572 | |
| | b56e28d27a | |
| | c3d39e1dd4 | |
| | 716aa36bfd | |
| | f01460170e | |
| | a414ce282d | |
| | 6c32f3b130 | |
| | 4cf907c572 | |
| | b28baaa5aa | |
| | 980dea64e0 | |
| | c340f6436f | |
| | 54376fd105 | |
| | ef006578b2 | |
| | b0b1ee0c60 | |
| | 4e2026aa2d | |
| | e0e50b4bd5 | |
| | c9b52f1310 | |
| | 0195213dfb | |
| | d6225cbde3 | |
| | 7b4c194b97 | |
| | a5ecff24a3 | |
| | c9c003dc9b | |
| | fd95936219 | |
| | 15a3fd4456 | |
| | df896542e4 | |
| | 8927e81274 | |
| | 340f061827 | |
| | 15cbac97c2 | |
| | bb32d0f7d1 | |
| | c370fba9ba | |
| | 6e32421172 | |
| | 6643687c0a | |
| | ed01e78d77 | |
| | 93aed52f88 | |
| | bb6d1fd6a3 | |
| | 6e33179fc2 | |
| | 277fd167cf | |
| | 98e8d5170b | |
| | 11ee1651ae | |
| | 0dfcf9b1e6 | |
| | 08f57ac5bc | |
| | 7095e781e9 | |
| | df18b93809 | |
| | 0c2e028b38 | |
| | 80cb1bc129 | |
| | 74c1cb51f6 | |
| | 2e864bddf9 | |
| | e60ae91b5d | |
| | d606cd86a0 | |
| | bc463c37f4 | |
| | 76c1480903 | |
| | 6f312caf8b | |
| | 980d8d374f | |
| | c49b34942f | |
| | fcfa8717a5 | |
| | 954a265965 | |
| | 69845a020a | |
| | 22200fd8a7 | |
| | add441675d | |
| | d3d9754277 | |
| | aa5e2edbc5 | |
| | 310b099ecf | |
| | 1cfaef911c | |
| | b931c5f638 | |
| | 7c683668eb | |
| | cab7ac7d58 | |
| | 15e69c538a | |
| | 31ee938b66 | |
| | e51a8d43d9 | |
| | 64cd5b6e4b | |
| | 6c9ef34905 | |
| | aa89019236 | |
| | df58fcee16 | |
| | ea3ffc429f | |
| | 2efca7a2b5 | |
| | 9db448a5e2 | |
| | feee90beef | |
| | 906a63b6b5 | |
| | 2ce64ac213 | |
| | 4d8bf57135 | |
| | c5348ce4b3 | |
| | 7f87c03f97 | |
| | 9469f148ff | |
| | ffb7dc4ec2 | |
| | 242b8fa746 | |
| | 50cae5ac3b | |
| | 6a71233eb2 | |
| | 1aff8933c9 | |
| | 0ed87a5dfc | |
| | 24a6bcbd1e | |
| | ca7f3da19d | |
| | bf047e2a3c | |
| | 4454287be9 | |
| | 3bd2183655 | |
| | 1f7080e8f8 | |
| | 8b20761e8b | |
| | 655d0b5d5f | |
| | 91849cdd3a | |
| | df25a694c3 | |
| | eabaca145e | |
| | 2f0e458765 | |
| | ff8037f231 | |
| | a116028e1b | |
| | e606a02b29 | |
| | 531c712ea5 | |
| | 3ae7624361 | |
| | fed83462fa | |
| | 58c9f937c5 | |
| | 5d14b9209d | |
| | 305a95fa74 | |
| | b29c1e702a | |
| | b04d75ab08 | |
| | 25abfaadb9 | |
| | 1df81b8698 | |
| | 4487846fd7 | |
| | 86918f5160 | |
| | bc723b3f15 | |
| | 1881e646d4 | |
| | aa98808a1a | |
| | f9a2232703 | |
| | 19d6be8663 | |
| | 0eb7c890ad | |
| | 7bfa68aa58 | |
| | 857a38050e | |
| | c5b7f92caf | |
| | df31ffd7fb | |
| | 0df0322d36 | |
| | 260552322d | |
| | 88ef6496a2 | |
| | bdf123bf7b | |
| | 8fc3760eef | |
| | 5656f6f709 | |
| | 53e7e8b77e | |
| | b990915b7a | |
| | 15b7822ffd | |
| | cfa28419cb | |
| | 30ef0d2a3a | |
| | 755f99200a | |
| | 7af79ed3a2 | |
| | 2971e14269 | |
| | 01954aaf30 | |
| | da018a8f2a | |
| | 77400bbbb0 | |
| | 3c3333d3df | |
| | 4963bd4144 | |
| | b4a418dded | |
| | a724b0daee | |
| | 88aa620cb4 | |
| | 70d3448110 | |
| | 09a1a406a6 | |
| | 40939d0b7f | |
| | aec1d184c8 | |
| | 69d3cb5dd8 | |
| | 3deff162bb | |
| | 4ed54568d3 | |
| | 004724da55 | |
| | 97e9b5ffe3 | |
| | 45f920f802 | |
| | 2b31532d19 | |
| | e7a6ecf95b | |
| | 545c98cee0 | |
| | d29ccbfe37 | |
| | d0807862e6 | |
| | b92616dc14 | |
| | a1a436300d | |
| | 16a5aeb1ba | |
| | 872095ff7a | |
| | d88f2ea4c3 | |
| | 02e0385ab8 | |
| | c9751d4cd9 | |
| | 162b637992 | |
| | a10ddd4063 | |
| | f46ccc63a7 | |
| | fc04a45744 | |
| | 90c2b59a51 | |
| | d6bee99c1b | |
| | 0871d47568 | |
| | 5c646c1898 | |
| | 8974de165f | |
| | e622294b87 | |
| | cf9d32b556 | |
| | e2d6b5bf64 | |
| | dec58fd6d1 | |
| | dbb2241213 | |
| | 3bd8ac5820 | |
| | f514aa676d | |
| | 73fc9755dd | |
| | 5089c843b6 | |
| | cd527f2bce | |
| | 82de234f21 | |
| | ae6f325c0a | |
| | c64bbbe426 | |
| | eafd882a06 | |
| | 460ae85226 | |
| | a64b095c13 | |
| | 7ea0de3fb8 | |
| | b4c836afbd | |
| | 2d0f22b379 | |
| | a8e9668c2b | |
| | 425feba0e2 | |
| | c09b8d888f | |
| | 748e691a58 | |
| | f8c81ff95f | |
| | d11c4a3cd7 | |
| | 3f3ea151ef | |
| | 7e2f68870c | |
| | df41cf14da | |
| | 111370c025 | |
| | bcb2ba0b1b | |
| | 807d526ffa | |
| | 2ff9c5fed5 | |
| | d43cd663d2 | |
| | dae91267e8 | |
| | b2d6317a23 | |
| | c49b412e69 | |
| | 05e5d73556 | |
| | 53620f4b1a | |
| | 9d14b03eb1 | |
| | 04a5b1bd4f | |
| | 31b3f58b2c | |
| | 9c173d1de0 | |
| | e11b6d74ed | |
| | c7efe899fa | |
| | adcd68c1ab | |
| | 23a4ebb74a | |
| | cccb9a5fec | |
```diff
@@ -1,4 +1,16 @@
.DS_Store
node_modules
dist
.routify
.pnpm-store
/build
/.svelte-kit
/package
/yarn.lock
/.pnpm-store
/ssl

.env
.env.prod
.env.stag
/db/*.db
/db/*.db-journal
/data/haproxy/haproxy.cfg
/data/haproxy/haproxy.cfg.lkg
```
```diff
@@ -1,35 +1,5 @@
-####################################
-# Domain where your Coolify instance will be available and reachable.
-# It's the same as you set in Github OAuth App and Github App as <domain>.
-DOMAIN=
-## Let's Encrypt contact email required
-EMAIL=
-
-# JWT Token Sign Key for logging you in to Coolify's frontend
-JWT_SIGN_KEY=
-# Encryption key for SECRETS - do NOT share it with others!
-SECRETS_ENCRYPTION_KEY=
-
-# Docker Engine
-DOCKER_ENGINE=/var/run/docker.sock
-# Docker network to use internally between the proxy and your apps
-DOCKER_NETWORK=coollabs
-
-# Mongodb
-# Values in case if you are using our Mongodb installation - CHANGE user and password fields!
-MONGODB_HOST=coollabs-mongodb
-MONGODB_PORT=27017
-MONGODB_USER=supercooldbuser
-MONGODB_PASSWORD=developmentPassword4db
-MONGODB_DB=coolLabs-prod
-
-# Frontend only variables
-VITE_GITHUB_APP_CLIENTID=
-VITE_GITHUB_APP_NAME=
-
-# Github OAuth & App secrets and private key - you can get it from Github.
-GITHUB_APP_CLIENT_SECRET=
-GITHUP_APP_WEBHOOK_SECRET=
-
-# It should look like this. Newlines breaks with \n
-GITHUB_APP_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEA7Y+Uwkd8FINSwFktWGdtwCaOAazTDYR8ucEzGyR9r+ooJZhF\nOc32qgDSps6Q5DsqPOzvfhiviqU+et9VF+bJhfdzwJ+Le86QZH1RgsDMoY049XvI\nKSwP........"
+COOLIFY_APP_ID=
+COOLIFY_SECRET_KEY=12341234123412341234123412341234
+COOLIFY_DATABASE_URL=file:../db/dev.db
+COOLIFY_SENTRY_DSN=
+COOLIFY_IS_ON="docker"
```
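As the removed comment notes, the multi-line PEM key has to be collapsed onto a single line with literal `\n` escapes; the API undoes this later with `replace(/\\n/g, '\n')` (see the GitHub clone helper near the end of this diff). A hedged sketch of producing that form, assuming the key was downloaded from GitHub as `key.pem`:

```bash
# Hypothetical helper: emit the PEM as one \n-escaped line, ready for .env.
printf 'GITHUB_APP_PRIVATE_KEY="%s"\n' "$(awk 'NF { printf "%s\\n", $0 }' key.pem)"
```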
**.eslintrc.cjs** (new file, 20 lines)

```js
module.exports = {
  root: true,
  parser: '@typescript-eslint/parser',
  extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier'],
  plugins: ['svelte3', '@typescript-eslint'],
  ignorePatterns: ['*.cjs'],
  overrides: [{ files: ['*.svelte'], processor: 'svelte3/svelte3' }],
  settings: {
    'svelte3/typescript': () => require('typescript')
  },
  parserOptions: {
    sourceType: 'module',
    ecmaVersion: 2020
  },
  env: {
    browser: true,
    es2017: true,
    node: true
  }
};
```
**.github/FUNDING.yml** (vendored, 2 lines changed)

```diff
@@ -1 +1 @@
-ko_fi: andrasbacsai
+open_collective: coollabsio
```
**.gitignore** (vendored, 23 lines changed)

```diff
@@ -1,11 +1,16 @@
.vscode
.idea
.DS_Store
node_modules
dist
dist-ssr
.routify
/build
/.svelte-kit
/package
/yarn.lock
/.pnpm-store
/ssl

.env
yarn-error.log
api/development/console.log
.pnpm-debug.log
.pnpm-store
.env.prod
.env.stag
/db/*.db
/db/*.db-journal
/data/haproxy/haproxy.cfg
/data/haproxy/haproxy.cfg.lkg
```
**.husky/_/.gitignore** (vendored, new file, 1 line)

```
*
```
**.husky/pre-commit** (new executable file, 4 lines)

```sh
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"

yarn lint-staged
```
**.lintstagedrc.json** (new file, 5 lines)

```json
{
  "**/*.{js,jsx,ts,tsx,cjs,svelte,json,css,scss,md,yaml}": [
    "prettier --ignore-path .gitignore --write --plugin-search-dir=."
  ]
}
```
**.prettierrc** (18 lines changed)

```diff
@@ -1,14 +1,6 @@
 {
-	"arrowParens": "avoid",
-	"bracketSpacing": true,
-	"printWidth": 80,
-	"semi": true,
-	"singleQuote": false,
-	"tabWidth": 2,
-	"trailingComma": "all",
-	"svelteSortOrder" : "styles-scripts-markup",
-	"svelteStrictMode": true,
-	"svelteBracketNewLine": true,
-	"svelteAllowShorthand": true,
-	"plugins": ["prettier-plugin-svelte"]
-}
+	"useTabs": true,
+	"singleQuote": true,
+	"trailingComma": "none",
+	"printWidth": 100
+}
```
**CONTRIBUTING.md** (new file, 43 lines)

# Welcome

First of all, thank you for considering contributing to my project! It means a lot 💜.

# Technical skills required

- Node.js / Javascript
- Svelte / SvelteKit
- Prisma.io

# Recommended Pull Request Guideline

- Fork the project
- Clone your fork repo to local
- Create a new branch
- Push to your fork repo
- Create a pull request: https://github.com/coollabsio/compare
- Write a proper description
- Click "Change to draft"

# How to start after you set up your local fork?

This repository works best with [pnpm](https://pnpm.io) due to the lock file. I recommend you try and use `pnpm` as well, because it is cool and efficient!

You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.

## Setup development environment

- Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
- Install dependencies with `pnpm install`.
- Create a local SQLite database with `pnpm db:push`.
- This will apply all migrations and seed the database at `db/dev.db`.
- You can start coding after starting `pnpm dev` (the whole sequence is consolidated in the sketch below).
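Condensed, the setup above is a four-command session; a minimal sketch, assuming a POSIX shell and a running local Docker Engine:

```bash
cp .env.template .env   # then set COOLIFY_APP_ID to something cool
pnpm install            # install dependencies
pnpm db:push            # apply migrations and seed the SQLite database at db/dev.db
pnpm dev                # start the development server
```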

## Database migrations

During development, if you change the database layout, you need to run `pnpm db:push` to migrate the database and create types for Prisma. You also need to restart the development process.

If the schema is finalized, you need to create a migration file with `pnpm db:migrate <nameOfMigration>`, where `nameOfMigration` is given by you. Make it meaningful. :)
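The two commands split along the prototype/finalize line described above; a sketch of that flow (the migration name `addUserSettings` is a made-up example):

```bash
# While iterating on the schema:
pnpm db:push                      # sync the dev database and regenerate Prisma types
# ...then restart `pnpm dev`.

# Once the schema is finalized:
pnpm db:migrate addUserSettings   # create a named migration file
```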

## Tricky parts

- BullMQ, the queue system Coolify uses, cannot be hot reloaded. So if you change anything in the files related to it, you need to restart the development process. I'm actively looking for a different queue/scheduler library. I'm open to discussion!
**Dockerfile** (new file, 31 lines)

```dockerfile
FROM node:16.14.0-alpine
RUN apk add --no-cache g++ cmake make python3
WORKDIR /app
COPY package*.json .
RUN yarn install
COPY . .
RUN yarn build

FROM node:16.14.0-alpine
WORKDIR /app

LABEL coolify.managed true

RUN apk add --no-cache git openssh-client curl jq cmake sqlite

RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
RUN pnpm add -g pnpm

RUN curl -fsSL "https://download.docker.com/linux/static/stable/x86_64/docker-20.10.9.tgz" | tar -xzvf - docker/docker -C . --strip-components 1 && mv docker /usr/bin/docker
RUN mkdir -p ~/.docker/cli-plugins/
RUN curl -SL https://github.com/docker/compose/releases/download/v2.2.2/docker-compose-linux-x86_64 -o ~/.docker/cli-plugins/docker-compose
RUN chmod +x ~/.docker/cli-plugins/docker-compose

COPY --from=0 /app/docker-compose.yaml .
COPY --from=0 /app/build .
COPY --from=0 /app/package.json .
COPY --from=0 /app/node_modules ./node_modules
COPY --from=0 /app/prisma ./prisma

EXPOSE 3000
CMD ["pnpm", "start"]
```
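A hedged usage sketch for this two-stage image; the tag `coolify:local` and the port/socket flags are illustrative assumptions (the socket path matches the `DOCKER_ENGINE=/var/run/docker.sock` default in the environment template removed earlier in this diff):

```bash
docker build -t coolify:local .        # runs both stages; stage 0 builds, stage 1 ships
docker run --env-file .env -p 3000:3000 \
  -v /var/run/docker.sock:/var/run/docker.sock \
  coolify:local                        # the image EXPOSEs port 3000
```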
**README.md** (129 lines changed)

@@ -1,93 +1,98 @@

# About
# Coolify

https://andrasbacsai.com/farewell-netlify-and-heroku-after-3-days-of-coding
An open-source & self-hostable Heroku / Netlify alternative.

# Features
- Deploy your Node.js, static sites, PHP or any custom application (with custom Dockerfile) just by pushing code to git.
- Hassle-free installation and upgrade process.
- One-click MongoDB, MySQL, PostgreSQL, CouchDB deployments!
## Live Demo

# Upcoming features
- Backups & monitoring.
- User analytics with privacy in mind.
- And much more (see [Roadmap](https://github.com/coollabsio/coolify/projects/1)).
https://demo.coolify.io/

(If it is unresponsive, that means someone overloaded the server. 🙃)

# FAQ
Q: What is a buildpack?
## How to install

A: It defines your application's final form.
`Static` means that it will be hosted as a static site.
`NodeJs` means that it will be started as a node application.
Installation is automated with the following command:

# Screenshots
```bash
/bin/bash -c "$(curl -fsSL https://get.coollabs.io/coolify/install.sh)"
```

[Login](https://coolify.io/login.jpg)
If you would like no questions during installation

[Applications](https://coolify.io/applications.jpg)
## Features

[Databases](https://coolify.io/databases.jpg)
### Git Sources

[Configuration](https://coolify.io/configuration.jpg)
You can use the following Git Sources to be auto-deployed to your Coolify instance! (Self-hosted versions are also supported.)

[Settings](https://coolify.io/settings.jpg)
- Github
- GitLab
- Bitbucket (WIP)

[Logs](https://coolify.io/logs.jpg)
### Destinations

# Getting Started
You can deploy your applications to the following destinations:

Automatically: `/bin/bash -c "$(curl -fsSL https://get.coollabs.io/coolify/install.sh)"`
- Local Docker Engine
- Remote Docker Engine (WIP)
- Kubernetes (WIP)

Manually:
### Requirements before installation
- [Docker](https://docs.docker.com/engine/install/) version 20+
- Docker in [swarm mode enabled](https://docs.docker.com/engine/reference/commandline/swarm_init/) (should be set manually before installation)
- A [MongoDB](https://docs.mongodb.com/manual/installation/) instance.
- We have a [simple installation](https://github.com/coollabsio/infrastructure/tree/main/mongo) if you need one
- A configured DNS entry (see `.env.template`)
- [Github App](https://docs.github.com/en/developers/apps/creating-a-github-app)
### Applications

- GitHub App name: could be anything weird
- Homepage URL: https://yourdomain
These are the predefined build packs, but with the Docker build pack, you can host basically anything that is hostable with a single Dockerfile.

Identifying and authorizing users:
- Callback URL: https://yourdomain/api/v1/login/github/app
- Request user authorization (OAuth) during installation -> Check!
- Static sites
- NodeJS
- VueJS
- NuxtJS
- NextJS
- React/Preact
- NextJS
- Gatsby
- Svelte
- PHP
- Rust
- Docker

Webhook:
- Active -> Check!
- Webhook URL: https://yourdomain/api/v1/webhooks/deploy
- Webhook Secret: it should be super secret
### Databases

Repository permissions:
- Contents: Read-only
- Metadata: Read-only

User permissions:
- Email: Read-only
One-click database is ready to be used internally or shared over the internet:

Subscribe to events:
- Push -> Check!
- MongoDB
- MySQL
- PostgreSQL
- CouchDB
- Redis

### Installation
- Clone this repository: `git clone git@github.com:coollabsio/coolify.git`
- Set `.env` (see `.env.template`)
- Installation: `bash install.sh all`
### One-click services

## Manual updating process (You probably never need to do this!)
### Update everything (proxy+coolify)
- `bash install.sh all`
You can host cool open-source services as well:

### Update coolify only
- `bash install.sh coolify`
- [WordPress](https://wordpress.org)
- [Ghost](https://ghost.org)
- [Plausible Analytics](https://plausible.io)
- [NocoDB](https://nocodb.com)
- [VSCode Server](https://github.com/cdr/code-server)
- [MinIO](https://min.io)
- [VaultWarden](https://github.com/dani-garcia/vaultwarden)
- [LanguageTool](https://languagetool.org)
- [n8n](https://n8n.io)
- [Uptime Kuma](https://github.com/louislam/uptime-kuma)

### Update proxy only
- `bash install.sh proxy`
## Migration from v1

A fresh installation is necessary. v2 is not compatible with v1.

## Support

# Contact
- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
- Email: [andras@coollabs.io](mailto:andras@coollabs.io)
- Discord: [Invitation](https://discord.gg/xhBCC7eGKw)

## Contribute

See [our contribution guide](./CONTRIBUTING.md).

## License

# License
This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Please see the [LICENSE](/LICENSE) file in our repository for the full text.
**api/app.js** (deleted, 30 lines)

```js
module.exports = async function (fastify, opts) {
  // Private routes
  fastify.register(async function (server) {
    server.register(require('./plugins/authentication'))
    server.register(require('./routes/v1/upgrade'), { prefix: '/upgrade' })
    server.register(require('./routes/v1/settings'), { prefix: '/settings' })
    server.register(require('./routes/v1/dashboard'), { prefix: '/dashboard' })
    server.register(require('./routes/v1/config'), { prefix: '/config' })
    server.register(require('./routes/v1/application/remove'), { prefix: '/application/remove' })
    server.register(require('./routes/v1/application/logs'), { prefix: '/application/logs' })
    server.register(require('./routes/v1/application/check'), { prefix: '/application/check' })
    server.register(require('./routes/v1/application/deploy'), { prefix: '/application/deploy' })
    server.register(require('./routes/v1/application/deploy/logs'), { prefix: '/application/deploy/logs' })
    server.register(require('./routes/v1/databases'), { prefix: '/databases' })
    server.register(require('./routes/v1/services'), { prefix: '/services' })
    server.register(require('./routes/v1/services/deploy'), { prefix: '/services/deploy' })
    server.register(require('./routes/v1/server'), { prefix: '/server' })
  })
  // Public routes
  fastify.register(require('./routes/v1/verify'), { prefix: '/verify' })
  fastify.register(require('./routes/v1/login/github'), {
    prefix: '/login/github'
  })
  fastify.register(require('./routes/v1/webhooks/deploy'), {
    prefix: '/webhooks/deploy'
  })
  fastify.register(require('./routes/v1/undead'), {
    prefix: '/undead'
  })
}
```
@@ -1,15 +0,0 @@ (deleted file)

```js
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')

module.exports = async function (configuration) {
  const path = `${configuration.general.workdir}/${configuration.build.directory ? configuration.build.directory : ''}`
  if (fs.stat(`${path}/Dockerfile`)) {
    const stream = await docker.engine.buildImage(
      { src: ['.'], context: path },
      { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
    )
    await streamEvents(stream, configuration)
  } else {
    throw new Error('No custom dockerfile found.')
  }
}
```
@@ -1,24 +0,0 @@ (deleted file)

```js
const fs = require('fs').promises
const { streamEvents, docker } = require('../libs/docker')
const buildImageNodeDocker = (configuration) => {
  return [
    'FROM node:lts',
    'WORKDIR /usr/src/app',
    `COPY ${configuration.build.directory}/package*.json ./`,
    configuration.build.command.installation && `RUN ${configuration.build.command.installation}`,
    `COPY ./${configuration.build.directory} ./`,
    `RUN ${configuration.build.command.build}`
  ].join('\n')
}
async function buildImage (configuration) {
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, buildImageNodeDocker(configuration))
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}

module.exports = {
  buildImage
}
```
@@ -1,7 +0,0 @@ (deleted file)

```js
const static = require('./static')
const nodejs = require('./nodejs')
const php = require('./php')
const custom = require('./custom')
const rust = require('./rust')

module.exports = { static, nodejs, php, custom, rust }
```
@@ -1,28 +0,0 @@ (deleted file)

```js
const fs = require('fs').promises
const { buildImage } = require('../helpers')
const { streamEvents, docker } = require('../../libs/docker')
// `HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost:${configuration.publish.port}${configuration.publish.path} || exit 1`,
const publishNodejsDocker = (configuration) => {
  return [
    'FROM node:lts',
    'WORKDIR /usr/src/app',
    configuration.build.command.build
      ? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} ./`
      : `
COPY ${configuration.build.directory}/package*.json ./
RUN ${configuration.build.command.installation}
COPY ./${configuration.build.directory} ./`,
    `EXPOSE ${configuration.publish.port}`,
    'CMD [ "yarn", "start" ]'
  ].join('\n')
}

module.exports = async function (configuration) {
  if (configuration.build.command.build) await buildImage(configuration)
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishNodejsDocker(configuration))
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
```
@@ -1,22 +0,0 @@ (deleted file)

```js
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
// 'HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost/ || exit 1',
const publishPHPDocker = (configuration) => {
  return [
    'FROM php:apache',
    'RUN a2enmod rewrite',
    'WORKDIR /usr/src/app',
    `COPY ./${configuration.build.directory} /var/www/html`,
    'EXPOSE 80',
    ' CMD ["apache2-foreground"]'
  ].join('\n')
}

module.exports = async function (configuration) {
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishPHPDocker(configuration))
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
```
@@ -1,60 +0,0 @@ (deleted file)

```js
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
const { execShellAsync } = require('../../libs/common')
const TOML = require('@iarna/toml')

const publishRustDocker = (configuration, custom) => {
  return [
    'FROM rust:latest',
    'WORKDIR /app',
    `COPY --from=${configuration.build.container.name}:cache /app/target target`,
    `COPY --from=${configuration.build.container.name}:cache /usr/local/cargo /usr/local/cargo`,
    'COPY . .',
    `RUN cargo build --release --bin ${custom.name}`,
    'FROM debian:buster-slim',
    'WORKDIR /app',
    'RUN apt-get update -y && apt-get install -y --no-install-recommends openssl libcurl4 ca-certificates && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*',
    'RUN update-ca-certificates',
    `COPY --from=${configuration.build.container.name}:cache /app/target/release/${custom.name} ${custom.name}`,
    `EXPOSE ${configuration.publish.port}`,
    `CMD ["/app/${custom.name}"]`
  ].join('\n')
}

const cacheRustDocker = (configuration, custom) => {
  return [
    `FROM rust:latest AS planner-${configuration.build.container.name}`,
    'WORKDIR /app',
    'RUN cargo install cargo-chef',
    'COPY . .',
    'RUN cargo chef prepare --recipe-path recipe.json',
    'FROM rust:latest',
    'WORKDIR /app',
    'RUN cargo install cargo-chef',
    `COPY --from=planner-${configuration.build.container.name} /app/recipe.json recipe.json`,
    'RUN cargo chef cook --release --recipe-path recipe.json'
  ].join('\n')
}

module.exports = async function (configuration) {
  const cargoToml = await execShellAsync(`cat ${configuration.general.workdir}/Cargo.toml`)
  const parsedToml = TOML.parse(cargoToml)
  const custom = {
    name: parsedToml.package.name
  }
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, cacheRustDocker(configuration, custom))

  let stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:cache` }
  )
  await streamEvents(stream, configuration)

  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishRustDocker(configuration, custom))

  stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
```
@@ -1,28 +0,0 @@ (deleted file)

```js
const fs = require('fs').promises
const { buildImage } = require('../helpers')
const { streamEvents, docker } = require('../../libs/docker')

// 'HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost/ || exit 1',
const publishStaticDocker = (configuration) => {
  return [
    'FROM nginx:stable-alpine',
    'COPY nginx.conf /etc/nginx/nginx.conf',
    'WORKDIR /usr/share/nginx/html',
    configuration.build.command.build
      ? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} ./`
      : `COPY ./${configuration.build.directory} ./`,
    'EXPOSE 80',
    'CMD ["nginx", "-g", "daemon off;"]'
  ].join('\n')
}

module.exports = async function (configuration) {
  if (configuration.build.command.build) await buildImage(configuration)
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishStaticDocker(configuration))

  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
```
@@ -1,36 +0,0 @@ (deleted file)

```js
const mongoose = require('mongoose')
const { MongoMemoryServer } = require('mongodb-memory-server-core')

const mongoServer = new MongoMemoryServer({
  instance: {
    port: 27017,
    dbName: 'coolify',
    storageEngine: 'wiredTiger'
  },
  binary: {
    version: '4.4.3'

  }
})

mongoose.Promise = Promise
mongoServer.getUri().then((mongoUri) => {
  const mongooseOpts = {
    useNewUrlParser: true,
    useUnifiedTopology: true
  }

  mongoose.connect(mongoUri, mongooseOpts)

  mongoose.connection.on('error', (e) => {
    if (e.message.code === 'ETIMEDOUT') {
      console.log(e)
      mongoose.connect(mongoUri, mongooseOpts)
    }
    console.log(e)
  })

  mongoose.connection.once('open', () => {
    console.log(`Started in-memory mongodb ${mongoUri}`)
  })
})
```
@@ -1,28 +0,0 @@ (deleted file)

```js
const packs = require('../../../buildPacks')
const { saveAppLog } = require('../../logging')
const Deployment = require('../../../models/Deployment')

module.exports = async function (configuration) {
  const { id, organization, name, branch } = configuration.repository
  const { domain } = configuration.publish
  const deployId = configuration.general.deployId

  const execute = packs[configuration.build.pack]
  if (execute) {
    await Deployment.findOneAndUpdate(
      { repoId: id, branch, deployId, organization, name, domain },
      { repoId: id, branch, deployId, organization, name, domain, progress: 'inprogress' })
    await saveAppLog('### Building application.', configuration)
    await execute(configuration)
    await saveAppLog('### Building done.', configuration)
  } else {
    try {
      await Deployment.findOneAndUpdate(
        { repoId: id, branch, deployId, organization, name, domain },
        { repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
    } catch (error) {
      // Hmm.
    }
    throw new Error('No buildpack found.')
  }
}
```
@@ -1,30 +0,0 @@ (deleted file)

```js
const { docker } = require('../../docker')
const { execShellAsync } = require('../../common')
const Deployment = require('../../../models/Deployment')

async function purgeImagesContainers (configuration) {
  const { name, tag } = configuration.build.container
  await execShellAsync('docker container prune -f')
  const IDsToDelete = (await execShellAsync(`docker images ls --filter=reference='${name}' --filter=before='${name}:${tag}' --format '{{json .ID }}'`)).trim().replace(/"/g, '').split('\n')
  if (IDsToDelete.length !== 0) for (const id of IDsToDelete) await execShellAsync(`docker rmi -f ${id}`)
  await execShellAsync('docker image prune -f')
}

async function cleanupStuckedDeploymentsInDB () {
  // Cleanup stucked deployments.
  await Deployment.updateMany(
    { progress: { $in: ['queued', 'inprogress'] } },
    { progress: 'failed' }
  )
}

async function deleteSameDeployments (configuration) {
  await (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application').map(async s => {
    const running = JSON.parse(s.Spec.Labels.configuration)
    if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
      await execShellAsync(`docker stack rm ${s.Spec.Labels['com.docker.stack.namespace']}`)
    }
  })
}

module.exports = { cleanupStuckedDeploymentsInDB, deleteSameDeployments, purgeImagesContainers }
```
@@ -1,117 +0,0 @@ (deleted file)

```js
const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
const cuid = require('cuid')
const crypto = require('crypto')
const { docker } = require('../docker')
const { execShellAsync, baseServiceConfiguration } = require('../common')

function getUniq () {
  return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 })
}

function setDefaultConfiguration (configuration) {
  const nickname = getUniq()
  const deployId = cuid()

  const shaBase = JSON.stringify({ repository: configuration.repository })
  const sha256 = crypto.createHash('sha256').update(shaBase).digest('hex')

  configuration.build.container.name = sha256.slice(0, 15)

  configuration.general.nickname = nickname
  configuration.general.deployId = deployId
  configuration.general.workdir = `/tmp/${deployId}`

  if (!configuration.publish.path) configuration.publish.path = '/'
  if (!configuration.publish.port) {
    if (configuration.build.pack === 'php') {
      configuration.publish.port = 80
    } else if (configuration.build.pack === 'static') {
      configuration.publish.port = 80
    } else if (configuration.build.pack === 'nodejs') {
      configuration.publish.port = 3000
    } else if (configuration.build.pack === 'rust') {
      configuration.publish.port = 3000
    }
  }

  if (!configuration.build.directory) configuration.build.directory = ''
  if (configuration.build.directory.startsWith('/')) configuration.build.directory = configuration.build.directory.replace('/', '')

  if (!configuration.publish.directory) configuration.publish.directory = ''
  if (configuration.publish.directory.startsWith('/')) configuration.publish.directory = configuration.publish.directory.replace('/', '')

  if (configuration.build.pack === 'static' || configuration.build.pack === 'nodejs') {
    if (!configuration.build.command.installation) configuration.build.command.installation = 'yarn install'
  }

  configuration.build.container.baseSHA = crypto.createHash('sha256').update(JSON.stringify(baseServiceConfiguration)).digest('hex')
  configuration.baseServiceConfiguration = baseServiceConfiguration

  return configuration
}

async function updateServiceLabels (configuration) {
  // In case of any failure during deployment, still update the current configuration.
  const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
  const found = services.find(s => {
    const config = JSON.parse(s.Spec.Labels.configuration)
    if (config.repository.id === configuration.repository.id && config.repository.branch === configuration.repository.branch) {
      return config
    }
    return null
  })
  if (found) {
    const { ID } = found
    const Labels = { ...JSON.parse(found.Spec.Labels.configuration), ...configuration }
    await execShellAsync(`docker service update --label-add configuration='${JSON.stringify(Labels)}' --label-add com.docker.stack.image='${configuration.build.container.name}:${configuration.build.container.tag}' ${ID}`)
  }
}

async function precheckDeployment ({ services, configuration }) {
  let foundService = false
  let configChanged = false
  let imageChanged = false

  let forceUpdate = false

  for (const service of services) {
    const running = JSON.parse(service.Spec.Labels.configuration)
    if (running) {
      if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
        // Base service configuration changed
        if (!running.build.container.baseSHA || running.build.container.baseSHA !== configuration.build.container.baseSHA) {
          forceUpdate = true
        }
        // If the deployment is in error state, forceUpdate
        const state = await execShellAsync(`docker stack ps ${running.build.container.name} --format '{{ json . }}'`)
        const isError = state.split('\n').filter(n => n).map(s => JSON.parse(s)).filter(n => n.DesiredState !== 'Running' && n.Image.split(':')[1] === running.build.container.tag)
        if (isError.length > 0) forceUpdate = true
        foundService = true

        const runningWithoutContainer = JSON.parse(JSON.stringify(running))
        delete runningWithoutContainer.build.container

        const configurationWithoutContainer = JSON.parse(JSON.stringify(configuration))
        delete configurationWithoutContainer.build.container

        // If only the configuration changed
        if (JSON.stringify(runningWithoutContainer.build) !== JSON.stringify(configurationWithoutContainer.build) || JSON.stringify(runningWithoutContainer.publish) !== JSON.stringify(configurationWithoutContainer.publish)) configChanged = true
        // If only the image changed
        if (running.build.container.tag !== configuration.build.container.tag) imageChanged = true
        // If build pack changed, forceUpdate the service
        if (running.build.pack !== configuration.build.pack) forceUpdate = true
      }
    }
  }
  if (forceUpdate) {
    imageChanged = false
    configChanged = false
  }
  return {
    foundService,
    imageChanged,
    configChanged,
    forceUpdate
  }
}
module.exports = { setDefaultConfiguration, updateServiceLabels, precheckDeployment, baseServiceConfiguration }
```
@@ -1,64 +0,0 @@ (deleted file)

```js
const fs = require('fs').promises
module.exports = async function (configuration) {
  try {
    // TODO: Write full .dockerignore for all deployments!!
    if (configuration.build.pack === 'php') {
      await fs.writeFile(`${configuration.general.workdir}/.htaccess`, `
      RewriteEngine On
      RewriteBase /
      RewriteCond %{REQUEST_FILENAME} !-d
      RewriteCond %{REQUEST_FILENAME} !-f
      RewriteRule ^(.+)$ index.php [QSA,L]
      `)
    }
    // await fs.writeFile(`${configuration.general.workdir}/.dockerignore`, 'node_modules')
    if (configuration.build.pack === 'static') {
      await fs.writeFile(
        `${configuration.general.workdir}/nginx.conf`,
        `user nginx;
worker_processes auto;

error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;

events {
  worker_connections 1024;
}

http {
  include /etc/nginx/mime.types;

  access_log off;
  sendfile on;
  #tcp_nopush on;
  keepalive_timeout 65;

  server {
    listen 80;
    server_name localhost;

    location / {
      root /usr/share/nginx/html;
      index index.html;
      try_files $uri $uri/index.html $uri/ /index.html =404;
    }

    error_page 404 /50x.html;

    # redirect server error pages to the static page /50x.html
    #
    error_page 500 502 503 504 /50x.html;
    location = /50x.html {
      root /usr/share/nginx/html;
    }

  }

}
`
      )
    }
  } catch (error) {
    throw new Error(error)
  }
}
```
@@ -1,76 +0,0 @@ (deleted file)

```js
const yaml = require('js-yaml')
const fs = require('fs').promises
const { execShellAsync } = require('../../common')
const { docker } = require('../../docker')
const { saveAppLog } = require('../../logging')
const { deleteSameDeployments } = require('../cleanup')

module.exports = async function (configuration, imageChanged) {
  const generateEnvs = {}
  for (const secret of configuration.publish.secrets) {
    generateEnvs[secret.name] = secret.value
  }
  const containerName = configuration.build.container.name

  // Only save SHA256 of it in the configuration label
  const baseServiceConfiguration = configuration.baseServiceConfiguration
  delete configuration.baseServiceConfiguration

  const stack = {
    version: '3.8',
    services: {
      [containerName]: {
        image: `${configuration.build.container.name}:${configuration.build.container.tag}`,
        networks: [`${docker.network}`],
        environment: generateEnvs,
        deploy: {
          ...baseServiceConfiguration,
          labels: [
            'managedBy=coolify',
            'type=application',
            'configuration=' + JSON.stringify(configuration),
            'traefik.enable=true',
            'traefik.http.services.' +
              configuration.build.container.name +
              `.loadbalancer.server.port=${configuration.publish.port}`,
            'traefik.http.routers.' +
              configuration.build.container.name +
              '.entrypoints=websecure',
            'traefik.http.routers.' +
              configuration.build.container.name +
              '.rule=Host(`' +
              configuration.publish.domain +
              '`) && PathPrefix(`' +
              configuration.publish.path +
              '`)',
            'traefik.http.routers.' +
              configuration.build.container.name +
              '.tls.certresolver=letsencrypt',
            'traefik.http.routers.' +
              configuration.build.container.name +
              '.middlewares=global-compress'
          ]
        }
      }
    },
    networks: {
      [`${docker.network}`]: {
        external: true
      }
    }
  }
  await saveAppLog('### Publishing.', configuration)
  await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
  if (imageChanged) {
    // console.log('image changed')
    await execShellAsync(`docker service update --image ${configuration.build.container.name}:${configuration.build.container.tag} ${configuration.build.container.name}_${configuration.build.container.name}`)
  } else {
    // console.log('new deployment or force deployment or config changed')
    await deleteSameDeployments(configuration)
    await execShellAsync(
      `cat ${configuration.general.workdir}/stack.yml | docker stack deploy --prune -c - ${containerName}`
    )
  }

  await saveAppLog('### Published done!', configuration)
}
```
@@ -1,38 +0,0 @@ (deleted file)

```js
const jwt = require('jsonwebtoken')
const axios = require('axios')
const { execShellAsync, cleanupTmp } = require('../../common')

module.exports = async function (configuration) {
  const { workdir } = configuration.general
  const { organization, name, branch } = configuration.repository
  const github = configuration.github

  const githubPrivateKey = process.env.GITHUB_APP_PRIVATE_KEY.replace(/\\n/g, '\n').replace(/"/g, '')

  const payload = {
    iat: Math.round(new Date().getTime() / 1000),
    exp: Math.round(new Date().getTime() / 1000 + 60),
    iss: parseInt(github.app.id)
  }

  const jwtToken = jwt.sign(payload, githubPrivateKey, {
    algorithm: 'RS256'
  })
  const accessToken = await axios({
    method: 'POST',
    url: `https://api.github.com/app/installations/${github.installation.id}/access_tokens`,
    data: {},
    headers: {
      Authorization: 'Bearer ' + jwtToken,
      Accept: 'application/vnd.github.machine-man-preview+json'
    }
  })
  await execShellAsync(
    `mkdir -p ${workdir} && git clone -q -b ${branch} https://x-access-token:${accessToken.data.token}@github.com/${organization}/${name}.git ${workdir}/`
  )
  configuration.build.container.tag = (
    await execShellAsync(`cd ${configuration.general.workdir}/ && git rev-parse HEAD`)
  )
    .replace('\n', '')
    .slice(0, 7)
}
```
@@ -1,27 +0,0 @@ (deleted file)

```js
const dayjs = require('dayjs')

const { saveAppLog } = require('../logging')
const copyFiles = require('./deploy/copyFiles')
const buildContainer = require('./build/container')
const deploy = require('./deploy/deploy')
const Deployment = require('../../models/Deployment')
const { updateServiceLabels } = require('./configuration')

async function queueAndBuild (configuration, imageChanged) {
  const { id, organization, name, branch } = configuration.repository
  const { domain } = configuration.publish
  const { deployId, nickname } = configuration.general
  await new Deployment({
    repoId: id, branch, deployId, domain, organization, name, nickname
  }).save()
  await saveAppLog(`${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} Queued.`, configuration)
  await copyFiles(configuration)
  await buildContainer(configuration)
  await deploy(configuration, imageChanged)
  await Deployment.findOneAndUpdate(
    { repoId: id, branch, deployId, organization, name, domain },
    { repoId: id, branch, deployId, organization, name, domain, progress: 'done' })
  await updateServiceLabels(configuration)
}

module.exports = { queueAndBuild }
```
@@ -1,117 +0,0 @@ (deleted file)

```js
const crypto = require('crypto')
const shell = require('shelljs')
const jsonwebtoken = require('jsonwebtoken')
const { docker } = require('./docker')
const User = require('../models/User')
const algorithm = 'aes-256-cbc'
const key = process.env.SECRETS_ENCRYPTION_KEY

const baseServiceConfiguration = {
  replicas: 1,
  restart_policy: {
    condition: 'any',
    max_attempts: 6
  },
  update_config: {
    parallelism: 1,
    delay: '10s',
    order: 'start-first'
  },
  rollback_config: {
    parallelism: 1,
    delay: '10s',
    order: 'start-first',
    failure_action: 'rollback'
  }
}
function delay (t) {
  return new Promise(function (resolve) {
    setTimeout(function () {
      resolve('OK')
    }, t)
  })
}

async function verifyUserId (authorization) {
  try {
    const token = authorization.split(' ')[1]
    const verify = jsonwebtoken.verify(token, process.env.JWT_SIGN_KEY)
    const found = await User.findOne({ uid: verify.jti })
    if (found) {
      return true
    } else {
      return false
    }
  } catch (error) {
    return false
  }
}
function execShellAsync (cmd, opts = {}) {
  try {
    return new Promise(function (resolve, reject) {
      shell.config.silent = true
      shell.exec(cmd, opts, function (code, stdout, stderr) {
        if (code !== 0) return reject(new Error(stderr))
        return resolve(stdout)
      })
    })
  } catch (error) {
    return new Error('Oops')
  }
}
function cleanupTmp (dir) {
  if (dir !== '/') shell.rm('-fr', dir)
}

async function checkImageAvailable (name) {
  let cacheAvailable = false
  try {
    await docker.engine.getImage(name).get()
    cacheAvailable = true
  } catch (e) {
    // Cache image not found
  }
  return cacheAvailable
}

function encryptData (text) {
  const iv = crypto.randomBytes(16)
  const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)
  let encrypted = cipher.update(text)
  encrypted = Buffer.concat([encrypted, cipher.final()])
  return { iv: iv.toString('hex'), encryptedData: encrypted.toString('hex') }
}

function decryptData (text) {
  const iv = Buffer.from(text.iv, 'hex')
  const encryptedText = Buffer.from(text.encryptedData, 'hex')
  const decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
  let decrypted = decipher.update(encryptedText)
  decrypted = Buffer.concat([decrypted, decipher.final()])
  return decrypted.toString()
}

function createToken (payload) {
  const { uuid } = payload
  return jsonwebtoken.sign({}, process.env.JWT_SIGN_KEY, {
    expiresIn: 15778800,
    algorithm: 'HS256',
    audience: 'coolify',
    issuer: 'coolify',
    jwtid: uuid,
    subject: `User:${uuid}`,
    notBefore: -1000
  })
}

module.exports = {
  delay,
  createToken,
  execShellAsync,
  cleanupTmp,
  checkImageAvailable,
  encryptData,
  decryptData,
  verifyUserId,
  baseServiceConfiguration
}
```
@@ -1,28 +0,0 @@ (deleted file)

```js
const Dockerode = require('dockerode')
const { saveAppLog } = require('./logging')

const docker = {
  engine: new Dockerode({
    socketPath: process.env.DOCKER_ENGINE
  }),
  network: process.env.DOCKER_NETWORK
}
async function streamEvents (stream, configuration) {
  await new Promise((resolve, reject) => {
    docker.engine.modem.followProgress(stream, onFinished, onProgress)
    function onFinished (err, res) {
      if (err) reject(err)
      resolve(res)
    }
    function onProgress (event) {
      if (event.error) {
        saveAppLog(event.error, configuration, true)
        reject(event.error)
      } else if (event.stream) {
        saveAppLog(event.stream, configuration)
      }
    }
  })
}

module.exports = { streamEvents, docker }
```
@@ -1,75 +0,0 @@
/* eslint-disable */
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.handleErrors = exports.handleValidationError = exports.handleNotFoundError = void 0;
const http_errors_enhanced_1 = require("http-errors-enhanced");
const interfaces_1 = require("./interfaces");
const utils_1 = require("./utils");
const validation_1 = require("./validation");
function handleNotFoundError(request, reply) {
    handleErrors(new http_errors_enhanced_1.NotFoundError('Not found.'), request, reply);
}
exports.handleNotFoundError = handleNotFoundError;
function handleValidationError(error, request) {
    /*
      As seen in https://github.com/fastify/fastify/blob/master/lib/validation.js
      the error.message will always start with the relative section (params, querystring, headers, body)
      and fastify throws on the first failing section.
    */
    const section = error.message.match(/^\w+/)[0];
    return new http_errors_enhanced_1.BadRequestError('One or more validations failed trying to process your request.', {
        failedValidations: validation_1.convertValidationErrors(section, Reflect.get(request, section), error.validation)
    });
}
exports.handleValidationError = handleValidationError;
function handleErrors(error, request, reply) {
    var _a, _b;
    // It is a generic error, handle it
    const code = error.code;
    if (!('statusCode' in error)) {
        if ('validation' in error && ((_a = request[interfaces_1.kHttpErrorsEnhancedConfiguration]) === null || _a === void 0 ? void 0 : _a.convertValidationErrors)) {
            // If it is a validation error, convert errors to a human friendly format
            error = handleValidationError(error, request);
        }
        else if ((_b = request[interfaces_1.kHttpErrorsEnhancedConfiguration]) === null || _b === void 0 ? void 0 : _b.hideUnhandledErrors) {
            // It is requested to hide the error, just log it and then create a generic one
            request.log.error({ error: http_errors_enhanced_1.serializeError(error) });
            error = new http_errors_enhanced_1.InternalServerError('An error occurred trying to process your request.');
        }
        else {
            // Wrap in an HttpError, making the stack explicitly available
            error = new http_errors_enhanced_1.InternalServerError(http_errors_enhanced_1.serializeError(error));
            Object.defineProperty(error, 'stack', { enumerable: true });
        }
    }
    else if (code === 'INVALID_CONTENT_TYPE' || code === 'FST_ERR_CTP_INVALID_MEDIA_TYPE') {
        error = new http_errors_enhanced_1.UnsupportedMediaTypeError(utils_1.upperFirst(validation_1.validationMessagesFormatters.contentType()));
    }
    else if (code === 'FST_ERR_CTP_EMPTY_JSON_BODY') {
        error = new http_errors_enhanced_1.BadRequestError(utils_1.upperFirst(validation_1.validationMessagesFormatters.jsonEmpty()));
    }
    else if (code === 'MALFORMED_JSON' || error.message === 'Invalid JSON' || error.stack.includes('at JSON.parse')) {
        error = new http_errors_enhanced_1.BadRequestError(utils_1.upperFirst(validation_1.validationMessagesFormatters.json()));
    }
    // Get the status code
    let { statusCode, headers } = error;
    // Code outside HTTP range
    if (statusCode < 100 || statusCode > 599) {
        statusCode = http_errors_enhanced_1.INTERNAL_SERVER_ERROR;
    }
    // Create the body
    const body = {
        statusCode,
        error: http_errors_enhanced_1.messagesByCodes[statusCode],
        message: error.message
    };
    http_errors_enhanced_1.addAdditionalProperties(body, error);
    // Send the error back
    // eslint-disable-next-line @typescript-eslint/no-floating-promises
    reply
        .code(statusCode)
        .headers(headers !== null && headers !== void 0 ? headers : {})
        .type('application/json')
        .send(body);
}
exports.handleErrors = handleErrors;
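To illustrate the normalization above, a minimal sketch of what handleErrors sends back for a thrown HttpError (the error class and message here are examples):

  const { NotFoundError } = require('http-errors-enhanced')
  // thrown inside a route handler:
  throw new NotFoundError('Not found.')
  // handleErrors replies with status 404, Content-Type application/json, and body:
  // { "statusCode": 404, "error": "Not Found", "message": "Not found." }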
@@ -1,58 +0,0 @@
/* eslint-disable */
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.plugin = exports.validationMessagesFormatters = exports.niceJoin = exports.convertValidationErrors = void 0;
const fastify_plugin_1 = __importDefault(require("fastify-plugin"));
const handlers_1 = require("./handlers");
const interfaces_1 = require("./interfaces");
const validation_1 = require("./validation");
__exportStar(require("./handlers"), exports);
__exportStar(require("./interfaces"), exports);
var validation_2 = require("./validation");
Object.defineProperty(exports, "convertValidationErrors", { enumerable: true, get: function () { return validation_2.convertValidationErrors; } });
Object.defineProperty(exports, "niceJoin", { enumerable: true, get: function () { return validation_2.niceJoin; } });
Object.defineProperty(exports, "validationMessagesFormatters", { enumerable: true, get: function () { return validation_2.validationMessagesFormatters; } });
exports.plugin = fastify_plugin_1.default(function (instance, options, done) {
    var _a, _b, _c, _d;
    const isProduction = process.env.NODE_ENV === 'production';
    const convertResponsesValidationErrors = (_a = options.convertResponsesValidationErrors) !== null && _a !== void 0 ? _a : !isProduction;
    const configuration = {
        hideUnhandledErrors: (_b = options.hideUnhandledErrors) !== null && _b !== void 0 ? _b : isProduction,
        convertValidationErrors: (_c = options.convertValidationErrors) !== null && _c !== void 0 ? _c : true,
        responseValidatorCustomizer: options.responseValidatorCustomizer,
        allowUndeclaredResponses: (_d = options.allowUndeclaredResponses) !== null && _d !== void 0 ? _d : false
    };
    instance.decorate(interfaces_1.kHttpErrorsEnhancedConfiguration, null);
    instance.decorateRequest(interfaces_1.kHttpErrorsEnhancedConfiguration, null);
    instance.addHook('onRequest', async (request) => {
        request[interfaces_1.kHttpErrorsEnhancedConfiguration] = configuration;
    });
    instance.setErrorHandler(handlers_1.handleErrors);
    // instance.setNotFoundHandler(handlers_1.handleNotFoundError);
    if (convertResponsesValidationErrors) {
        instance.decorate(interfaces_1.kHttpErrorsEnhancedResponseValidations, []);
        instance.addHook('onRoute', validation_1.addResponseValidation);
        instance.addHook('onReady', validation_1.compileResponseValidationSchema.bind(instance, configuration));
    }
    done();
}, { name: 'fastify-http-errors-enhanced' });
exports.default = exports.plugin;
// Fix CommonJS exporting
/* istanbul ignore else */
if (typeof module !== 'undefined') {
    module.exports = exports.plugin;
    Object.assign(module.exports, exports);
}
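Registering the compiled plugin above on a Fastify instance, with option names taken straight from the configuration block (the require path is an assumption, since the file layout is vendored):

  const fastify = require('fastify')()
  const httpErrorsEnhanced = require('./fastify-http-errors-enhanced')

  fastify.register(httpErrorsEnhanced, {
    hideUnhandledErrors: true, // log unhandled errors, reply with a generic 500
    convertValidationErrors: true, // translate ajv errors via convertValidationErrors
    allowUndeclaredResponses: false // reject responses whose status has no schema
  })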
@@ -1,6 +0,0 @@
/* eslint-disable */
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.kHttpErrorsEnhancedResponseValidations = exports.kHttpErrorsEnhancedConfiguration = void 0;
exports.kHttpErrorsEnhancedConfiguration = Symbol('fastify-http-errors-enhanced-configuration');
exports.kHttpErrorsEnhancedResponseValidations = Symbol('fastify-http-errors-enhanced-response-validation');
@@ -1,31 +0,0 @@
/* eslint-disable */
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.get = exports.upperFirst = void 0;
function upperFirst(source) {
    if (typeof source !== 'string' || !source.length) {
        return source;
    }
    return source[0].toUpperCase() + source.substring(1);
}
exports.upperFirst = upperFirst;
function get(target, path) {
    var _a;
    const tokens = path.split('.').map((t) => t.trim());
    for (const token of tokens) {
        if (typeof target === 'undefined' || target === null) {
            // We're supposed to be still iterating, but the chain is over - return undefined
            target = undefined;
            break;
        }
        const index = token.match(/^(\d+)|(?:\[(\d+)\])$/);
        if (index) {
            target = target[parseInt((_a = index[1]) !== null && _a !== void 0 ? _a : index[2], 10)];
        }
        else {
            target = target[token];
        }
    }
    return target;
}
exports.get = get;
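The get() helper above walks dot-separated paths, treating numeric and "[n]" tokens as array indexes; a few illustrative calls:

  get({ a: { b: ['x', 'y'] } }, 'a.b.1') // => 'y' (numeric token indexes the array)
  get({ a: { b: ['x', 'y'] } }, 'a.b.[0]') // => 'x' ("[n]" tokens are also accepted)
  get({ a: null }, 'a.b.c') // => undefined (the chain stops at null)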
@@ -1,239 +0,0 @@
/* eslint-disable */
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.compileResponseValidationSchema = exports.addResponseValidation = exports.convertValidationErrors = exports.validationMessagesFormatters = exports.niceJoin = void 0;
const ajv_1 = __importDefault(require("ajv"));
const http_errors_enhanced_1 = require("http-errors-enhanced");
const interfaces_1 = require("./interfaces");
const utils_1 = require("./utils");
function niceJoin(array, lastSeparator = ' and ', separator = ', ') {
    switch (array.length) {
        case 0:
            return '';
        case 1:
            return array[0];
        case 2:
            return array.join(lastSeparator);
        default:
            return array.slice(0, array.length - 1).join(separator) + lastSeparator + array[array.length - 1];
    }
}
exports.niceJoin = niceJoin;
exports.validationMessagesFormatters = {
    contentType: () => 'only JSON payloads are accepted. Please set the "Content-Type" header to start with "application/json"',
    json: () => 'the body payload is not a valid JSON',
    jsonEmpty: () => 'the JSON body payload cannot be empty if the "Content-Type" header is set',
    missing: () => 'must be present',
    unknown: () => 'is not a valid property',
    uuid: () => 'must be a valid GUID (UUID v4)',
    timestamp: () => 'must be a valid ISO 8601 / RFC 3339 timestamp (example: 2018-07-06T12:34:56Z)',
    date: () => 'must be a valid ISO 8601 / RFC 3339 date (example: 2018-07-06)',
    time: () => 'must be a valid ISO 8601 / RFC 3339 time (example: 12:34:56)',
    uri: () => 'must be a valid URI',
    hostname: () => 'must be a valid hostname',
    ipv4: () => 'must be a valid IPv4',
    ipv6: () => 'must be a valid IPv6',
    paramType: (type) => {
        switch (type) {
            case 'integer':
                return 'must be a valid integer number';
            case 'number':
                return 'must be a valid number';
            case 'boolean':
                return 'must be a valid boolean (true or false)';
            case 'object':
                return 'must be a object';
            case 'array':
                return 'must be an array';
            default:
                return 'must be a string';
        }
    },
    presentString: () => 'must be a non empty string',
    minimum: (min) => `must be a number greater than or equal to ${min}`,
    maximum: (max) => `must be a number less than or equal to ${max}`,
    minimumProperties(min) {
        return min === 1 ? 'cannot be a empty object' : `must be a object with at least ${min} properties`;
    },
    maximumProperties(max) {
        return max === 0 ? 'must be a empty object' : `must be a object with at most ${max} properties`;
    },
    minimumItems(min) {
        return min === 1 ? 'cannot be a empty array' : `must be an array with at least ${min} items`;
    },
    maximumItems(max) {
        return max === 0 ? 'must be a empty array' : `must be an array with at most ${max} items`;
    },
    enum: (values) => `must be one of the following values: ${niceJoin(values.map((f) => `"${f}"`), ' or ')}`,
    pattern: (pattern) => `must match pattern "${pattern.replace(/\(\?:/g, '(')}"`,
    invalidResponseCode: (code) => `This endpoint cannot respond with HTTP status ${code}.`,
    invalidResponse: (code) => `The response returned from the endpoint violates its specification for the HTTP status ${code}.`,
    invalidFormat: (format) => `must match format "${format}" (format)`
};
function convertValidationErrors(section, data, validationErrors) {
    const errors = {};
    if (section === 'querystring') {
        section = 'query';
    }
    // For each error
    for (const e of validationErrors) {
        let message = '';
        let pattern;
        let value;
        let reason;
        // Normalize the key
        let key = e.dataPath;
        if (key.startsWith('.')) {
            key = key.substring(1);
        }
        // Remove useless quotes
        /* istanbul ignore next */
        if (key.startsWith('[') && key.endsWith(']')) {
            key = key.substring(1, key.length - 1);
        }
        // Depending on the type
        switch (e.keyword) {
            case 'required':
            case 'dependencies':
                key = e.params.missingProperty;
                message = exports.validationMessagesFormatters.missing();
                break;
            case 'additionalProperties':
                key = e.params.additionalProperty;
                message = exports.validationMessagesFormatters.unknown();
                break;
            case 'type':
                message = exports.validationMessagesFormatters.paramType(e.params.type);
                break;
            case 'minProperties':
                message = exports.validationMessagesFormatters.minimumProperties(e.params.limit);
                break;
            case 'maxProperties':
                message = exports.validationMessagesFormatters.maximumProperties(e.params.limit);
                break;
            case 'minItems':
                message = exports.validationMessagesFormatters.minimumItems(e.params.limit);
                break;
            case 'maxItems':
                message = exports.validationMessagesFormatters.maximumItems(e.params.limit);
                break;
            case 'minimum':
                message = exports.validationMessagesFormatters.minimum(e.params.limit);
                break;
            case 'maximum':
                message = exports.validationMessagesFormatters.maximum(e.params.limit);
                break;
            case 'enum':
                message = exports.validationMessagesFormatters.enum(e.params.allowedValues);
                break;
            case 'pattern':
                pattern = e.params.pattern;
                value = utils_1.get(data, key);
                if (pattern === '.+' && !value) {
                    message = exports.validationMessagesFormatters.presentString();
                }
                else {
                    message = exports.validationMessagesFormatters.pattern(e.params.pattern);
                }
                break;
            case 'format':
                reason = e.params.format;
                // Normalize the reason
                if (reason === 'date-time') {
                    reason = 'timestamp';
                }
                message = (exports.validationMessagesFormatters[reason] || exports.validationMessagesFormatters.invalidFormat)(reason);
                break;
        }
        // No custom message was found, default to the input one, replacing the starting verb and adding some path info
        if (!message.length) {
            message = `${e.message.replace(/^should/, 'must')} (${e.keyword})`;
        }
        // Remove useless quotes
        /* istanbul ignore next */
        if (key.match(/(?:^['"])(?:[^.]+)(?:['"]$)/)) {
            key = key.substring(1, key.length - 1);
        }
        // Fix empty properties
        if (!key) {
            key = '$root';
        }
        key = key.replace(/^\//, '');
        errors[key] = message;
    }
    return { [section]: errors };
}
exports.convertValidationErrors = convertValidationErrors;
function addResponseValidation(route) {
    var _a;
    if (!((_a = route.schema) === null || _a === void 0 ? void 0 : _a.response)) {
        return;
    }
    const validators = {};
    /*
      Add these validators to the list of the ones to compile once the server is started.
      This makes it possible to handle shared schemas.
    */
    this[interfaces_1.kHttpErrorsEnhancedResponseValidations].push([
        this,
        validators,
        Object.entries(route.schema.response)
    ]);
    // Note that this hook is not called for non-JSON payloads, therefore validation is not possible in such cases
    route.preSerialization = async function (request, reply, payload) {
        const statusCode = reply.raw.statusCode;
        // Never validate error 500
        if (statusCode === http_errors_enhanced_1.INTERNAL_SERVER_ERROR) {
            return payload;
        }
        // No validator, it means the HTTP status is not allowed
        const validator = validators[statusCode];
        if (!validator) {
            if (request[interfaces_1.kHttpErrorsEnhancedConfiguration].allowUndeclaredResponses) {
                return payload;
            }
            throw new http_errors_enhanced_1.InternalServerError(exports.validationMessagesFormatters.invalidResponseCode(statusCode));
        }
        // Now validate the payload
        const valid = validator(payload);
        if (!valid) {
            throw new http_errors_enhanced_1.InternalServerError(exports.validationMessagesFormatters.invalidResponse(statusCode), {
                failedValidations: convertValidationErrors('response', payload, validator.errors)
            });
        }
        return payload;
    };
}
exports.addResponseValidation = addResponseValidation;
function compileResponseValidationSchema(configuration) {
    // Fix CJS/ESM interoperability
    // @ts-expect-error
    let AjvConstructor = ajv_1.default;
    /* istanbul ignore next */
    if (AjvConstructor.default) {
        AjvConstructor = AjvConstructor.default;
    }
    const hasCustomizer = typeof configuration.responseValidatorCustomizer === 'function';
    for (const [instance, validators, schemas] of this[interfaces_1.kHttpErrorsEnhancedResponseValidations]) {
        // @ts-expect-error
        const compiler = new AjvConstructor({
            // The fastify defaults, with the exception of removeAdditional and coerceTypes, which have been reversed
            removeAdditional: false,
            useDefaults: true,
            coerceTypes: false,
            allErrors: true
        });
        compiler.addSchema(Object.values(instance.getSchemas()));
        compiler.addKeyword('example');
        if (hasCustomizer) {
            configuration.responseValidatorCustomizer(compiler);
        }
        for (const [code, schema] of schemas) {
            validators[code] = compiler.compile(schema);
        }
    }
}
exports.compileResponseValidationSchema = compileResponseValidationSchema;
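A minimal sketch of the translation performed by convertValidationErrors, fed a hand-written ajv-style error (the error object shape follows the fields read above, not a captured payload):

  const failed = convertValidationErrors('body', { age: 'x' }, [
    { keyword: 'type', dataPath: '.age', params: { type: 'integer' }, message: 'should be integer' }
  ])
  // => { body: { age: 'must be a valid integer number' } }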
@@ -1,48 +0,0 @@
const dayjs = require('dayjs')
const axios = require('axios')

const ApplicationLog = require('../models/Logs/Application')
const ServerLog = require('../models/Logs/Server')
const Settings = require('../models/Settings')
const { version } = require('../../package.json')

function generateTimestamp () {
  return `${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} `
}
const patterns = [
  '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
  '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
].join('|')

async function saveAppLog (event, configuration, isError) {
  try {
    const deployId = configuration.general.deployId
    const repoId = configuration.repository.id
    const branch = configuration.repository.branch
    if (isError) {
      const clearedEvent = '[ERROR 😱] ' + generateTimestamp() + event.replace(new RegExp(patterns, 'g'), '').replace(/(\r\n|\n|\r)/gm, '')
      await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
    } else {
      if (event && event !== '\n') {
        const clearedEvent = '[INFO] ' + generateTimestamp() + event.replace(new RegExp(patterns, 'g'), '').replace(/(\r\n|\n|\r)/gm, '')
        await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
      }
    }
  } catch (error) {
    console.log(error)
    return error
  }
}

async function saveServerLog (error) {
  const settings = await Settings.findOne({ applicationName: 'coolify' })
  const payload = { message: error.message, stack: error.stack, type: error.type || 'spaghetticode', version }

  const found = await ServerLog.find(payload)
  if (found.length === 0 && error.message) await new ServerLog(payload).save()
  if (settings && settings.sendErrors && process.env.NODE_ENV === 'production') await axios.post('https://errors.coollabs.io/api/error', payload)
}
module.exports = {
  saveAppLog,
  saveServerLog
}
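saveAppLog strips ANSI escape sequences and newlines before persisting, so a colored build line is stored flat (the timestamp shown is illustrative):

  await saveAppLog('\u001B[32mDone!\u001B[0m\n', configuration)
  // stored event: '[INFO] 2021-04-01 12:00:00.000 Done!'
  await saveAppLog(error.message, configuration, true)
  // stored event: '[ERROR 😱] 2021-04-01 12:00:00.000 <message>'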
@@ -1,185 +0,0 @@
const { execShellAsync, cleanupTmp, baseServiceConfiguration } = require('../../common')
const yaml = require('js-yaml')
const fs = require('fs').promises
const generator = require('generate-password')
const { docker } = require('../../docker')

async function plausible ({ email, userName, userPassword, baseURL, traefikURL }) {
  const deployId = 'plausible'
  const workdir = '/tmp/plausible'
  const secretKey = generator.generate({ length: 64, numbers: true, strict: true })
  const generateEnvsPostgres = {
    POSTGRESQL_PASSWORD: generator.generate({ length: 24, numbers: true, strict: true }),
    POSTGRESQL_USERNAME: generator.generate({ length: 10, numbers: true, strict: true }),
    POSTGRESQL_DATABASE: 'plausible'
  }

  const secrets = [
    { name: 'ADMIN_USER_EMAIL', value: email },
    { name: 'ADMIN_USER_NAME', value: userName },
    { name: 'ADMIN_USER_PWD', value: userPassword },
    { name: 'BASE_URL', value: baseURL },
    { name: 'SECRET_KEY_BASE', value: secretKey },
    { name: 'DISABLE_AUTH', value: 'false' },
    { name: 'DISABLE_REGISTRATION', value: 'true' },
    { name: 'DATABASE_URL', value: `postgresql://${generateEnvsPostgres.POSTGRESQL_USERNAME}:${generateEnvsPostgres.POSTGRESQL_PASSWORD}@plausible_db:5432/${generateEnvsPostgres.POSTGRESQL_DATABASE}` },
    { name: 'CLICKHOUSE_DATABASE_URL', value: 'http://plausible_events_db:8123/plausible' }
  ]

  const generateEnvsClickhouse = {}
  for (const secret of secrets) generateEnvsClickhouse[secret.name] = secret.value

  const clickhouseConfigXml = `
  <yandex>
    <logger>
      <level>warning</level>
      <console>true</console>
    </logger>

    <!-- Stop all the unnecessary logging -->
    <query_thread_log remove="remove"/>
    <query_log remove="remove"/>
    <text_log remove="remove"/>
    <trace_log remove="remove"/>
    <metric_log remove="remove"/>
    <asynchronous_metric_log remove="remove"/>
  </yandex>`
  const clickhouseUserConfigXml = `
  <yandex>
    <profiles>
      <default>
        <log_queries>0</log_queries>
        <log_query_threads>0</log_query_threads>
      </default>
    </profiles>
  </yandex>`

  const clickhouseConfigs = [
    { source: 'plausible-clickhouse-user-config.xml', target: '/etc/clickhouse-server/users.d/logging.xml' },
    { source: 'plausible-clickhouse-config.xml', target: '/etc/clickhouse-server/config.d/logging.xml' },
    { source: 'plausible-init.query', target: '/docker-entrypoint-initdb.d/init.query' },
    { source: 'plausible-init-db.sh', target: '/docker-entrypoint-initdb.d/init-db.sh' }
  ]

  const initQuery = 'CREATE DATABASE IF NOT EXISTS plausible;'
  const initScript = 'clickhouse client --queries-file /docker-entrypoint-initdb.d/init.query'
  await execShellAsync(`mkdir -p ${workdir}`)
  await fs.writeFile(`${workdir}/clickhouse-config.xml`, clickhouseConfigXml)
  await fs.writeFile(`${workdir}/clickhouse-user-config.xml`, clickhouseUserConfigXml)
  await fs.writeFile(`${workdir}/init.query`, initQuery)
  await fs.writeFile(`${workdir}/init-db.sh`, initScript)
  const stack = {
    version: '3.8',
    services: {
      [deployId]: {
        image: 'plausible/analytics:latest',
        command: 'sh -c "sleep 10 && /entrypoint.sh db createdb && /entrypoint.sh db migrate && /entrypoint.sh db init-admin && /entrypoint.sh run"',
        networks: [`${docker.network}`],
        volumes: [`${deployId}-postgres-data:/var/lib/postgresql/data`],
        environment: generateEnvsClickhouse,
        deploy: {
          ...baseServiceConfiguration,
          labels: [
            'managedBy=coolify',
            'type=service',
            'serviceName=plausible',
            'configuration=' + JSON.stringify({ email, userName, userPassword, baseURL, secretKey, generateEnvsPostgres, generateEnvsClickhouse }),
            'traefik.enable=true',
            'traefik.http.services.' + deployId + '.loadbalancer.server.port=8000',
            'traefik.http.routers.' + deployId + '.entrypoints=websecure',
            'traefik.http.routers.' + deployId + '.rule=Host(`' + traefikURL + '`) && PathPrefix(`/`)',
            'traefik.http.routers.' + deployId + '.tls.certresolver=letsencrypt',
            'traefik.http.routers.' + deployId + '.middlewares=global-compress'
          ]
        }
      },
      plausible_db: {
        image: 'bitnami/postgresql:13.2.0',
        networks: [`${docker.network}`],
        environment: generateEnvsPostgres,
        deploy: {
          ...baseServiceConfiguration,
          labels: [
            'managedBy=coolify',
            'type=service',
            'serviceName=plausible'
          ]
        }
      },
      plausible_events_db: {
        image: 'yandex/clickhouse-server:21.3.2.5',
        networks: [`${docker.network}`],
        volumes: [`${deployId}-clickhouse-data:/var/lib/clickhouse`],
        ulimits: {
          nofile: {
            soft: 262144,
            hard: 262144
          }
        },
        configs: [...clickhouseConfigs],
        deploy: {
          ...baseServiceConfiguration,
          labels: [
            'managedBy=coolify',
            'type=service',
            'serviceName=plausible'
          ]
        }
      }
    },
    networks: {
      [`${docker.network}`]: {
        external: true
      }
    },
    volumes: {
      [`${deployId}-clickhouse-data`]: {
        external: true
      },
      [`${deployId}-postgres-data`]: {
        external: true
      }
    },
    configs: {
      'plausible-clickhouse-user-config.xml': {
        file: `${workdir}/clickhouse-user-config.xml`
      },
      'plausible-clickhouse-config.xml': {
        file: `${workdir}/clickhouse-config.xml`
      },
      'plausible-init.query': {
        file: `${workdir}/init.query`
      },
      'plausible-init-db.sh': {
        file: `${workdir}/init-db.sh`
      }
    }
  }
  await fs.writeFile(`${workdir}/stack.yml`, yaml.dump(stack))
  await execShellAsync('docker stack rm plausible')
  await execShellAsync(
    `cat ${workdir}/stack.yml | docker stack deploy --prune -c - ${deployId}`
  )
  cleanupTmp(workdir)
}

async function activateAdminUser () {
  const { POSTGRESQL_USERNAME, POSTGRESQL_PASSWORD, POSTGRESQL_DATABASE } = JSON.parse(JSON.parse((await execShellAsync('docker service inspect plausible_plausible --format=\'{{json .Spec.Labels.configuration}}\'')))).generateEnvsPostgres
  const containers = (await execShellAsync('docker ps -a --format=\'{{json .Names}}\'')).replace(/"/g, '').trim().split('\n')
  const postgresDB = containers.find(container => container.startsWith('plausible_plausible_db'))
  await execShellAsync(`docker exec ${postgresDB} psql -H postgresql://${POSTGRESQL_USERNAME}:${POSTGRESQL_PASSWORD}@localhost:5432/${POSTGRESQL_DATABASE} -c "UPDATE users SET email_verified = true;"`)
}

module.exports = { plausible, activateAdminUser }
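A sketch of how the two exports above fit together when provisioning Plausible (all argument values are placeholders):

  await plausible({
    email: 'admin@example.com', // becomes ADMIN_USER_EMAIL
    userName: 'admin', // becomes ADMIN_USER_NAME
    userPassword: 'superSecret', // becomes ADMIN_USER_PWD
    baseURL: 'https://plausible.example.com',
    traefikURL: 'plausible.example.com' // used in the Traefik Host() rule
  })
  await activateAdminUser() // marks the admin row email_verified in Postgres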
@@ -1,16 +0,0 @@
const mongoose = require('mongoose')
const deploymentSchema = mongoose.Schema(
  {
    deployId: { type: String, required: true },
    nickname: { type: String, required: true },
    repoId: { type: Number, required: true },
    organization: { type: String, required: true },
    name: { type: String, required: true },
    branch: { type: String, required: true },
    domain: { type: String, required: true },
    progress: { type: String, required: true, default: 'queued' }
  },
  { timestamps: true }
)

module.exports = mongoose.model('deployment', deploymentSchema)
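Typical lifecycle of a document in this schema, as exercised by the deploy routes later in this diff (a sketch, not an excerpt):

  const deployment = new Deployment({ deployId, nickname, repoId, organization, name, branch, domain })
  await deployment.save() // progress defaults to 'queued'
  await Deployment.findOneAndUpdate({ deployId }, { progress: 'inprogress' })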
@@ -1,10 +0,0 @@
const mongoose = require('mongoose')
const logSchema = mongoose.Schema(
  {
    deployId: { type: String, required: true },
    event: { type: String, required: true }
  },
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)

module.exports = mongoose.model('logs-application', logSchema)
@@ -1,14 +0,0 @@
const mongoose = require('mongoose')
const { version } = require('../../../package.json')
const logSchema = mongoose.Schema(
  {
    version: { type: String, default: version },
    type: { type: String, required: true },
    message: { type: String, required: true },
    stack: { type: String },
    seen: { type: Boolean, default: false }
  },
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)

module.exports = mongoose.model('logs-server', logSchema)
@@ -1,12 +0,0 @@
const mongoose = require('mongoose')

const settingsSchema = mongoose.Schema(
  {
    applicationName: { type: String, required: true, default: 'coolify' },
    allowRegistration: { type: Boolean, required: true, default: false },
    sendErrors: { type: Boolean, required: true, default: true }
  },
  { timestamps: true }
)

module.exports = mongoose.model('settings', settingsSchema)
@@ -1,12 +0,0 @@
const mongoose = require('mongoose')

const userSchema = mongoose.Schema(
  {
    email: { type: String, required: true },
    avatar: { type: String },
    uid: { type: String, required: true }
  },
  { timestamps: true }
)

module.exports = mongoose.model('user', userSchema)
@@ -1,21 +0,0 @@
const fp = require('fastify-plugin')
const User = require('../models/User')
module.exports = fp(async function (fastify, options, next) {
  fastify.register(require('fastify-jwt'), {
    secret: fastify.config.JWT_SIGN_KEY
  })
  fastify.addHook('onRequest', async (request, reply) => {
    try {
      const { jti } = await request.jwtVerify()
      const found = await User.findOne({ uid: jti })
      if (found) {
        return true
      } else {
        reply.code(401).send('Unauthorized')
      }
    } catch (err) {
      reply.code(401).send('Unauthorized')
    }
  })
  next()
})
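The hook above accepts any request whose Bearer token verifies against JWT_SIGN_KEY and whose jti claim matches a stored user uid; a client-side sketch (token creation mirrors the GitHub login route further down in this diff):

  const jwt = require('jsonwebtoken')
  const token = jwt.sign({}, process.env.JWT_SIGN_KEY, { algorithm: 'HS256', jwtid: user.uid })
  // sent as: Authorization: Bearer <token>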
@@ -1,37 +0,0 @@
const { setDefaultConfiguration } = require('../../../libs/applications/configuration')
const { docker } = require('../../../libs/docker')
const { saveServerLog } = require('../../../libs/logging')

module.exports = async function (fastify) {
  fastify.post('/', async (request, reply) => {
    try {
      const configuration = setDefaultConfiguration(request.body)

      const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
      let foundDomain = false

      for (const service of services) {
        const running = JSON.parse(service.Spec.Labels.configuration)
        if (running) {
          if (
            running.publish.domain === configuration.publish.domain &&
            running.repository.id !== configuration.repository.id &&
            running.publish.path === configuration.publish.path
          ) {
            foundDomain = true
          }
        }
      }
      if (fastify.config.DOMAIN === configuration.publish.domain) foundDomain = true
      if (foundDomain) {
        reply.code(500).send({ message: 'Domain already in use.' })
        return
      }
      return { message: 'OK' }
    } catch (error) {
      await saveServerLog(error)
      throw new Error(error)
    }
  })
}
@@ -1,69 +0,0 @@
const Deployment = require('../../../../models/Deployment')
const ApplicationLog = require('../../../../models/Logs/Application')
const { verifyUserId, cleanupTmp } = require('../../../../libs/common')
const { purgeImagesContainers } = require('../../../../libs/applications/cleanup')
const { queueAndBuild } = require('../../../../libs/applications')
const { setDefaultConfiguration, precheckDeployment } = require('../../../../libs/applications/configuration')
const { docker } = require('../../../../libs/docker')
const { saveServerLog } = require('../../../../libs/logging')
const cloneRepository = require('../../../../libs/applications/github/cloneRepository')

module.exports = async function (fastify) {
  fastify.post('/', async (request, reply) => {
    let configuration
    try {
      await verifyUserId(request.headers.authorization)
    } catch (error) {
      reply.code(500).send({ error: 'Invalid request' })
      return
    }
    try {
      const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
      configuration = setDefaultConfiguration(request.body)
      if (!configuration) {
        throw new Error('Whaat?')
      }
      await cloneRepository(configuration)
      const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({ services, configuration })

      if (foundService && !forceUpdate && !imageChanged && !configChanged) {
        cleanupTmp(configuration.general.workdir)
        reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
        return
      }

      const alreadyQueued = await Deployment.find({
        repoId: configuration.repository.id,
        branch: configuration.repository.branch,
        organization: configuration.repository.organization,
        name: configuration.repository.name,
        domain: configuration.publish.domain,
        progress: { $in: ['queued', 'inprogress'] }
      })

      if (alreadyQueued.length > 0) {
        reply.code(200).send({ message: 'Already in the queue.' })
        return
      }

      reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name, deployId: configuration.general.deployId })
      await queueAndBuild(configuration, imageChanged)
    } catch (error) {
      const { id, organization, name, branch } = configuration.repository
      const { domain } = configuration.publish
      const { deployId } = configuration.general
      await Deployment.findOneAndUpdate(
        { repoId: id, branch, deployId, organization, name, domain },
        { repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
      if (error.name) {
        if (error.message && error.stack) await saveServerLog(error)
        if (reply.sent) await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
      }
      throw new Error(error)
    } finally {
      cleanupTmp(configuration.general.workdir)
      await purgeImagesContainers(configuration)
    }
  })
}
@@ -1,66 +0,0 @@
const ApplicationLog = require('../../../../models/Logs/Application')
const Deployment = require('../../../../models/Deployment')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const relativeTime = require('dayjs/plugin/relativeTime')
dayjs.extend(utc)
dayjs.extend(relativeTime)

module.exports = async function (fastify) {
  const getLogSchema = {
    querystring: {
      type: 'object',
      properties: {
        repoId: { type: 'string' },
        branch: { type: 'string' }
      },
      required: ['repoId', 'branch']
    }
  }
  fastify.get('/', { schema: getLogSchema }, async (request, reply) => {
    try {
      const { repoId, branch, page } = request.query
      const onePage = 5
      const show = Number(page) * onePage || 5
      const deploy = await Deployment.find({ repoId, branch })
        .select('-_id -__v -repoId')
        .sort({ createdAt: 'desc' })
        .limit(show)

      const finalLogs = deploy.map(d => {
        const finalLogs = { ...d._doc }

        const updatedAt = dayjs(d.updatedAt).utc()

        finalLogs.took = updatedAt.diff(dayjs(d.createdAt)) / 1000
        finalLogs.since = updatedAt.fromNow()

        return finalLogs
      })
      return finalLogs
    } catch (error) {
      throw new Error(error)
    }
  })

  fastify.get('/:deployId', async (request, reply) => {
    const { deployId } = request.params
    try {
      const logs = await ApplicationLog.find({ deployId })
        .select('-_id -__v')
        .sort({ createdAt: 'asc' })

      const deploy = await Deployment.findOne({ deployId })
        .select('-_id -__v')
        .sort({ createdAt: 'desc' })

      const finalLogs = {}
      finalLogs.progress = deploy.progress
      finalLogs.events = logs.map(log => log.event)
      finalLogs.human = dayjs(deploy.updatedAt).fromNow()
      return finalLogs
    } catch (e) {
      throw new Error('No logs found')
    }
  })
}
@@ -1,16 +0,0 @@
const { docker } = require('../../../libs/docker')
const { saveServerLog } = require('../../../libs/logging')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    try {
      const { name } = request.query
      const service = await docker.engine.getService(`${name}_${name}`)
      const logs = (await service.logs({ stdout: true, stderr: true, timestamps: true })).toString().split('\n').map(l => l.slice(8)).filter((a) => a)
      return { logs }
    } catch (error) {
      await saveServerLog(error)
      throw new Error(error)
    }
  })
}
@@ -1,35 +0,0 @@
const { docker } = require('../../../libs/docker')
const { execShellAsync } = require('../../../libs/common')
const ApplicationLog = require('../../../models/Logs/Application')
const Deployment = require('../../../models/Deployment')

module.exports = async function (fastify) {
  fastify.post('/', async (request, reply) => {
    const { organization, name, branch } = request.body
    let found = false
    try {
      (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application').map(s => {
        const running = JSON.parse(s.Spec.Labels.configuration)
        if (running.repository.organization === organization &&
          running.repository.name === name &&
          running.repository.branch === branch) {
          found = running
        }
        return null
      })
      if (found) {
        const deploys = await Deployment.find({ organization, branch, name })
        for (const deploy of deploys) {
          await ApplicationLog.deleteMany({ deployId: deploy.deployId })
          await Deployment.deleteMany({ deployId: deploy.deployId })
        }
        await execShellAsync(`docker stack rm ${found.build.container.name}`)
        reply.code(200).send({ organization, name, branch })
      } else {
        reply.code(500).send({ message: 'Nothing to do.' })
      }
    } catch (error) {
      reply.code(500).send({ message: 'Nothing to do.' })
    }
  })
}
@@ -1,28 +0,0 @@
const { docker } = require('../../libs/docker')

module.exports = async function (fastify) {
  fastify.post('/', async (request, reply) => {
    const { name, organization, branch } = request.body
    const services = await docker.engine.listServices()
    const applications = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')

    const found = applications.find(r => {
      const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
      if (branch) {
        if (configuration.repository.name === name && configuration.repository.organization === organization && configuration.repository.branch === branch) {
          return r
        }
      } else {
        if (configuration.repository.name === name && configuration.repository.organization === organization) {
          return r
        }
      }
      return null
    })
    if (found) {
      return JSON.parse(found.Spec.Labels.configuration)
    } else {
      reply.code(500).send({ message: 'No configuration found.' })
    }
  })
}
@@ -1,72 +0,0 @@
const { docker } = require('../../../libs/docker')
const Deployment = require('../../../models/Deployment')
const ServerLog = require('../../../models/Logs/Server')
const { saveServerLog } = require('../../../libs/logging')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    try {
      const latestDeployments = await Deployment.aggregate([
        {
          $sort: { createdAt: -1 }
        },
        {
          $group: {
            _id: {
              repoId: '$repoId',
              branch: '$branch'
            },
            createdAt: { $last: '$createdAt' },
            progress: { $first: '$progress' }
          }
        }
      ])
      const serverLogs = await ServerLog.find()
      const dockerServices = await docker.engine.listServices()
      let applications = dockerServices.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application' && r.Spec.Labels.configuration)
      let databases = dockerServices.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && r.Spec.Labels.configuration)
      let services = dockerServices.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'service' && r.Spec.Labels.configuration)
      applications = applications.map(r => {
        if (JSON.parse(r.Spec.Labels.configuration)) {
          const configuration = JSON.parse(r.Spec.Labels.configuration)
          const status = latestDeployments.find(l => configuration.repository.id === l._id.repoId && configuration.repository.branch === l._id.branch)
          if (status && status.progress) r.progress = status.progress
          r.Spec.Labels.configuration = configuration
          return r
        }
        return {}
      })
      databases = databases.map(r => {
        const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
        r.Spec.Labels.configuration = configuration
        return r
      })
      services = services.map(r => {
        const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
        r.Spec.Labels.configuration = configuration
        return r
      })
      applications = [...new Map(applications.map(item => [item.Spec.Labels.configuration.publish.domain + item.Spec.Labels.configuration.publish.path, item])).values()]
      return {
        serverLogs,
        applications: {
          deployed: applications
        },
        databases: {
          deployed: databases
        },
        services: {
          deployed: services
        }
      }
    } catch (error) {
      if (error.code === 'ENOENT' && error.errno === -2) {
        throw new Error(`Docker service unavailable at ${error.address}.`)
      } else {
        await saveServerLog(error)
        throw new Error(error)
      }
    }
  })
}
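The deduplication line above leans on the Map constructor keeping one value per key (the last one wins); the idiom in isolation:

  const unique = [...new Map(items.map(item => [keyOf(item), item])).values()]
  // here keyOf(item) is configuration.publish.domain + configuration.publish.path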
@@ -1,193 +0,0 @@
const yaml = require('js-yaml')
const fs = require('fs').promises
const cuid = require('cuid')
const { docker } = require('../../../libs/docker')
const { execShellAsync } = require('../../../libs/common')
const { saveServerLog } = require('../../../libs/logging')

const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
const generator = require('generate-password')

function getUniq () {
  return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 })
}
module.exports = async function (fastify) {
  fastify.get('/:deployId', async (request, reply) => {
    const { deployId } = request.params
    try {
      const database = (await docker.engine.listServices()).find(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && JSON.parse(r.Spec.Labels.configuration).general.deployId === deployId)
      if (database) {
        const jsonEnvs = {}
        if (database.Spec.TaskTemplate.ContainerSpec.Env) {
          for (const d of database.Spec.TaskTemplate.ContainerSpec.Env) {
            const s = d.split('=')
            jsonEnvs[s[0]] = s[1]
          }
        }
        const payload = {
          config: JSON.parse(database.Spec.Labels.configuration),
          envs: jsonEnvs || null
        }
        reply.code(200).send(payload)
      } else {
        throw new Error()
      }
    } catch (error) {
      throw new Error('No database found?')
    }
  })

  const postSchema = {
    body: {
      type: 'object',
      properties: {
        type: { type: 'string', enum: ['mongodb', 'postgresql', 'mysql', 'couchdb', 'clickhouse'] }
      },
      required: ['type']
    }
  }

  fastify.post('/deploy', { schema: postSchema }, async (request, reply) => {
    try {
      let { type, defaultDatabaseName } = request.body
      const passwords = generator.generateMultiple(2, {
        length: 24,
        numbers: true,
        strict: true
      })
      const usernames = generator.generateMultiple(2, {
        length: 10,
        numbers: true,
        strict: true
      })
      // TODO: Query for existing db with the same name
      const nickname = getUniq()

      if (!defaultDatabaseName) defaultDatabaseName = nickname

      reply.code(201).send({ message: 'Deploying.' })
      // TODO: Persistent volume, custom inputs
      const deployId = cuid()
      const configuration = {
        general: {
          workdir: `/tmp/${deployId}`,
          deployId,
          nickname,
          type
        },
        database: {
          usernames,
          passwords,
          defaultDatabaseName
        },
        deploy: {
          name: nickname
        }
      }
      await execShellAsync(`mkdir -p ${configuration.general.workdir}`)
      let generateEnvs = {}
      let image = null
      let volume = null
      let ulimits = {}
      if (type === 'mongodb') {
        generateEnvs = {
          MONGODB_ROOT_PASSWORD: passwords[0],
          MONGODB_USERNAME: usernames[0],
          MONGODB_PASSWORD: passwords[1],
          MONGODB_DATABASE: defaultDatabaseName
        }
        image = 'bitnami/mongodb:4.4'
        volume = `${configuration.general.deployId}-${type}-data:/bitnami/mongodb`
      } else if (type === 'postgresql') {
        generateEnvs = {
          POSTGRESQL_PASSWORD: passwords[0],
          POSTGRESQL_USERNAME: usernames[0],
          POSTGRESQL_DATABASE: defaultDatabaseName
        }
        image = 'bitnami/postgresql:13.2.0'
        volume = `${configuration.general.deployId}-${type}-data:/bitnami/postgresql`
      } else if (type === 'couchdb') {
        generateEnvs = {
          COUCHDB_PASSWORD: passwords[0],
          COUCHDB_USER: usernames[0]
        }
        image = 'bitnami/couchdb:3'
        volume = `${configuration.general.deployId}-${type}-data:/bitnami/couchdb`
      } else if (type === 'mysql') {
        generateEnvs = {
          MYSQL_ROOT_PASSWORD: passwords[0],
          MYSQL_ROOT_USER: usernames[0],
          MYSQL_USER: usernames[1],
          MYSQL_PASSWORD: passwords[1],
          MYSQL_DATABASE: defaultDatabaseName
        }
        image = 'bitnami/mysql:8.0'
        volume = `${configuration.general.deployId}-${type}-data:/bitnami/mysql/data`
      } else if (type === 'clickhouse') {
        image = 'yandex/clickhouse-server'
        volume = `${configuration.general.deployId}-${type}-data:/var/lib/clickhouse`
        ulimits = {
          nofile: {
            soft: 262144,
            hard: 262144
          }
        }
      }

      const stack = {
        version: '3.8',
        services: {
          [configuration.general.deployId]: {
            image,
            networks: [`${docker.network}`],
            environment: generateEnvs,
            volumes: [volume],
            ulimits,
            deploy: {
              replicas: 1,
              update_config: {
                parallelism: 0,
                delay: '10s',
                order: 'start-first'
              },
              rollback_config: {
                parallelism: 0,
                delay: '10s',
                order: 'start-first'
              },
              labels: [
                'managedBy=coolify',
                'type=database',
                'configuration=' + JSON.stringify(configuration)
              ]
            }
          }
        },
        networks: {
          [`${docker.network}`]: {
            external: true
          }
        },
        volumes: {
          [`${configuration.general.deployId}-${type}-data`]: {
            external: true
          }
        }
      }
      await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
      await execShellAsync(
        `cat ${configuration.general.workdir}/stack.yml | docker stack deploy -c - ${configuration.general.deployId}`
      )
    } catch (error) {
      console.log(error)
      await saveServerLog(error)
      throw new Error(error)
    }
  })

  fastify.delete('/:dbName', async (request, reply) => {
    const { dbName } = request.params
    await execShellAsync(`docker stack rm ${dbName}`)
    reply.code(200).send({})
  })
}
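Per the postSchema above, a deploy request needs only a type; defaultDatabaseName is optional and falls back to the generated nickname. The route prefix below is an assumption, since the plugin mounting is not shown in this diff:

  // POST /api/v1/databases/deploy
  // { "type": "postgresql", "defaultDatabaseName": "mydb" }
  // -> 201 { "message": "Deploying." }, then `docker stack deploy` runs server-side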
@@ -1,127 +0,0 @@
const axios = require('axios')
const User = require('../../../models/User')
const Settings = require('../../../models/Settings')
const cuid = require('cuid')
const mongoose = require('mongoose')
const jwt = require('jsonwebtoken')
const { saveServerLog } = require('../../../libs/logging')

module.exports = async function (fastify) {
  const githubCodeSchema = {
    schema: {
      querystring: {
        type: 'object',
        properties: {
          code: { type: 'string' }
        },
        required: ['code']
      }
    }
  }
  fastify.get('/app', { schema: githubCodeSchema }, async (request, reply) => {
    const { code } = request.query
    try {
      const { data } = await axios({
        method: 'post',
        url: `https://github.com/login/oauth/access_token?client_id=${fastify.config.VITE_GITHUB_APP_CLIENTID}&client_secret=${fastify.config.GITHUB_APP_CLIENT_SECRET}&code=${code}`,
        headers: {
          accept: 'application/json'
        }
      })

      const token = data.access_token
      const githubAxios = axios.create({
        baseURL: 'https://api.github.com'
      })

      githubAxios.defaults.headers.common.Accept = 'application/json'
      githubAxios.defaults.headers.common.Authorization = `token ${token}`

      try {
        let uid = cuid()
        const { avatar_url } = (await githubAxios.get('/user')).data // eslint-disable-line
        const email = (await githubAxios.get('/user/emails')).data.filter(
          (e) => e.primary
        )[0].email
        const settings = await Settings.findOne({ applicationName: 'coolify' })
        const registeredUsers = await User.find().countDocuments()
        const foundUser = await User.findOne({ email })
        if (foundUser) {
          await User.findOneAndUpdate(
            { email },
            { avatar: avatar_url },
            { upsert: true, new: true }
          )
          uid = foundUser.uid
        } else {
          if (registeredUsers === 0) {
            const newUser = new User({
              _id: new mongoose.Types.ObjectId(),
              email,
              avatar: avatar_url,
              uid
            })
            const defaultSettings = new Settings({
              _id: new mongoose.Types.ObjectId()
            })
            try {
              await newUser.save()
              await defaultSettings.save()
            } catch (e) {
              console.log(e)
              reply.code(500).send({ success: false, error: e })
              return
            }
          } else {
            if (!settings && registeredUsers > 0) {
              reply.code(500).send('Registration disabled, enable it in settings.')
            } else {
              if (!settings.allowRegistration) {
                reply.code(500).send('You are not allowed here!')
              } else {
                const newUser = new User({
                  _id: new mongoose.Types.ObjectId(),
                  email,
                  avatar: avatar_url,
                  uid
                })
                try {
                  await newUser.save()
                } catch (e) {
                  console.log(e)
                  reply.code(500).send({ success: false, error: e })
                  return
                }
              }
            }
          }
        }
        const jwtToken = jwt.sign({}, fastify.config.JWT_SIGN_KEY, {
          expiresIn: 15778800,
          algorithm: 'HS256',
          audience: 'coolLabs',
          issuer: 'coolLabs',
          jwtid: uid,
          subject: `User:${uid}`,
          notBefore: -1000
        })
        reply
          .code(200)
          .redirect(
            302,
            `/api/v1/login/github/success?jwtToken=${jwtToken}&ghToken=${token}`
          )
      } catch (e) {
        console.log(e)
        reply.code(500).send({ success: false, error: e })
        return
      }
    } catch (error) {
      await saveServerLog(error)
      throw new Error(error)
    }
  })
  fastify.get('/success', async (request, reply) => {
    return reply.sendFile('bye.html')
  })
}
@@ -1,14 +0,0 @@
const Server = require('../../../models/Logs/Server')
module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    try {
      const serverLogs = await Server.find().select('-_id -__v')
      // TODO: Should do better
      return {
        serverLogs
      }
    } catch (error) {
      throw new Error(error)
    }
  })
}
@@ -1,15 +0,0 @@
const { plausible, activateAdminUser } = require('../../../libs/services/plausible')

module.exports = async function (fastify) {
  fastify.post('/plausible', async (request, reply) => {
    let { email, userName, userPassword, baseURL } = request.body
    const traefikURL = baseURL
    baseURL = `https://${baseURL}`
    await plausible({ email, userName, userPassword, baseURL, traefikURL })
    return {}
  })
  fastify.patch('/plausible/activate', async (request, reply) => {
    await activateAdminUser()
    return 'OK'
  })
}
@@ -1,27 +0,0 @@
const { execShellAsync } = require('../../../libs/common')
const { docker } = require('../../../libs/docker')

module.exports = async function (fastify) {
  fastify.get('/:serviceName', async (request, reply) => {
    const { serviceName } = request.params
    try {
      const service = (await docker.engine.listServices()).find(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'service' && r.Spec.Labels.serviceName === serviceName && r.Spec.Name === `${serviceName}_${serviceName}`)
      if (service) {
        const payload = {
          config: JSON.parse(service.Spec.Labels.configuration)
        }
        reply.code(200).send(payload)
      } else {
        throw new Error()
      }
    } catch (error) {
      console.log(error)
      throw new Error('No service found?')
    }
  })
  fastify.delete('/:serviceName', async (request, reply) => {
    const { serviceName } = request.params
    await execShellAsync(`docker stack rm ${serviceName}`)
    reply.code(200).send({})
  })
}
@@ -1,49 +0,0 @@
const Settings = require('../../../models/Settings')
const { saveServerLog } = require('../../../libs/logging')

module.exports = async function (fastify) {
  const applicationName = 'coolify'
  const postSchema = {
    body: {
      type: 'object',
      properties: {
        allowRegistration: { type: 'boolean' },
        sendErrors: { type: 'boolean' }
      },
      required: []
    }
  }

  fastify.get('/', async (request, reply) => {
    try {
      let settings = await Settings.findOne({ applicationName }).select('-_id -__v')
      // TODO: Should do better
      if (!settings) {
        settings = {
          applicationName,
          allowRegistration: false
        }
      }
      return {
        settings
      }
    } catch (error) {
      await saveServerLog(error)
      throw new Error(error)
    }
  })

  fastify.post('/', { schema: postSchema }, async (request, reply) => {
    try {
      const settings = await Settings.findOneAndUpdate(
        { applicationName },
        { applicationName, ...request.body },
        { upsert: true, new: true }
      ).select('-_id -__v')
      reply.code(201).send({ settings })
    } catch (error) {
      await saveServerLog(error)
      throw new Error(error)
    }
  })
}
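Both settings fields are optional booleans per the postSchema, so a partial update is valid (the URL prefix below is an assumption, since mounting is not shown in this diff):

  // POST /api/v1/settings
  // { "allowRegistration": true }
  // -> 201 { "settings": { "applicationName": "coolify", "allowRegistration": true, ... } }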
@@ -1,5 +0,0 @@
module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    reply.code(200).send('NO')
  })
}
@@ -1,12 +0,0 @@
const { execShellAsync } = require('../../../libs/common')
const { saveServerLog } = require('../../../libs/logging')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    const upgradeP1 = await execShellAsync('bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p1.sh)"')
    await saveServerLog({ message: upgradeP1, type: 'UPGRADE-P-1' })
    reply.code(200).send('I\'m trying, okay?')
    const upgradeP2 = await execShellAsync('docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -u root coolify bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p2.sh)"')
    await saveServerLog({ message: upgradeP2, type: 'UPGRADE-P-2' })
  })
}
@@ -1,20 +0,0 @@
const User = require('../../models/User')
const jwt = require('jsonwebtoken')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    try {
      const { authorization } = request.headers
      if (!authorization) {
        reply.code(401).send({})
        return
      }
      const token = authorization.split(' ')[1]
      const verify = jwt.verify(token, fastify.config.JWT_SIGN_KEY)
      const found = await User.findOne({ uid: verify.jti })
      found ? reply.code(200).send({}) : reply.code(401).send({})
    } catch (error) {
      reply.code(401).send({})
    }
  })
}
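This route expects an `Authorization: Bearer <token>` header, verifies the JWT, and accepts the request only if the token's `jti` claim matches a stored user `uid`. For context, a matching token could be minted with the same library as below; the empty payload, expiry, and key handling are illustrative assumptions, not code from this repository:

// Hedged sketch: issuing a JWT whose jti claim is the user's uid.
const jwt = require('jsonwebtoken')

function issueToken (uid, signKey) {
  // jsonwebtoken writes the jwtid option into the jti claim,
  // which the route above reads back as verify.jti.
  return jwt.sign({}, signKey, { jwtid: uid, expiresIn: '7d' })
}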
@@ -1,122 +0,0 @@
const crypto = require('crypto')
const { cleanupTmp } = require('../../../libs/common')

const Deployment = require('../../../models/Deployment')
const ApplicationLog = require('../../../models/Logs/Application')
const ServerLog = require('../../../models/Logs/Server')

const { queueAndBuild } = require('../../../libs/applications')
const { setDefaultConfiguration, precheckDeployment } = require('../../../libs/applications/configuration')
const { docker } = require('../../../libs/docker')
const cloneRepository = require('../../../libs/applications/github/cloneRepository')
const { purgeImagesContainers } = require('../../../libs/applications/cleanup')

module.exports = async function (fastify) {
  // TODO: Add this to fastify plugin
  const postSchema = {
    body: {
      type: 'object',
      properties: {
        ref: { type: 'string' },
        repository: {
          type: 'object',
          properties: {
            id: { type: 'number' },
            full_name: { type: 'string' }
          },
          required: ['id', 'full_name']
        },
        installation: {
          type: 'object',
          properties: {
            id: { type: 'number' }
          },
          required: ['id']
        }
      },
      required: ['ref', 'repository', 'installation']
    }
  }
  fastify.post('/', { schema: postSchema }, async (request, reply) => {
    let configuration
    const hmac = crypto.createHmac('sha256', fastify.config.GITHUP_APP_WEBHOOK_SECRET)
    const digest = Buffer.from('sha256=' + hmac.update(JSON.stringify(request.body)).digest('hex'), 'utf8')
    const checksum = Buffer.from(request.headers['x-hub-signature-256'], 'utf8')
    if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
      reply.code(500).send({ error: 'Invalid request' })
      return
    }

    if (request.headers['x-github-event'] !== 'push') {
      reply.code(500).send({ error: 'Not a push event.' })
      return
    }
    try {
      const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')

      configuration = services.find(r => {
        if (request.body.ref.startsWith('refs')) {
          const branch = request.body.ref.split('/')[2]
          if (
            JSON.parse(r.Spec.Labels.configuration).repository.id === request.body.repository.id &&
            JSON.parse(r.Spec.Labels.configuration).repository.branch === branch
          ) {
            return r
          }
        }

        return null
      })
      if (!configuration) {
        reply.code(500).send({ error: 'No configuration found.' })
        return
      }

      configuration = setDefaultConfiguration(JSON.parse(configuration.Spec.Labels.configuration))
      await cloneRepository(configuration)
      const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({ services, configuration })

      if (foundService && !forceUpdate && !imageChanged && !configChanged) {
        cleanupTmp(configuration.general.workdir)
        reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
        return
      }
      const alreadyQueued = await Deployment.find({
        repoId: configuration.repository.id,
        branch: configuration.repository.branch,
        organization: configuration.repository.organization,
        name: configuration.repository.name,
        domain: configuration.publish.domain,
        progress: { $in: ['queued', 'inprogress'] }
      })

      if (alreadyQueued.length > 0) {
        reply.code(200).send({ message: 'Already in the queue.' })
        return
      }

      reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name })
      await queueAndBuild(configuration, imageChanged)
    } catch (error) {
      const { id, organization, name, branch } = configuration.repository
      const { domain } = configuration.publish
      const { deployId } = configuration.general
      await Deployment.findOneAndUpdate(
        { repoId: id, branch, deployId, organization, name, domain },
        { repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
      if (error.name === 'Error') {
        // Error during runtime
        await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
      } else {
        // Error in my code
        const payload = { message: error.message, stack: error.stack, type: 'spaghetticode' }
        if (error.message && error.stack) await new ServerLog(payload).save()
        if (reply.sent) await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
      }
      throw new Error(error)
    } finally {
      cleanupTmp(configuration.general.workdir)
      await purgeImagesContainers(configuration)
    }
  })
}
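The handler above authenticates GitHub webhooks by recomputing an HMAC-SHA256 over the payload and comparing it against the `X-Hub-Signature-256` header with `crypto.timingSafeEqual`, which avoids leaking information through comparison timing. One caveat: it hashes `JSON.stringify(request.body)` rather than the raw request bytes, which only matches GitHub's signature when re-serialization round-trips byte for byte; hashing the raw body is the more robust choice. A minimal sender-side sketch of the same scheme, with an illustrative secret and payload:

// Hedged sketch: computing the X-Hub-Signature-256 value that the handler above verifies.
const crypto = require('crypto')

function signPayload (rawBody, secret) {
  // GitHub prefixes the hex HMAC digest with the algorithm name.
  return 'sha256=' + crypto.createHmac('sha256', secret).update(rawBody).digest('hex')
}

// Example: signPayload(JSON.stringify({ ref: 'refs/heads/main' }), 'webhook-secret')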
@@ -1,49 +0,0 @@
const schema = {
  type: 'object',
  required: [
    'DOMAIN',
    'EMAIL',
    'VITE_GITHUB_APP_CLIENTID',
    'GITHUB_APP_CLIENT_SECRET',
    'GITHUB_APP_PRIVATE_KEY',
    'GITHUP_APP_WEBHOOK_SECRET',
    'JWT_SIGN_KEY',
    'SECRETS_ENCRYPTION_KEY'
  ],
  properties: {
    DOMAIN: {
      type: 'string'
    },
    EMAIL: {
      type: 'string'
    },
    VITE_GITHUB_APP_CLIENTID: {
      type: 'string'
    },
    GITHUB_APP_CLIENT_SECRET: {
      type: 'string'
    },
    GITHUB_APP_PRIVATE_KEY: {
      type: 'string'
    },
    GITHUP_APP_WEBHOOK_SECRET: {
      type: 'string'
    },
    JWT_SIGN_KEY: {
      type: 'string'
    },
    DOCKER_ENGINE: {
      type: 'string',
      default: '/var/run/docker.sock'
    },
    DOCKER_NETWORK: {
      type: 'string',
      default: 'coollabs'
    },
    SECRETS_ENCRYPTION_KEY: {
      type: 'string'
    }
  }
}

module.exports = { schema }
api/server.js
@@ -1,110 +0,0 @@
require('dotenv').config()
const fs = require('fs')
const util = require('util')
const axios = require('axios')
const mongoose = require('mongoose')
const path = require('path')
const { saveServerLog } = require('./libs/logging')
const { execShellAsync } = require('./libs/common')
const { purgeImagesContainers, cleanupStuckedDeploymentsInDB } = require('./libs/applications/cleanup')
const fastify = require('fastify')({
  trustProxy: true,
  logger: {
    level: 'error'
  }
})
fastify.register(require('../api/libs/http-error'))

const { schema } = require('./schema')

process.on('unhandledRejection', async (reason, p) => {
  await saveServerLog({ message: reason.message, type: 'unhandledRejection' })
})

fastify.register(require('fastify-env'), {
  schema,
  dotenv: true
})

if (process.env.NODE_ENV === 'production') {
  fastify.register(require('fastify-static'), {
    root: path.join(__dirname, '../dist/')
  })

  fastify.setNotFoundHandler(function (request, reply) {
    reply.sendFile('index.html')
  })
} else {
  fastify.register(require('fastify-static'), {
    root: path.join(__dirname, '../public/')
  })
}

fastify.register(require('./app'), { prefix: '/api/v1' })

if (process.env.NODE_ENV === 'production') {
  mongoose.connect(
    `mongodb://${process.env.MONGODB_USER}:${process.env.MONGODB_PASSWORD}@${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}/${process.env.MONGODB_DB}?authSource=${process.env.MONGODB_DB}&readPreference=primary&ssl=false`,
    { useNewUrlParser: true, useUnifiedTopology: true, useFindAndModify: false }
  )
} else {
  mongoose.connect(
    'mongodb://localhost:27017/coolify?&readPreference=primary&ssl=false',
    { useNewUrlParser: true, useUnifiedTopology: true, useFindAndModify: false }
  )
}

mongoose.connection.on(
  'error',
  console.error.bind(console, 'connection error:')
)
mongoose.connection.once('open', async function () {
  if (process.env.NODE_ENV === 'production') {
    fastify.listen(3000, '0.0.0.0')
    console.log('Coolify API is up and running in production.')
  } else {
    const logFile = fs.createWriteStream('api/development/console.log', { flags: 'w' })
    const logStdout = process.stdout

    console.log = function (d) {
      logFile.write(`[INFO]: ${util.format(d)}\n`)
      logStdout.write(util.format(d) + '\n')
    }

    console.error = function (d) {
      logFile.write(`[ERROR]: ${util.format(d)}\n`)
      logStdout.write(util.format(d) + '\n')
    }

    console.warn = function (d) {
      logFile.write(`[WARN]: ${util.format(d)}\n`)
      logStdout.write(util.format(d) + '\n')
    }

    fastify.listen(3001)
    console.log('Coolify API is up and running in development.')
  }
  try {
    // Always cleanup server logs
    await mongoose.connection.db.dropCollection('logs-servers')
  } catch (error) {
    // Could not cleanup logs-servers collection
  }
  // On start cleanup inprogress/queued deployments.
  try {
    await cleanupStuckedDeploymentsInDB()
  } catch (error) {
    // Could not cleanup DB 🤔
  }
  try {
    // Doing this because I do not want to prune these images. Prune skips coolify-reserve labeled images.
    const basicImages = ['nginx:stable-alpine', 'node:lts', 'ubuntu:20.04', 'php:apache', 'rust:latest']
    for (const image of basicImages) {
      // await execShellAsync(`echo "FROM ${image}" | docker build --label coolify-reserve=true -t ${image} -`)
      await execShellAsync(`docker pull ${image}`)
    }
  } catch (error) {
    console.log('Could not pull some basic images from Docker Hub.')
    console.log(error)
  }
})
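One nit in the development branch of the block above: each console override accepts a single argument `d`, so a call like `console.log('failed:', error)` silently drops everything after the first argument. A variadic version is a small change; a hedged sketch, using the same logFile/logStdout names as the code above:

// Hedged sketch: a variadic console tee that keeps every argument.
console.log = function (...args) {
  const line = util.format(...args) // formats all arguments like the real console.log
  logFile.write(`[INFO]: ${line}\n`)
  logStdout.write(line + '\n')
}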
data/docker/daemon.json (new file)
@@ -0,0 +1,11 @@
{
  "log-driver": "json-file",
  "log-opts": {
    "max-size": "100m",
    "max-file": "5"
  },
  "features": {
    "buildkit": true
  },
  "live-restore": true
}

data/haproxy/dataplaneapi.hcl (new file)
@@ -0,0 +1,29 @@
config_version = 2
name = "easy_gar"
mode = "single"
status = "null"

dataplaneapi {
  host = "0.0.0.0"
  port = 5555

  transaction {
    transaction_dir = "/tmp/haproxy"
  }

  advertised {
    api_address = ""
    api_port = 0
  }
}

haproxy {
  config_file = "/usr/local/etc/haproxy/haproxy.cfg"
  haproxy_bin = "/usr/local/sbin/haproxy"

  reload {
    reload_delay = 2
    reload_cmd = "kill -HUP 1"
    restart_cmd = "kill -SIGUSR2 1"
  }
}

data/haproxy/haproxy.cfg-http.template (new file)
@@ -0,0 +1,19 @@
global
  log stdout format raw local0 debug

defaults
  mode http
  log global
  timeout http-request 60s
  timeout connect 10s
  timeout client 60s
  timeout server 60s

frontend "${APP}"
  mode http
  bind *:"${PORT}" name "${APP}"
  default_backend "${APP}"

backend "${APP}"
  mode http
  server "${APP}" "${APP}":"${PRIVATE_PORT}" check

data/haproxy/haproxy.cfg-tcp.template (new file)
@@ -0,0 +1,15 @@
global
  log stdout format raw local0 debug

defaults
  mode tcp
  log global

frontend "${APP}"
  mode tcp
  bind *:"${PORT}" name "${APP}"
  default_backend "${APP}"

backend "${APP}"
  mode tcp
  server "${APP}" "${APP}":"${PRIVATE_PORT}" check

data/haproxy/haproxy.cfg.template (new file)
@@ -0,0 +1,38 @@
global
  stats socket /var/run/api.sock user haproxy group haproxy mode 660 level admin expose-fd listeners
  log stdout format raw local0 debug

defaults
  mode http
  log global
  timeout http-request 60s
  timeout connect 10s
  timeout client 60s
  timeout server 60s

userlist haproxy-dataplaneapi
  user admin insecure-password "${HAPROXY_PASSWORD}"

frontend http
  mode http
  bind :80
  bind :443 ssl crt /usr/local/etc/haproxy/ssl/ alpn h2,http/1.1
  acl is_certbot path_beg /.well-known/acme-challenge/
  use_backend backend-certbot if is_certbot
  use_backend %[req.hdr(host),lower]

frontend stats
  bind *:8404
  stats enable
  stats uri /
  stats refresh 5s
  stats admin if TRUE
  stats auth "${HAPROXY_USERNAME}:${HAPROXY_PASSWORD}"

backend backend-certbot
  mode http
  server certbot host.docker.internal:9080

program api
  command /usr/bin/dataplaneapi -f /usr/local/etc/haproxy/dataplaneapi.hcl --userlist haproxy-dataplaneapi
no option start-on-reload
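This template wires HAProxy to the Data Plane API configured in dataplaneapi.hcl: the `program api` section runs the API on port 5555, authenticated against the `haproxy-dataplaneapi` userlist, while `use_backend %[req.hdr(host),lower]` routes each request to a backend named after its Host header. A hedged sketch of querying that API from Node; the endpoint path follows my recollection of Data Plane API v2 conventions and is an assumption, not something defined in this repository:

// Hedged sketch: listing backends through the Data Plane API (v2 path assumed).
const axios = require('axios')

async function listBackends () {
  const { data } = await axios.get(
    'http://localhost:5555/v2/services/haproxy/configuration/backends',
    { auth: { username: 'admin', password: process.env.HAPROXY_PASSWORD } }
  )
  return data.data
}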
data/haproxy/ssl/default.pem (new file)
@@ -0,0 +1,81 @@
-----BEGIN CERTIFICATE-----
MIIFETCCAvkCFE/5JtU5geT5hOjFuQPiLgCYHwsOMA0GCSqGSIb3DQEBCwUAMEUx
CzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRl
cm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMjExMDExMDkwNzQ1WhcNMzExMDA5MDkw
NzQ1WjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UE
CgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOC
Ag8AMIICCgKCAgEArEZDhvc3ew2Gb8pvJlUrh5x+L5iqNxDYU3cOcOgeELCmOyAS
cH+/1xrsHQI05xWPpz6VAja2NKl4OP90getPPkiQV1xAg5/gsOsRL8Pi/MwvQKfZ
ObyW3t+sfFb1K3sVnm8bgk5F9OIVyAtzAx+Y53muEJsHOHpaEidnwbY2VE0zQB/G
DBQovrMefAwmH4RPqFor6NzFMKVRi33pQjYmcfCVFZylrDeCn8T7llV0lrnWqv6z
sGKfL3E4nHvyh/RsGNOXy+XQMxB9SA3j6hFTNtgQIPO/lxptz/+BLZoUt48nHZtr
sc5j+3sn8c1O9e6MjI/1q8lvZsk7ZsWCGSwCOvJ9LnxCWOEQUUfqIvGLsk7NJQgf
IkodZH9sW5Sjlro21+WBf3nvqlZ8g7r6K1RJOA8AtUiCaN/+o65t86WkwCSwQXcm
+nArcwddOx2HN9sFrjJ59N1eYEDGmyK3BdppYuVXay705PmxotR1hCBvnXOb34dn
gZxsxFTohr97JvEdNtGSNz4USyZPjgIMF/Gu8ruh0gQ1byhmayRqMGEqMAh58Lvb
3HYsd3Bf+LB9PpaXLAdKzsTZ8a28zyDYo8a70h7iBRxhmFwa+Df+pSmUEdzhejfx
7jEslhBQSQDmllaHrHc1G6H/w/u+04vi1joaLeLEGQclinKLeU88s9j3zzUCAwEA
ATANBgkqhkiG9w0BAQsFAAOCAgEAGQED96wBGzbMUlk9mIvZeLerzEAB3YfgfAYa
EAi79QHxM8UX06xmA2xtGvJSvlU8Xods9vxpBmIUnbDRTIAHNDApT19+vPg/iSfQ
1J9Fo4b5kjmWL6SalEdYcxqH9V/QndHta4MXP91u/ZsJ/exwDTZFatXsfGkPjUmN
Xp+Ip6iQg7+kV3JpRnMSbevj2Oujs7qTAdQedH38ZTNS0AaM5gvZyQkccCTKNBQ4
3O8MhCau7U0EUirndqsQXa0D3o78FpKztLNXSM7919jU2y36kMrWXfArfrBKHJ9b
nZeO7nkbHgvmVS8NTg9pR7L7u+YXTa2p1H2ZnpMQvruV7iL/Pb1H2N68UdvnQScL
sgacGSzM6b6PVdWRbECiuzC0UyWLZo/LoU3DQFGoiDQ4e/B3+TMrvgFI0CnpAQ4w
qiaVFJlRQeF4GaS4qHsN28OBliFATB3TXONFnz1aVkQlEHuh2+JbuL1b1lxvlX5t
gBbu/GgAcP4Uy2z4PoDmempAvNi2kCcLB98m+jbFSMSB3nkrdj6MzyN7kW9bhk3T
ClimxDmc23seprwLcxJUPP5q+HRB1VLKXLwIYxu+Up3g29d4k1Iy9nUUP9lITLTk
blJxZ2BPuQqTLzyqmAEWa1HxljFC1b7oMp9a98PbxC3MxUggM7zx/rgXWxM8osib
uwSZmw0=
-----END CERTIFICATE-----
-----BEGIN RSA PRIVATE KEY-----
MIIJKAIBAAKCAgEArEZDhvc3ew2Gb8pvJlUrh5x+L5iqNxDYU3cOcOgeELCmOyAS
cH+/1xrsHQI05xWPpz6VAja2NKl4OP90getPPkiQV1xAg5/gsOsRL8Pi/MwvQKfZ
ObyW3t+sfFb1K3sVnm8bgk5F9OIVyAtzAx+Y53muEJsHOHpaEidnwbY2VE0zQB/G
DBQovrMefAwmH4RPqFor6NzFMKVRi33pQjYmcfCVFZylrDeCn8T7llV0lrnWqv6z
sGKfL3E4nHvyh/RsGNOXy+XQMxB9SA3j6hFTNtgQIPO/lxptz/+BLZoUt48nHZtr
sc5j+3sn8c1O9e6MjI/1q8lvZsk7ZsWCGSwCOvJ9LnxCWOEQUUfqIvGLsk7NJQgf
IkodZH9sW5Sjlro21+WBf3nvqlZ8g7r6K1RJOA8AtUiCaN/+o65t86WkwCSwQXcm
+nArcwddOx2HN9sFrjJ59N1eYEDGmyK3BdppYuVXay705PmxotR1hCBvnXOb34dn
gZxsxFTohr97JvEdNtGSNz4USyZPjgIMF/Gu8ruh0gQ1byhmayRqMGEqMAh58Lvb
3HYsd3Bf+LB9PpaXLAdKzsTZ8a28zyDYo8a70h7iBRxhmFwa+Df+pSmUEdzhejfx
7jEslhBQSQDmllaHrHc1G6H/w/u+04vi1joaLeLEGQclinKLeU88s9j3zzUCAwEA
AQKCAgEAm1/z33Jwk4crTQAjJ0uBqxm1pW/ndSq4MO8cEzEGjL8F7iWK+/P8LiGV
+sPWuuRzX7/N3OVDiFOgnqeniNWV7vK7XE9T0GMN4ALiyVW/D4mIxKOeA7jXycOq
aap0DPdCFFbZVLkL10Vhp77LyHFjEsJn/4oTBRk0y1LG/as9bOMD6j29/X7hEL20
LOU4LQzEW26YU7lqD+nKlijFjHYSTolRrOBPe/fE1BxxXLFOKfMKbcaygc8xCzTu
fhQ8Nep45BtSuQ9Yq/WfSLFecemWR8yvH0k37yxjBknHVD23maZ+/PEEPKWM/2+g
IzGsmZrBILVmOb2/v9CWxqY0JEfQ6aU/nLW1ZiXSOIPmKEooK/hPVxFIyQ1yET4G
kQZ5RroY/QDrI14ms8P0iDzZ8K3EFKUyjiBbc83Mb0YIZ4hKd76gioOUIPeEQB+y
QLZ8Cb9YS3V8uIOJg+F4xlpJSePAZphfSxRLojSiKUeCs8gNUxGz0zwiMNf1p76F
8CaLgvSwT/cgQjWitMeeE1Ha+8lY8VzESmd10gPk2uES/qdrmMhwFwovPqqrtMqj
kMrFKNy7Crka6me3dhKEtryRTk5ho2IS/VCy/eXQ7lUW8Cl4uFxmjpHYSJMqDWvC
vu1p3/B1psSZIy2V2M9QqwZCysHqvGJMOCvYmnc6T62+kDRKQ7ECggEBAOS/1ptA
75OBAsHLovkspiCvn3gb/VTvH6LOvxYTohjr5iBeX137vg0aR1rg0jwcdf8EEJYw
4YxOid7KmV7O25ujzduQgwpVgujnJAeBLeLDC5dVbq3PQah61AvR2O/7t+Ls3oxi
cWh/OHC6SeZ/n406cxSCCUpVwtgHTaNFzaSmpDdEOSbjvXjQQjiRsG7j/1u64riq
RlJ/hIUlcys0g94yeN/5lPaNfsq0+vTSAYuTVVXVbEntwWcZVZxnQJviZVgJ99zM
RzE3sprvvr+I5QQ4FRMn0W9U7gblSJd5FGEL8gye4SRd+LxoUL4DR6pfuwd0vlXA
g+dgiOKoHm2Bb8cCggEBAMDMHMNR6uipdMivPjBTlklnaYd9SY3c5x65yNtx4CNh
rXyvy/6YvME7PPnKQZ8TQ4DkbVDUCAF7wnyAJJ7eWMav3bNlqWWjzaBvQz4Fn0XG
/1W5R1CoJ9DW5FY3f9efJzQTmfn6dIlCx1gW7XfVBZQqI1LORMWUYenk0KAvjlg2
UHYYl/BT1RhtYyzOJHto1PaUvCNDiOiDWAkTpLigYm7hGgVmcSwbo4F54SNUHdV7
yz3CorCM4VsEYYSL80WHYxf/Zc+mcIDoWOdog0iEeK/Zu++yG5lPRxC1862GmsZA
J06BMqX+NVGOfiGcVaGH+SZJXeFcrr7F8ZWp6y38QSMCggEAJwzo4hAv1gqMIfFV
nRwWMDZLDwIYOUupJu4MiQRJA+AhpRz3QuAbDbmSvNzshv6E1kgnXLxzhLRTrQkB
LcI6k1NfbUA6XqVCd+gdqnpPDwslC2y2PE3Jc62kTXBBjJZ4SfEN/QFBQwmU5Qmo
XAUlg8KaqsGYPGxvmtmEU38zIAyitByddRoj2mATLf0RFZ0ulsZMtiG7Z5IFWYWP
J60LZf9Py0ycNYrqPkivHuRLBzzbsI+CsQw5nBQjHVQzH2mCy4jIG5V0Ad70Sqbq
9V+1WQcJ8f82Lb9d8ydpQRKWfArCA42L+d1g/SkBv65nqZo2H4u6goEfA3zjYW45
44/ZOQKCAQB440MhwYqe6ioc76z51l+ElUAZQZjOR/XvUSS9XHDjHosOhJhPgmvQ
aZl5MrXkzcpk1lYo+Vovu+8d66eKqfZWVs2XgCYwYf48G6e5CwNsWDOgB7XMwDN/
Ak9YNCKIC/Yj9Cp3EPDjZCjkdjPeEIcX+Tf+4vFCRiEC7INX/ZmufBgFhLQ4cAhM
8cHexT8g1oG6P1acces1h626u0NstLwjtCeBvVM3CfmC5O4jHco7Iw00I4epVhyz
2lJfLvWR4itjT7QB+OXQHmAocWLoJJAcC1WJHU+q2IfB1aT+aElCB9XdpqsgY/4A
rm0uG/2hdEXoGNaxyVCUtD8fzdR2GBarAoIBACCYXXREMYb6i1TbR5Q2LvVQUPsO
Hgnbr+PLmx93rfUzDcr5r+cJgryjYQDKJTRleDJhg80M3RYOq+IOdl6yxOmRATmJ
ZDgwRVD1F6VFxBJePcAW30FI5CoBogsHaZQDKGsopEaDRLK5E3QHUVG5qj323RdI
Unf1++wI4nw+qwsVf1gSTcAdzq29v3NIWUyvvrmTNO4MxFTt0/lqkCsdT/2EFQDB
/yQ1HCtQQjXE1xlYh0BnMZp9+4FmrlMC9Oj5H0dDSWmInPION0ft8/SjBj4TQ5Qi
2DUo1WOWQnVR8Bxz0B8McXS+dOmgLe8ws4/ez7DoEVqHTgirKqBg5qRFQKw=
-----END RSA PRIVATE KEY-----
db/.gitkeep (new, empty file)

docker-compose-dev.yaml (new file)
@@ -0,0 +1,20 @@
version: '3.8'

services:
  redis:
    image: 'bitnami/redis:6.2'
    container_name: coolify-redis
    environment:
      - ALLOW_EMPTY_PASSWORD=yes
    networks:
      - coolify-infra
    ports:
      - target: 6379
        published: 6379
        protocol: tcp
        mode: host

networks:
  coolify-infra:
    attachable: true
    name: coolify-infra

docker-compose-haproxy.yaml (new file)
@@ -0,0 +1,23 @@
version: '3.8'

services:
  haproxy:
    image: coollabsio/coolify-haproxy-alpine:latest
    container_name: coolify-haproxy
    extra_hosts:
      - 'host.docker.internal:host-gateway'
    networks:
      - coolify
    volumes:
      - './data/haproxy/:/usr/local/etc/haproxy/'
    ports:
      - '80:80'
      - '443:443'
      - '8404:8404'
      - '5555:5555'
      - '3306:3306'

networks:
  coolify:
    attachable: true
    name: coolify

docker-compose.yaml (new file)
@@ -0,0 +1,43 @@
version: '3.8'

services:
  coolify:
    image: coollabsio/coolify:${TAG:-latest}
    restart: always
    container_name: coolify
    ports:
      - target: 3000
        published: 3000
        protocol: tcp
        mode: host
    volumes:
      - 'coolify-db:/app/db'
      - 'coolify-ssl-certs:/app/ssl'
      - 'coolify-letsencrypt:/etc/letsencrypt'
      - '/var/run/docker.sock:/var/run/docker.sock'
    env_file:
      - '.env'
    networks:
      - coolify-infra
    depends_on: ['redis']
  redis:
    image: bitnami/redis:6.2
    restart: always
    container_name: coolify-redis
    environment:
      - ALLOW_EMPTY_PASSWORD=yes
    networks:
      - coolify-infra

networks:
  coolify-infra:
    attachable: true
    name: coolify-infra

volumes:
  coolify-db:
    name: coolify-db
  coolify-ssl-certs:
    name: coolify-ssl-certs
  coolify-letsencrypt:
    name: coolify-letsencrypt

haproxy-http.Dockerfile (new file)
@@ -0,0 +1,6 @@
FROM haproxytech/haproxy-alpine:2.5
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe

COPY data/haproxy/haproxy.cfg-http.template /usr/local/etc/haproxy/haproxy.cfg
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem

haproxy-tcp.Dockerfile (new file)
@@ -0,0 +1,6 @@
FROM haproxytech/haproxy-alpine:2.5
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe

COPY data/haproxy/haproxy.cfg-tcp.template /usr/local/etc/haproxy/haproxy.cfg
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem

haproxy.Dockerfile (new file)
@@ -0,0 +1,6 @@
FROM haproxytech/haproxy-alpine:2.5
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe

COPY data/haproxy/haproxy.cfg.template /usr/local/etc/haproxy/haproxy.cfg
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem

index.html
@@ -1,20 +0,0 @@
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="UTF-8" />
  <link rel="icon" href="/favicon.png" />
  <link rel="preload" as="image" href="/favicon.png">
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>coolify: Heroku & Netlify alternative</title>
  <link rel="dns-prefetch" href="https://cdn.coollabs.io/" />
  <link rel="preconnect" href="https://cdn.coollabs.io/" crossorigin="" />
  <link rel="stylesheet" href="https://cdn.coollabs.io/fonts/montserrat/montserrat.css" />
  <link rel="stylesheet" href="https://cdn.coollabs.io/css/microtip-0.2.2.min.css" />
</head>

<body>
  <script type="module" src="/src/index.js"></script>
</body>

</html>

install.sh
@@ -1,88 +0,0 @@
#!/bin/bash

preTasks() {
  echo '
##############################
#### Pulling Git Updates #####
##############################'
  GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" git pull

  if [ $? -ne 0 ]; then
    echo '
####################################
#### Ooops something not okay! #####
####################################'
    exit 1
  fi

  echo '
##############################
#### Building Base Image #####
##############################'
  docker build --label coolify-reserve=true -t coolify-base -f install/Dockerfile-base .

  if [ $? -ne 0 ]; then
    echo '
####################################
#### Ooops something not okay! #####
####################################'
    exit 1
  fi

  echo '
##################################
#### Checking configuration. #####
##################################'
  docker run --rm -w /usr/src/app coolify-base node install/install.js --check
  if [ $? -ne 0 ]; then
    echo '
##################################
#### Missing configuration ! #####
##################################'
    exit 1
  fi
}
case "$1" in
  "all")
    preTasks
    echo '
#################################
#### Rebuilding everything. #####
#################################'
    docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type all
    ;;
  "coolify")
    preTasks
    echo '
##############################
#### Rebuilding Coolify. #####
##############################'
    docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type coolify
    ;;
  "proxy")
    preTasks
    echo '
############################
#### Rebuilding Proxy. #####
############################'
    docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type proxy
    ;;
  "upgrade-phase-1")
    preTasks
    echo '
################################
#### Upgrading Coolify P1. #####
################################'
    docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type upgrade
    ;;
  "upgrade-phase-2")
    echo '
################################
#### Upgrading Coolify P2. #####
################################'
    docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/update.js --type upgrade
    ;;
  *)
    exit 1
    ;;
esac
@@ -1,5 +0,0 @@
FROM coolify-base
WORKDIR /usr/src/app
RUN pnpm build
CMD ["pnpm", "start"]
EXPOSE 3000
@@ -1,19 +0,0 @@
FROM ubuntu:20.04 as binaries
RUN apt update && apt install -y curl gnupg2 ca-certificates
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN echo 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' >> /etc/apt/sources.list
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
RUN chmod +x /usr/bin/envsubst
RUN apt update && apt install -y docker-ce-cli && apt clean all

FROM node:14 as modules
COPY --from=binaries /usr/bin/docker /usr/bin/docker
COPY --from=binaries /usr/bin/envsubst /usr/bin/envsubst
RUN curl -L https://pnpm.js.org/pnpm.js | node - add --global pnpm
WORKDIR /usr/src/app
COPY ./package*.json .
RUN pnpm install

FROM modules
WORKDIR /usr/src/app
COPY . .
@@ -1,15 +0,0 @@
FROM node:lts
LABEL coolify-preserve=true
WORKDIR /usr/src/app
RUN curl -fsSL https://download.docker.com/linux/static/stable/x86_64/docker-20.10.6.tgz | tar -xzvf - docker/docker -C . --strip-components 1
RUN mv /usr/src/app/docker /usr/bin/docker
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
RUN curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -o /usr/bin/jq
RUN chmod +x /usr/bin/envsubst /usr/bin/jq /usr/bin/docker
RUN curl -f https://get.pnpm.io/v6.js | node - add --global pnpm
COPY ./*package.json .
RUN pnpm install
COPY . .
RUN pnpm build
CMD ["pnpm", "start"]
EXPOSE 3000
@@ -1,10 +0,0 @@
Some of the files are here for backwards compatibility.

I will do things after 2 months:

- rm ./install.js and ./update.js
- rm ../install.sh
- rm ./Dockerfile-base
- rm ./obs
- rm ./check.js "No need to check the env file. During installation, it is checked by the installer. If you change it between two upgrades: 🤷‍♂️"
- Rename Dockerfile-new to Dockerfile
@@ -1,24 +0,0 @@
require('dotenv').config()
const fastify = require('fastify')()
const { schema } = require('../api/schema')

checkConfig().then(() => {
  console.log('Config: OK')
}).catch((err) => {
  console.log('Config: NOT OK')
  console.error(err)
  process.exit(1)
})

function checkConfig () {
  return new Promise((resolve, reject) => {
    fastify.register(require('fastify-env'), {
      schema,
      dotenv: true
    })
      .ready((err) => {
        if (err) reject(err)
        resolve()
      })
  })
}
}
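The manual Promise wrapper around `ready` works, though `resolve()` still runs after `reject(err)` when registration fails (harmless, since a promise settles only once). In Fastify 3, which this codebase pins, `ready()` called without a callback already returns a promise, so the same check can likely be written more directly; a hedged sketch assuming Fastify 3 semantics:

// Hedged sketch: relying on ready() returning a promise (Fastify 3).
async function checkConfig () {
  fastify.register(require('fastify-env'), { schema, dotenv: true })
  await fastify.ready() // resolves once all plugins load, rejects on a schema error
}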
@@ -1,73 +0,0 @@
version: '3.8'

services:
  proxy:
    image: traefik:v2.4
    hostname: coollabs-proxy
    ports:
      - target: 80
        published: 80
        protocol: tcp
        mode: host
      - target: 443
        published: 443
        protocol: tcp
        mode: host
      - target: 8080
        published: 8080
        protocol: tcp
        mode: host
    command:
      - --api.insecure=true
      - --api.dashboard=true
      - --api.debug=true
      - --log.level=ERROR
      - --providers.docker=true
      - --providers.docker.swarmMode=true
      - --providers.docker.exposedbydefault=false
      - --providers.docker.network=${DOCKER_NETWORK}
      - --providers.docker.swarmModeRefreshSeconds=1s
      - --entrypoints.web.address=:80
      - --entrypoints.websecure.address=:443
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    networks:
      - ${DOCKER_NETWORK}
    deploy:
      update_config:
        parallelism: 1
        delay: 10s
        order: start-first
      replicas: 1
      placement:
        constraints:
          - node.role == manager
      labels:
        - "traefik.enable=true"
        - "traefik.http.routers.api.entrypoints=websecure"
        - "traefik.http.routers.api.service=api@internal"
        - "traefik.http.routers.api.middlewares=auth"
        - "traefik.http.services.traefik.loadbalancer.server.port=80"
        - "traefik.http.services.traefik.loadbalancer.server.port=443"

        # Global redirect www to non-www
        - "traefik.http.routers.www-catchall.rule=hostregexp(`{host:www.(.+)}`)"
        - "traefik.http.routers.www-catchall.entrypoints=web"
        - "traefik.http.routers.www-catchall.middlewares=redirect-www-to-nonwww"
        - "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.regex=^http://(?:www\\.)?(.+)"
        - "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.replacement=http://$$$${1}"

        # Global redirect http to https
        - "traefik.http.routers.http-catchall.rule=hostregexp(`{host:.+}`)"
        - "traefik.http.routers.http-catchall.entrypoints=web"
        - "traefik.http.routers.http-catchall.middlewares=redirect-to-https"

        - "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
        - "traefik.http.middlewares.global-compress.compress=true"

networks:
  ${DOCKER_NETWORK}:
    driver: overlay
    name: ${DOCKER_NETWORK}
    external: true
@@ -1,98 +0,0 @@
version: '3.8'

services:
  proxy:
    image: traefik:v2.4
    hostname: coollabs-proxy
    ports:
      - target: 80
        published: 80
        protocol: tcp
        mode: host
      - target: 443
        published: 443
        protocol: tcp
        mode: host
    command:
      - --api.insecure=false
      - --api.dashboard=false
      - --api.debug=false
      - --log.level=ERROR
      - --providers.docker=true
      - --providers.docker.swarmMode=true
      - --providers.docker.exposedbydefault=false
      - --providers.docker.network=${DOCKER_NETWORK}
      - --providers.docker.swarmModeRefreshSeconds=1s
      - --entrypoints.web.address=:80
      - --entrypoints.websecure.address=:443
      - --certificatesresolvers.letsencrypt.acme.httpchallenge=true
      - --certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web
      - --certificatesresolvers.letsencrypt.acme.email=${EMAIL}
      - --certificatesresolvers.letsencrypt.acme.storage=/data/coolify/acme.json
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - /data/coolify:/data/coolify
    networks:
      - ${DOCKER_NETWORK}
    deploy:
      update_config:
        parallelism: 1
        delay: 10s
        order: start-first
      replicas: 1
      placement:
        constraints:
          - node.role == manager
      labels:
        - "traefik.enable=true"
        - "traefik.http.routers.api.entrypoints=websecure"
        - "traefik.http.routers.api.service=api@internal"
        - "traefik.http.routers.api.middlewares=auth"
        - "traefik.http.services.traefik.loadbalancer.server.port=80"
        - "traefik.http.services.traefik.loadbalancer.server.port=443"

        # Global redirect www to non-www
        - "traefik.http.routers.www-catchall.rule=hostregexp(`{host:www.(.+)}`)"
        - "traefik.http.routers.www-catchall.entrypoints=web"
        - "traefik.http.routers.www-catchall.middlewares=redirect-www-to-nonwww"
        - "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.regex=^http://(?:www\\.)?(.+)"
        - "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.replacement=http://$$$${1}"

        # Global redirect http to https
        - "traefik.http.routers.http-catchall.rule=hostregexp(`{host:.+}`)"
        - "traefik.http.routers.http-catchall.entrypoints=web"
        - "traefik.http.routers.http-catchall.middlewares=redirect-to-https"

        - "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
        - "traefik.http.middlewares.global-compress.compress=true"

  coolify:
    image: coolify
    hostname: coollabs-coolify
    env_file:
      - .env
    networks:
      - ${DOCKER_NETWORK}
    command: "yarn start"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
    deploy:
      update_config:
        parallelism: 1
        delay: 10s
        order: start-first
      replicas: 1
      labels:
        - "traefik.enable=true"
        - "traefik.http.routers.coolify.entrypoints=websecure"
        - "traefik.http.routers.coolify.tls.certresolver=letsencrypt"
        - "traefik.http.routers.coolify.rule=Host(`${DOMAIN}`) && PathPrefix(`/`)"
        - "traefik.http.services.coolify.loadbalancer.server.port=3000"
        - "traefik.http.routers.coolify.middlewares=global-compress"

networks:
  ${DOCKER_NETWORK}:
    driver: overlay
    name: ${DOCKER_NETWORK}
    external: true
@@ -1,55 +0,0 @@
require('dotenv').config()
const { program } = require('commander')
const fastify = require('fastify')()
const { schema } = require('../api/schema')
const shell = require('shelljs')
const user = shell.exec('whoami', { silent: true }).stdout.replace('\n', '')

program.version('0.0.1')
program
  .option('-d, --debug', 'Debug outputs.')
  .option('-c, --check', 'Only checks configuration.')
  .option('-t, --type <type>', 'Deploy type.')

program.parse(process.argv)

const options = program.opts()
if (options.check) {
  checkConfig().then(() => {
    console.log('Config: OK')
  }).catch((err) => {
    console.log('Config: NOT OK')
    console.error(err)
    process.exit(1)
  })
} else {
  if (user !== 'root') {
    console.error(`Please run as root! Current user: ${user}`)
    process.exit(1)
  }
  shell.exec(`docker network create ${process.env.DOCKER_NETWORK} --driver overlay`, { silent: !options.debug })
  shell.exec('docker build -t coolify -f install/Dockerfile .')
  if (options.type === 'all') {
    shell.exec('docker stack rm coollabs-coolify', { silent: !options.debug })
  } else if (options.type === 'coolify') {
    shell.exec('docker service rm coollabs-coolify_coolify')
  } else if (options.type === 'proxy') {
    shell.exec('docker service rm coollabs-coolify_proxy')
  }
  if (options.type !== 'upgrade') {
    shell.exec('set -a && source .env && set +a && envsubst < install/coolify-template.yml | docker stack deploy -c - coollabs-coolify', { silent: !options.debug, shell: '/bin/bash' })
  }
}

function checkConfig () {
  return new Promise((resolve, reject) => {
    fastify.register(require('fastify-env'), {
      schema,
      dotenv: true
    })
      .ready((err) => {
        if (err) reject(err)
        resolve()
      })
  })
}
@@ -1,4 +0,0 @@
FROM coolify-base-nodejs
WORKDIR /usr/src/app
COPY . .
RUN pnpm install
@@ -1,6 +0,0 @@
FROM node:lts
LABEL coolify-preserve=true
COPY --from=coolify-binaries /usr/bin/docker /usr/bin/docker
COPY --from=coolify-binaries /usr/bin/envsubst /usr/bin/envsubst
COPY --from=coolify-binaries /usr/bin/jq /usr/bin/jq
RUN curl -f https://get.pnpm.io/v6.js | node - add --global pnpm@6
@@ -1,9 +0,0 @@
FROM ubuntu:20.04
LABEL coolify-preserve=true
RUN apt update && apt install -y curl gnupg2 ca-certificates
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN echo 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' >> /etc/apt/sources.list
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
RUN curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -o /usr/bin/jq
RUN chmod +x /usr/bin/envsubst /usr/bin/jq
RUN apt update && apt install -y docker-ce-cli && apt clean all
@@ -1,21 +0,0 @@
require('dotenv').config()
const { program } = require('commander')
const shell = require('shelljs')
const user = shell.exec('whoami', { silent: true }).stdout.replace('\n', '')
program.version('0.0.1')
program
  .option('-d, --debug', 'Debug outputs.')
  .option('-c, --check', 'Only checks configuration.')
  .option('-t, --type <type>', 'Deploy type.')

program.parse(process.argv)
const options = program.opts()
if (user !== 'root') {
  console.error(`Please run as root! Current user: ${user}`)
  process.exit(1)
}

if (options.type === 'upgrade') {
  shell.exec('docker service rm coollabs-coolify_coolify')
  shell.exec('set -a && source .env && set +a && envsubst < install/coolify-template.yml | docker stack deploy -c - coollabs-coolify', { silent: !options.debug, shell: '/bin/bash' })
}

package.json
@@ -1,68 +1,89 @@
{
  "name": "coolify",
  "description": "An open-source, hassle-free, self-hostable Heroku & Netlify alternative.",
  "version": "1.0.10",
  "license": "AGPL-3.0",
  "scripts": {
    "lint": "standard",
    "start": "NODE_ENV=production node api/server",
    "dev": "run-p dev:db dev:routify dev:svite dev:server",
    "dev:db": "NODE_ENV=development node api/development/mongodb.js",
    "dev:server": "nodemon -w api api/server",
    "dev:routify": "routify run",
    "dev:svite": "svite",
    "build": "run-s build:routify build:svite",
    "build:routify": "routify run -b",
    "build:svite": "svite build"
  },
  "dependencies": {
    "@iarna/toml": "^2.2.5",
    "@roxi/routify": "^2.15.1",
    "@zerodevx/svelte-toast": "^0.2.2",
    "ajv": "^8.1.0",
    "axios": "^0.21.1",
    "commander": "^7.2.0",
    "compare-versions": "^3.6.0",
    "cuid": "^2.1.8",
    "dayjs": "^1.10.4",
    "deepmerge": "^4.2.2",
    "dockerode": "^3.2.1",
    "dotenv": "^8.2.0",
    "fastify": "^3.14.2",
    "fastify-env": "^2.1.0",
    "fastify-jwt": "^2.4.0",
    "fastify-plugin": "^3.0.0",
    "fastify-static": "^4.0.1",
    "generate-password": "^1.6.0",
    "http-errors-enhanced": "^0.7.0",
    "js-yaml": "^4.0.0",
    "jsonwebtoken": "^8.5.1",
    "mongoose": "^5.12.3",
    "shelljs": "^0.8.4",
    "svelte-select": "^3.17.0",
    "unique-names-generator": "^4.4.0"
  },
  "devDependencies": {
    "mongodb-memory-server-core": "^6.9.6",
    "nodemon": "^2.0.7",
    "npm-run-all": "^4.1.5",
    "postcss": "^8.2.9",
    "postcss-import": "^14.0.1",
    "postcss-load-config": "^3.0.1",
    "postcss-preset-env": "^6.7.0",
    "prettier": "2.2.1",
    "prettier-plugin-svelte": "^2.2.0",
    "standard": "^16.0.3",
    "svelte": "^3.37.0",
    "svelte-hmr": "^0.14.0",
    "svelte-preprocess": "^4.7.0",
    "svite": "0.8.1",
    "tailwindcss": "2.1.1"
  },
  "keywords": [
    "svelte",
    "routify",
    "fastify",
    "tailwind"
  ]
  "name": "coolify",
  "description": "An open-source & self-hostable Heroku / Netlify alternative.",
  "version": "2.2.7",
  "license": "AGPL-3.0",
  "scripts": {
    "dev": "docker-compose -f docker-compose-dev.yaml up -d && NODE_ENV=development svelte-kit dev",
    "dev:stop": "docker-compose -f docker-compose-dev.yaml down",
    "dev:logs": "docker-compose -f docker-compose-dev.yaml logs -f --tail 10",
    "studio": "npx prisma studio",
    "start": "npx prisma migrate deploy && npx prisma generate && npx prisma db seed && node index.js",
    "build": "svelte-kit build",
    "preview": "svelte-kit preview",
    "check": "svelte-check --tsconfig ./tsconfig.json",
    "check:watch": "svelte-check --tsconfig ./tsconfig.json --watch",
    "db:generate": "prisma generate",
    "db:push": "prisma db push && prisma generate",
    "db:seed": "prisma db seed",
    "db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
    "release:staging": "cross-var docker build -t coollabsio/coolify:$npm_package_version . && docker push coollabsio/coolify:$npm_package_version",
    "release:pre": "cross-var docker build -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest .",
    "release:coolify": "cross-var yarn release:pre && docker push coollabsio/coolify:$npm_package_version && docker push coollabsio/coolify:latest",
    "release:haproxy": "docker build -f haproxy.Dockerfile -t coollabsio/coolify-haproxy-alpine:1.0.0 -t coollabsio/coolify-haproxy-alpine:latest . && docker image push --all-tags coollabsio/coolify-haproxy-alpine",
    "release:haproxy:tcp": "docker build -f haproxy-tcp.Dockerfile -t coollabsio/coolify-haproxy-tcp-alpine:1.0.0 -t coollabsio/coolify-haproxy-tcp-alpine:latest . && docker image push --all-tags coollabsio/coolify-haproxy-tcp-alpine",
    "release:haproxy:http": "docker build -f haproxy-http.Dockerfile -t coollabsio/coolify-haproxy-http-alpine:1.0.0 -t coollabsio/coolify-haproxy-http-alpine:latest . && docker image push --all-tags coollabsio/coolify-haproxy-http-alpine",
    "prepare": "husky install"
  },
  "devDependencies": {
    "@sveltejs/adapter-node": "1.0.0-next.73",
    "@sveltejs/kit": "1.0.0-next.303",
    "@types/bcrypt": "5.0.0",
    "@types/js-cookie": "3.0.1",
    "@types/js-yaml": "^4.0.5",
    "@types/node": "17.0.23",
    "@types/node-forge": "1.0.1",
    "@typescript-eslint/eslint-plugin": "4.31.1",
    "@typescript-eslint/parser": "4.31.1",
    "@zerodevx/svelte-toast": "0.7.1",
    "autoprefixer": "10.4.4",
    "cross-var": "1.1.0",
    "eslint": "7.32.0",
    "eslint-config-prettier": "8.5.0",
    "eslint-plugin-svelte3": "3.4.1",
    "husky": "7.0.4",
    "lint-staged": "12.3.7",
    "postcss": "8.4.12",
    "prettier": "2.6.1",
    "prettier-plugin-svelte": "2.6.0",
    "prettier-plugin-tailwindcss": "0.1.8",
    "prisma": "3.11.1",
    "svelte": "3.46.4",
    "svelte-check": "2.4.6",
    "svelte-preprocess": "4.10.4",
    "svelte-select": "^4.4.7",
    "tailwindcss": "3.0.23",
    "ts-node": "10.7.0",
    "tslib": "2.3.1",
    "typescript": "4.6.3"
  },
  "type": "module",
  "dependencies": {
    "@iarna/toml": "2.2.5",
    "@prisma/client": "3.11.1",
    "@sentry/node": "6.19.2",
    "bcrypt": "5.0.1",
    "bullmq": "1.78.1",
    "compare-versions": "4.1.3",
    "cookie": "0.4.2",
    "cooltipz-css": "^2.1.0",
    "cuid": "2.1.8",
    "dayjs": "1.11.0",
    "dockerode": "3.3.1",
    "dotenv-extended": "2.9.0",
    "generate-password": "1.7.0",
    "get-port": "6.1.2",
    "got": "12.0.2",
    "js-cookie": "3.0.1",
    "js-yaml": "4.1.0",
    "jsonwebtoken": "8.5.1",
    "mustache": "^4.2.0",
    "node-forge": "1.3.0",
    "svelte-kit-cookie-session": "2.1.2",
    "tailwindcss-scrollbar": "^0.1.0",
    "unique-names-generator": "4.7.1"
  },
  "prisma": {
    "seed": "node prisma/seed.cjs"
  }
}
pnpm-lock.yaml (generated; diff suppressed because it is too large)

postcss.config.cjs (new file)
@@ -0,0 +1,6 @@
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {}
  }
};
@@ -1,7 +0,0 @@
module.exports = {
  plugins: [
    require('postcss-import'),
    require('tailwindcss'),
    require('postcss-preset-env')({ stage: 1 })
  ]
}

prisma/migrations/20220131142425_init/migration.sql (new file)
@@ -0,0 +1,443 @@
-- CreateTable
CREATE TABLE "Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "User" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "email" TEXT NOT NULL,
    "type" TEXT NOT NULL,
    "password" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "Permission" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "userId" TEXT NOT NULL,
    "teamId" TEXT NOT NULL,
    "permission" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Permission_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
    CONSTRAINT "Permission_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Team" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "databaseId" TEXT,
    "serviceId" TEXT,
    FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "TeamInvitation" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "uid" TEXT NOT NULL,
    "email" TEXT NOT NULL,
    "teamId" TEXT NOT NULL,
    "teamName" TEXT NOT NULL,
    "permission" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- CreateTable
CREATE TABLE "Application" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "repository" TEXT,
    "configHash" TEXT,
    "branch" TEXT,
    "buildPack" TEXT,
    "projectId" INTEGER,
    "port" INTEGER,
    "installCommand" TEXT,
    "buildCommand" TEXT,
    "startCommand" TEXT,
    "baseDirectory" TEXT,
    "publishDirectory" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "destinationDockerId" TEXT,
    "gitSourceId" TEXT,
    CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Secret" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "value" TEXT NOT NULL,
    "isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "applicationId" TEXT NOT NULL,
    CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "BuildLog" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT,
    "buildId" TEXT NOT NULL,
    "line" TEXT NOT NULL,
    "time" INTEGER NOT NULL
);

-- CreateTable
CREATE TABLE "Build" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "type" TEXT NOT NULL,
    "applicationId" TEXT,
    "destinationDockerId" TEXT,
    "gitSourceId" TEXT,
    "githubAppId" TEXT,
    "gitlabAppId" TEXT,
    "commit" TEXT,
    "status" TEXT DEFAULT 'queued',
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "DestinationDocker" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "network" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "engine" TEXT NOT NULL,
    "remoteEngine" BOOLEAN NOT NULL DEFAULT false,
    "isCoolifyProxyUsed" BOOLEAN DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "GitSource" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "type" TEXT,
    "apiUrl" TEXT,
    "htmlUrl" TEXT,
    "organization" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "githubAppId" TEXT,
    "gitlabAppId" TEXT,
    CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "GithubApp" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT,
    "appId" INTEGER,
    "installationId" INTEGER,
    "clientId" TEXT,
    "clientSecret" TEXT,
    "webhookSecret" TEXT,
    "privateKey" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "GitlabApp" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "oauthId" INTEGER NOT NULL,
    "groupName" TEXT,
    "deployKeyId" INTEGER,
    "privateSshKey" TEXT,
    "publicSshKey" TEXT,
    "webhookToken" TEXT,
    "appId" TEXT,
    "appSecret" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "Database" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "publicPort" INTEGER,
    "defaultDatabase" TEXT,
    "type" TEXT,
    "version" TEXT,
    "dbUser" TEXT,
    "dbUserPassword" TEXT,
    "rootUser" TEXT,
    "rootUserPassword" TEXT,
    "destinationDockerId" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Database_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "DatabaseSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "databaseId" TEXT NOT NULL,
    "isPublic" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Service" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "type" TEXT,
    "version" TEXT,
    "destinationDockerId" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "PlausibleAnalytics" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "email" TEXT,
    "username" TEXT,
    "password" TEXT NOT NULL,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "secretKeyBase" TEXT,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Minio" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "rootUser" TEXT NOT NULL,
    "rootUserPassword" TEXT NOT NULL,
    "publicPort" INTEGER,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Minio_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Vscodeserver" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "password" TEXT NOT NULL,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Vscodeserver_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Wordpress" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "extraConfig" TEXT,
    "tablePrefix" TEXT,
    "mysqlUser" TEXT NOT NULL,
    "mysqlPassword" TEXT NOT NULL,
    "mysqlRootUser" TEXT NOT NULL,
    "mysqlRootUserPassword" TEXT NOT NULL,
    "mysqlDatabase" TEXT,
    "mysqlPublicPort" INTEGER,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_TeamToUser" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_ApplicationToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "Application" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_GitSourceToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "GitSource" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_GithubAppToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "GithubApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_GitlabAppToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "GitlabApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_DestinationDockerToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "DestinationDocker" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_DatabaseToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "Database" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_ServiceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Service" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");

-- CreateIndex
CREATE UNIQUE INDEX "User_id_key" ON "User"("id");

-- CreateIndex
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");

-- CreateIndex
CREATE UNIQUE INDEX "Application_fqdn_key" ON "Application"("fqdn");

-- CreateIndex
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");

-- CreateIndex
CREATE UNIQUE INDEX "Secret_name_key" ON "Secret"("name");

-- CreateIndex
CREATE UNIQUE INDEX "DestinationDocker_network_key" ON "DestinationDocker"("network");

-- CreateIndex
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");

-- CreateIndex
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");

-- CreateIndex
CREATE UNIQUE INDEX "GithubApp_name_key" ON "GithubApp"("name");

-- CreateIndex
CREATE UNIQUE INDEX "GitlabApp_oauthId_key" ON "GitlabApp"("oauthId");

-- CreateIndex
CREATE UNIQUE INDEX "GitlabApp_groupName_key" ON "GitlabApp"("groupName");

-- CreateIndex
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");

-- CreateIndex
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");

-- CreateIndex
CREATE UNIQUE INDEX "Minio_serviceId_key" ON "Minio"("serviceId");

-- CreateIndex
CREATE UNIQUE INDEX "Vscodeserver_serviceId_key" ON "Vscodeserver"("serviceId");

-- CreateIndex
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");

-- CreateIndex
CREATE UNIQUE INDEX "_TeamToUser_AB_unique" ON "_TeamToUser"("A", "B");

-- CreateIndex
CREATE INDEX "_TeamToUser_B_index" ON "_TeamToUser"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_ApplicationToTeam_AB_unique" ON "_ApplicationToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_ApplicationToTeam_B_index" ON "_ApplicationToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_GitSourceToTeam_AB_unique" ON "_GitSourceToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_GitSourceToTeam_B_index" ON "_GitSourceToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_GithubAppToTeam_AB_unique" ON "_GithubAppToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_GithubAppToTeam_B_index" ON "_GithubAppToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_GitlabAppToTeam_AB_unique" ON "_GitlabAppToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_GitlabAppToTeam_B_index" ON "_GitlabAppToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_DestinationDockerToTeam_AB_unique" ON "_DestinationDockerToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_DestinationDockerToTeam_B_index" ON "_DestinationDockerToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_DatabaseToTeam_AB_unique" ON "_DatabaseToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_DatabaseToTeam_B_index" ON "_DatabaseToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_ServiceToTeam_AB_unique" ON "_ServiceToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_ServiceToTeam_B_index" ON "_ServiceToTeam"("B");
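
A note on the underscore-prefixed tables above: these are Prisma's implicit many-to-many join tables, each holding just two foreign-key columns ("A", "B") with a unique index on the pair and a secondary index on "B". Membership lookups go through the join table; a minimal sketch, using a hypothetical team id:

-- Sketch (hypothetical id 'team_1'): all users belonging to one team.
SELECT "User".*
FROM "User"
JOIN "_TeamToUser" ON "_TeamToUser"."B" = "User"."id"
WHERE "_TeamToUser"."A" = 'team_1';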
prisma/migrations/20220210104005_redis_aol/migration.sql (new file, 28 lines)
@@ -0,0 +1,28 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Team" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "databaseId" TEXT,
    "serviceId" TEXT
);
INSERT INTO "new_Team" ("createdAt", "databaseId", "id", "name", "serviceId", "updatedAt") SELECT "createdAt", "databaseId", "id", "name", "serviceId", "updatedAt" FROM "Team";
DROP TABLE "Team";
ALTER TABLE "new_Team" RENAME TO "Team";
CREATE TABLE "new_DatabaseSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "databaseId" TEXT NOT NULL,
    "isPublic" BOOLEAN NOT NULL DEFAULT false,
    "appendOnly" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_DatabaseSettings" ("createdAt", "databaseId", "id", "isPublic", "updatedAt") SELECT "createdAt", "databaseId", "id", "isPublic", "updatedAt" FROM "DatabaseSettings";
DROP TABLE "DatabaseSettings";
ALTER TABLE "new_DatabaseSettings" RENAME TO "DatabaseSettings";
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
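
For readers unfamiliar with Prisma on SQLite: SQLite's ALTER TABLE cannot modify existing columns or constraints, so Prisma emits this "RedefineTables" sequence instead. Foreign keys are switched off, a new_* table with the target shape is created, rows are copied across, the original is dropped and the new table renamed, then integrity is re-checked. The generic shape, sketched on a hypothetical Example table:

-- Sketch only; "Example" and "addedFlag" are hypothetical names.
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Example" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "addedFlag" BOOLEAN NOT NULL DEFAULT false -- the newly introduced column
);
INSERT INTO "new_Example" ("id") SELECT "id" FROM "Example";
DROP TABLE "Example";
ALTER TABLE "new_Example" RENAME TO "Example";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;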
@@ -0,0 +1,11 @@
/*
  Warnings:

  - A unique constraint covering the columns `[name,applicationId]` on the table `Secret` will be added. If there are existing duplicate values, this will fail.

*/
-- DropIndex
DROP INDEX "Secret_name_key";

-- CreateIndex
CREATE UNIQUE INDEX "Secret_name_applicationId_key" ON "Secret"("name", "applicationId");
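
As the warning says, this migration fails if any secret name is duplicated within the same application. A pre-flight check one could run before upgrading (a sketch, not part of the migration) lists the offending pairs:

-- Sketch: (name, applicationId) pairs that would violate the new unique index.
SELECT "name", "applicationId", COUNT(*) AS duplicates
FROM "Secret"
GROUP BY "name", "applicationId"
HAVING COUNT(*) > 1;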
prisma/migrations/20220217211304_dualcerts/migration.sql (new file, 47 lines)
@@ -0,0 +1,47 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
CREATE TABLE "new_ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "createdAt", "debug", "id", "previews", "updatedAt") SELECT "applicationId", "createdAt", "debug", "id", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
CREATE TABLE "new_Service" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "type" TEXT,
    "version" TEXT,
    "destinationDockerId" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_Service" ("createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version") SELECT "createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version" FROM "Service";
DROP TABLE "Service";
ALTER TABLE "new_Service" RENAME TO "Service";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
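
Note that each INSERT ... SELECT above omits the new dualCerts column, so every copied row falls back to the column's DEFAULT (false). A minimal illustration on a hypothetical table:

-- Sketch (hypothetical table): columns omitted from the INSERT list take their DEFAULT.
CREATE TABLE "demo" ("id" TEXT PRIMARY KEY, "flag" BOOLEAN NOT NULL DEFAULT false);
INSERT INTO "demo" ("id") VALUES ('a');
SELECT "id", "flag" FROM "demo"; -- returns: a | 0 (false)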
prisma/migrations/20220219231255_prmr_secrets/migration.sql (new file, 19 lines)
@@ -0,0 +1,19 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Secret" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "value" TEXT NOT NULL,
    "isPRMRSecret" BOOLEAN NOT NULL DEFAULT false,
    "isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "applicationId" TEXT NOT NULL,
    CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Secret" ("applicationId", "createdAt", "id", "isBuildSecret", "name", "updatedAt", "value") SELECT "applicationId", "createdAt", "id", "isBuildSecret", "name", "updatedAt", "value" FROM "Secret";
DROP TABLE "Secret";
ALTER TABLE "new_Secret" RENAME TO "Secret";
CREATE UNIQUE INDEX "Secret_name_applicationId_isPRMRSecret_key" ON "Secret"("name", "applicationId", "isPRMRSecret");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
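
Widening the unique key from (name, applicationId) to (name, applicationId, isPRMRSecret) lets the same secret name exist twice per application: once as a regular value and once as a pull-request/merge-request override, which appears to be what isPRMRSecret marks. A sketch with hypothetical values:

-- Sketch (hypothetical rows; assumes an Application with id 'app_1' exists).
-- Both rows are accepted because they differ in "isPRMRSecret".
INSERT INTO "Secret" ("id", "name", "value", "isPRMRSecret", "applicationId", "updatedAt")
VALUES ('sec_1', 'API_KEY', 'production-value', false, 'app_1', CURRENT_TIMESTAMP),
       ('sec_2', 'API_KEY', 'preview-value', true, 'app_1', CURRENT_TIMESTAMP);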
@@ -0,0 +1,20 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
prisma/migrations/20220301101928_proxyhash/migration.sql (new file, 2 lines)
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "proxyHash" TEXT;
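
Unlike the table rewrites above, this change needs no RedefineTables pass: appending a column in place is the one schema change SQLite's ALTER TABLE supports, subject to the restrictions sketched below on hypothetical columns:

-- Sketch (hypothetical columns): SQLite's in-place ADD COLUMN rules.
ALTER TABLE "Setting" ADD COLUMN "noteA" TEXT; -- OK: nullable column
ALTER TABLE "Setting" ADD COLUMN "noteB" TEXT NOT NULL DEFAULT ''; -- OK: NOT NULL needs a non-NULL default
-- ALTER TABLE "Setting" ADD COLUMN "noteC" TEXT NOT NULL; -- would fail: no default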
Some files were not shown because too many files have changed in this diff.