Mirror of https://github.com/ershisan99/coolify.git, synced 2025-12-18 12:33:06 +00:00
Compare commits
75 Commits
| SHA1 |
|---|
| 004724da55 |
| 97e9b5ffe3 |
| 45f920f802 |
| 2b31532d19 |
| e7a6ecf95b |
| 545c98cee0 |
| d29ccbfe37 |
| d0807862e6 |
| b92616dc14 |
| a1a436300d |
| 16a5aeb1ba |
| 872095ff7a |
| d88f2ea4c3 |
| 02e0385ab8 |
| c9751d4cd9 |
| 162b637992 |
| a10ddd4063 |
| f46ccc63a7 |
| fc04a45744 |
| 90c2b59a51 |
| d6bee99c1b |
| 0871d47568 |
| 5c646c1898 |
| 8974de165f |
| e622294b87 |
| cf9d32b556 |
| e2d6b5bf64 |
| dec58fd6d1 |
| dbb2241213 |
| 3bd8ac5820 |
| f514aa676d |
| 73fc9755dd |
| 5089c843b6 |
| cd527f2bce |
| 82de234f21 |
| ae6f325c0a |
| c64bbbe426 |
| eafd882a06 |
| 460ae85226 |
| a64b095c13 |
| 7ea0de3fb8 |
| b4c836afbd |
| 2d0f22b379 |
| a8e9668c2b |
| 425feba0e2 |
| c09b8d888f |
| 748e691a58 |
| f8c81ff95f |
| d11c4a3cd7 |
| 3f3ea151ef |
| 7e2f68870c |
| df41cf14da |
| 111370c025 |
| bcb2ba0b1b |
| 807d526ffa |
| 2ff9c5fed5 |
| d43cd663d2 |
| dae91267e8 |
| b2d6317a23 |
| c49b412e69 |
| 05e5d73556 |
| 53620f4b1a |
| 9d14b03eb1 |
| 04a5b1bd4f |
| 31b3f58b2c |
| 9c173d1de0 |
| e11b6d74ed |
| c7efe899fa |
| adcd68c1ab |
| 23a4ebb74a |
| cccb9a5fec |
| b416e3ab3e |
| e16b7d65d4 |
| 3744c64459 |
| f742c2a3e2 |
@@ -1,4 +1,11 @@
.DS_Store
node_modules
dist
.routify
.pnpm-store
/build
/.svelte-kit
/package

.env
.env.stag
/db/*.db
/db/*.db-journal
/data/haproxy/haproxy.cfg
@@ -1,35 +1,5 @@
####################################
# Domain where your Coolify instance will be available and reachable.
# It's the same as you set in Github OAuth App and Github App as <domain>.
DOMAIN=
## Let's Encrypt contact email required
EMAIL=

# JWT Token Sign Key for logging you in to Coolify's frontend
JWT_SIGN_KEY=
# Encryption key for SECRETS - do NOT share it with others!
SECRETS_ENCRYPTION_KEY=

# Docker Engine
DOCKER_ENGINE=/var/run/docker.sock
# Docker network to use internally between the proxy and your apps
DOCKER_NETWORK=coollabs

# Mongodb
# Values in case if you are using our Mongodb installation - CHANGE user and password fields!
MONGODB_HOST=coollabs-mongodb
MONGODB_PORT=27017
MONGODB_USER=supercooldbuser
MONGODB_PASSWORD=developmentPassword4db
MONGODB_DB=coolLabs-prod

# Frontend only variables
VITE_GITHUB_APP_CLIENTID=
VITE_GITHUB_APP_NAME=

# Github OAuth & App secrets and private key - you can get it from Github.
GITHUB_APP_CLIENT_SECRET=
GITHUP_APP_WEBHOOK_SECRET=

# It should look like this. Newlines breaks with \n
GITHUB_APP_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEA7Y+Uwkd8FINSwFktWGdtwCaOAazTDYR8ucEzGyR9r+ooJZhF\nOc32qgDSps6Q5DsqPOzvfhiviqU+et9VF+bJhfdzwJ+Le86QZH1RgsDMoY049XvI\nKSwP........"
COOLIFY_APP_ID=
COOLIFY_SECRET_KEY=
COOLIFY_DATABASE_URL=file:../db/prod.db
COOLIFY_SENTRY_DSN=
COOLIFY_IS_ON="docker"
20 .eslintrc.cjs Normal file
@@ -0,0 +1,20 @@
module.exports = {
  root: true,
  parser: '@typescript-eslint/parser',
  extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier'],
  plugins: ['svelte3', '@typescript-eslint'],
  ignorePatterns: ['*.cjs'],
  overrides: [{ files: ['*.svelte'], processor: 'svelte3/svelte3' }],
  settings: {
    'svelte3/typescript': () => require('typescript')
  },
  parserOptions: {
    sourceType: 'module',
    ecmaVersion: 2020
  },
  env: {
    browser: true,
    es2017: true,
    node: true
  }
};
2 .github/FUNDING.yml vendored
@@ -1 +1 @@
ko_fi: andrasbacsai
open_collective: coollabsio
20 .gitignore vendored
@@ -1,11 +1,13 @@
.vscode
.idea
.DS_Store
node_modules
dist
dist-ssr
.routify
/build
/.svelte-kit
/package

.env
yarn-error.log
api/development/console.log
.pnpm-debug.log
.pnpm-store
.env.prod
.env.stag
/db/*.db
/db/*.db-journal
/data/haproxy/haproxy.cfg
/data/haproxy/haproxy.cfg.lkg
1 .husky/_/.gitignore vendored Normal file
@@ -0,0 +1 @@
*
4 .husky/pre-commit Executable file
@@ -0,0 +1,4 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"

yarn lint-staged
5 .lintstagedrc.json Normal file
@@ -0,0 +1,5 @@
{
  "**/*.{js,jsx,ts,tsx,cjs,svelte,json,css,scss,md,yaml}": [
    "prettier --ignore-path .gitignore --write --plugin-search-dir=."
  ]
}
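lint-staged reads this map of glob patterns to commands and runs each command only on the staged files that match, here formatting them with Prettier before the Husky pre-commit hook lets the commit proceed. For reference, lint-staged also accepts a JavaScript config; a sketch of an equivalent `.lintstagedrc.js` (an assumed alternative, the repository itself uses the JSON form above):

```js
// .lintstagedrc.js, equivalent to the JSON config added above (sketch).
module.exports = {
  '**/*.{js,jsx,ts,tsx,cjs,svelte,json,css,scss,md,yaml}': [
    'prettier --ignore-path .gitignore --write --plugin-search-dir=.'
  ]
};
```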
18 .prettierrc
@@ -1,14 +1,6 @@
{
"arrowParens": "avoid",
"bracketSpacing": true,
"printWidth": 80,
"semi": true,
"singleQuote": false,
"tabWidth": 2,
"trailingComma": "all",
"svelteSortOrder" : "styles-scripts-markup",
"svelteStrictMode": true,
"svelteBracketNewLine": true,
"svelteAllowShorthand": true,
"plugins": ["prettier-plugin-svelte"]
}
"useTabs": true,
"singleQuote": true,
"trailingComma": "none",
"printWidth": 100
}
30 Dockerfile Normal file
@@ -0,0 +1,30 @@
FROM node:16.14.0-alpine
WORKDIR /app
COPY package*.json .
RUN yarn install
COPY . .
RUN yarn build

FROM node:16.14.0-alpine
WORKDIR /app

LABEL coolify.managed true

RUN apk add --no-cache git openssh-client curl jq cmake sqlite

RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
RUN pnpm add -g pnpm

RUN curl -fsSL "https://download.docker.com/linux/static/stable/x86_64/docker-20.10.9.tgz" | tar -xzvf - docker/docker -C . --strip-components 1 && mv docker /usr/bin/docker
RUN mkdir -p ~/.docker/cli-plugins/
RUN curl -SL https://github.com/docker/compose/releases/download/v2.2.2/docker-compose-linux-x86_64 -o ~/.docker/cli-plugins/docker-compose
RUN chmod +x ~/.docker/cli-plugins/docker-compose

COPY --from=0 /app/docker-compose.yaml .
COPY --from=0 /app/build .
COPY --from=0 /app/package.json .
COPY --from=0 /app/node_modules ./node_modules
COPY --from=0 /app/prisma ./prisma

EXPOSE 3000
CMD ["pnpm", "start"]
661 LICENSE
@@ -1,661 +0,0 @@
[The entire LICENSE file is deleted in this hunk. It contained the unmodified text of the GNU Affero General Public License, Version 3, 19 November 2007, Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>: the preamble, Terms and Conditions sections 0 through 17, "END OF TERMS AND CONDITIONS", and the "How to Apply These Terms to Your New Programs" appendix. The full text is available at <https://www.gnu.org/licenses/>.]
137 README.md
@@ -1,93 +1,80 @@
# About
# Coolify

https://andrasbacsai.com/farewell-netlify-and-heroku-after-3-days-of-coding
An open-source & self-hostable Heroku / Netlify alternative.

# Features
- Deploy your Node.js, static sites, PHP or any custom application (with custom Dockerfile) just by pushing code to git.
- Hassle-free installation and upgrade process.
- One-click MongoDB, MySQL, PostgreSQL, CouchDB deployments!
## Installation

# Upcoming features
- Backups & monitoring.
- User analytics with privacy in mind.
- And much more (see [Roadmap](https://github.com/coollabsio/coolify/projects/1)).
Installation is automated with the following command:

```bash
/bin/bash -c "$(curl -fsSL https://get.coollabs.io/coolify/install.sh)"
```

## Migration from v1

A fresh installation is necessary. v2 is not compatible with v1.

## Features
### Git Sources
You can use the official ones or your self hosted version!

- Github
- GitLab
- Bitbucket (WIP)

### Destinations

- Local Docker Engine
- Remote Docker Engine (WIP)
- Kubernetes (WIP)


# FAQ
Q: What is a buildpack?
### Applications

A: It defines your application's final form.
`Static` means that it will be hosted as a static site.
`NodeJs` means that it will be started as a node application.
- Static sites
- NodeJS
- VueJS
- NuxtJS
- NextJS
- React/Preact
- NextJS
- Gatsby
- Svelte
- PHP
- Rust
- Dockerfile (you can provide it)

# Screenshots
### Databases

[Login](https://coollabs.io/coolify/login.jpg)
- MongoDB
- MySQL
- PostgreSQL
- CouchDB
- Redis

[Applications](https://coollabs.io/coolify/applications.jpg)
### One-click services

[Databases](https://coollabs.io/coolify/databases.jpg)
- [WordPress](https://wordpress.org)
- [Plausible Analytics](https://plausible.io)
- [NocoDB](https://nocodb.com)
- [VSCode Server](https://github.com/cdr/code-server)
- [MinIO](https://min.io)
- [VaultWarden](https://github.com/dani-garcia/vaultwarden)

[Configuration](https://coollabs.io/coolify/configuration.jpg)

[Settings](https://coollabs.io/coolify/settings.jpg)
## Support

[Logs](https://coollabs.io/coolify/logs.jpg)

# Getting Started

Automatically: `/bin/bash -c "$(curl -fsSL https://get.coollabs.io/coolify/install.sh)"`

Manually:
### Requirements before installation
- [Docker](https://docs.docker.com/engine/install/) version 20+
- Docker in [swarm mode enabled](https://docs.docker.com/engine/reference/commandline/swarm_init/) (should be set manually before installation)
- A [MongoDB](https://docs.mongodb.com/manual/installation/) instance.
- We have a [simple installation](https://github.com/coollabsio/infrastructure/tree/main/mongo) if you need one
- A configured DNS entry (see `.env.template`)
- [Github App](https://docs.github.com/en/developers/apps/creating-a-github-app)

- GitHub App name: could be anything weird
- Homepage URL: https://yourdomain

Identifying and authorizing users:
- Callback URL: https://yourdomain/api/v1/login/github/app
- Request user authorization (OAuth) during installation -> Check!

Webhook:
- Active -> Check!
- Webhook URL: https://yourdomain/api/v1/webhooks/deploy
- Webhook Secret: it should be super secret

Repository permissions:
- Contents: Read-only
- Metadata: Read-only

User permissions:
- Email: Read-only

Subscribe to events:
- Push -> Check!

### Installation
- Clone this repository: `git clone git@github.com:coollabsio/coolify.git`
- Set `.env` (see `.env.template`)
- Installation: `bash install.sh all`

## Manual updating process (You probably never need to do this!)
### Update everything (proxy+coolify)
- `bash install.sh all`

### Update coolify only
- `bash install.sh coolify`

### Update proxy only
- `bash install.sh proxy`

# Contact
- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
- Email: [andras@coollabs.io](mailto:andras@coollabs.io)
- Discord: [Invitation](https://discord.gg/xhBCC7eGKw)

## Roadmap

[See the Roadmap here](https://github.com/coollabsio/coolify/projects/1)

(Will be updated soon!)

## License

# License
This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Please see the [LICENSE](/LICENSE) file in our repository for the full text.
28 api/app.js
@@ -1,28 +0,0 @@
module.exports = async function (fastify, opts) {
  // Private routes
  fastify.register(async function (server) {
    server.register(require('./plugins/authentication'))
    server.register(require('./routes/v1/upgrade'), { prefix: '/upgrade' })
    server.register(require('./routes/v1/settings'), { prefix: '/settings' })
    server.register(require('./routes/v1/dashboard'), { prefix: '/dashboard' })
    server.register(require('./routes/v1/config'), { prefix: '/config' })
    server.register(require('./routes/v1/application/remove'), { prefix: '/application/remove' })
    server.register(require('./routes/v1/application/logs'), { prefix: '/application/logs' })
    server.register(require('./routes/v1/application/check'), { prefix: '/application/check' })
    server.register(require('./routes/v1/application/deploy'), { prefix: '/application/deploy' })
    server.register(require('./routes/v1/application/deploy/logs'), { prefix: '/application/deploy/logs' })
    server.register(require('./routes/v1/databases'), { prefix: '/databases' })
    server.register(require('./routes/v1/server'), { prefix: '/server' })
  })
  // Public routes
  fastify.register(require('./routes/v1/verify'), { prefix: '/verify' })
  fastify.register(require('./routes/v1/login/github'), {
    prefix: '/login/github'
  })
  fastify.register(require('./routes/v1/webhooks/deploy'), {
    prefix: '/webhooks/deploy'
  })
  fastify.register(require('./routes/v1/undead'), {
    prefix: '/undead'
  })
}
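The file above relies on Fastify's plugin system: every `require`d module is an async plugin function, and the `prefix` option nests its routes under the given path (so a route defined as `/` inside the dashboard module is served at `/dashboard/`). A minimal sketch of what one of these route modules could look like; the route and payload are assumptions for illustration, not taken from the repository:

```js
// Hypothetical sketch of a route plugin in the style of ./routes/v1/dashboard.
module.exports = async function (fastify, opts) {
  // Registered with { prefix: '/dashboard' }, so this answers GET /dashboard/
  fastify.get('/', async (request, reply) => {
    return { status: 'ok' } // illustrative payload only
  })
}
```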
@@ -1,15 +0,0 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')

module.exports = async function (configuration) {
  const path = `${configuration.general.workdir}/${configuration.build.directory ? configuration.build.directory : ''}`
  if (fs.stat(`${path}/Dockerfile`)) {
    const stream = await docker.engine.buildImage(
      { src: ['.'], context: path },
      { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
    )
    await streamEvents(stream, configuration)
  } else {
    throw new Error('No custom dockerfile found.')
  }
}
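One detail worth noting in the removed custom buildpack above: `fs` is `fs.promises`, so `fs.stat(...)` returns a Promise, and a Promise is always truthy. The `else` branch can therefore never run, and a missing Dockerfile only surfaces later as an unhandled rejection and a failed Docker build. A minimal sketch of an awaited existence check (an assumption about the intended behaviour, not code from the repository):

```js
const fs = require('fs').promises

// Hypothetical helper: resolves to true only if <path>/Dockerfile exists.
async function hasDockerfile (path) {
  try {
    await fs.stat(`${path}/Dockerfile`)
    return true
  } catch {
    return false
  }
}

// Usage in the buildpack would then be:
// if (await hasDockerfile(path)) { ...build the image... }
// else throw new Error('No custom dockerfile found.')
```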
@@ -1,24 +0,0 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../libs/docker')
const buildImageNodeDocker = (configuration) => {
  return [
    'FROM node:lts',
    'WORKDIR /usr/src/app',
    `COPY ${configuration.build.directory}/package*.json .`,
    configuration.build.command.installation && `RUN ${configuration.build.command.installation}`,
    `COPY ./${configuration.build.directory} .`,
    `RUN ${configuration.build.command.build}`
  ].join('\n')
}
async function buildImage (configuration) {
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, buildImageNodeDocker(configuration))
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}

module.exports = {
  buildImage
}
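To make the template above concrete, here is a small sketch that reproduces the same string-building logic for a hypothetical configuration object (field values invented for illustration) and prints the Dockerfile that `buildImage` would write into the work directory:

```js
// Hypothetical configuration; in Coolify this object is assembled elsewhere.
const configuration = {
  build: {
    directory: 'app',
    command: { installation: 'yarn install', build: 'yarn build' },
    container: { name: 'example-app', tag: 'ce7a1a0' }
  }
}

// Same template logic as buildImageNodeDocker above.
const dockerfile = [
  'FROM node:lts',
  'WORKDIR /usr/src/app',
  `COPY ${configuration.build.directory}/package*.json .`,
  configuration.build.command.installation && `RUN ${configuration.build.command.installation}`,
  `COPY ./${configuration.build.directory} .`,
  `RUN ${configuration.build.command.build}`
].join('\n')

console.log(dockerfile)
// FROM node:lts
// WORKDIR /usr/src/app
// COPY app/package*.json .
// RUN yarn install
// COPY ./app .
// RUN yarn build
```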
@@ -1,7 +0,0 @@
const static = require('./static')
const nodejs = require('./nodejs')
const php = require('./php')
const custom = require('./custom')
const rust = require('./rust')

module.exports = { static, nodejs, php, custom, rust }
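The index above just maps buildpack names to their implementations so a caller can dispatch by key. A sketch of how such a lookup could be consumed; the `configuration.build.pack` field name is an assumption for illustration, not confirmed by the source shown here:

```js
// Hypothetical dispatch over the buildpack map exported above.
const buildpacks = require('./buildpacks')

async function runBuildpack (configuration) {
  const pack = configuration.build.pack // e.g. 'nodejs', 'php', 'rust', 'static', 'custom' (assumed field)
  const build = buildpacks[pack]
  if (!build) throw new Error(`Unknown buildpack: ${pack}`)
  await build(configuration)
}
```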
@@ -1,28 +0,0 @@
const fs = require('fs').promises
const { buildImage } = require('../helpers')
const { streamEvents, docker } = require('../../libs/docker')
// `HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost:${configuration.publish.port}${configuration.publish.path} || exit 1`,
const publishNodejsDocker = (configuration) => {
  return [
    'FROM node:lts',
    'WORKDIR /usr/src/app',
    configuration.build.command.build
      ? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} .`
      : `
COPY ${configuration.build.directory}/package*.json .
RUN ${configuration.build.command.installation}
COPY ./${configuration.build.directory} .`,
    `EXPOSE ${configuration.publish.port}`,
    'CMD [ "yarn", "start" ]'
  ].join('\n')
}

module.exports = async function (configuration) {
  if (configuration.build.command.build) await buildImage(configuration)
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishNodejsDocker(configuration))
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
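Two things in the publish step above are easy to miss: when a build command is configured, `buildImage` first compiles the sources into an intermediate image, and the published Dockerfile then copies only `configuration.publish.directory` out of that image with `COPY --from=<image>`; without a build command the sources are copied and installed directly. A sketch of the text the template yields for a hypothetical configuration that does define a build command (values invented for illustration):

```js
// Hypothetical configuration with a build command present.
const configuration = {
  build: {
    command: { build: 'yarn build' },
    container: { name: 'example-app', tag: 'ce7a1a0' }
  },
  publish: { directory: 'build', port: 3000 }
}

const dockerfile = [
  'FROM node:lts',
  'WORKDIR /usr/src/app',
  // build command present, so take the COPY --from branch of the template above
  `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} .`,
  `EXPOSE ${configuration.publish.port}`,
  'CMD [ "yarn", "start" ]'
].join('\n')

console.log(dockerfile)
// FROM node:lts
// WORKDIR /usr/src/app
// COPY --from=example-app:ce7a1a0 /usr/src/app/build .
// EXPOSE 3000
// CMD [ "yarn", "start" ]
```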
@@ -1,22 +0,0 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
// 'HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost/ || exit 1',
const publishPHPDocker = (configuration) => {
  return [
    'FROM php:apache',
    'RUN a2enmod rewrite',
    'WORKDIR /usr/src/app',
    `COPY ./${configuration.build.directory} /var/www/html`,
    'EXPOSE 80',
    ' CMD ["apache2-foreground"]'
  ].join('\n')
}

module.exports = async function (configuration) {
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishPHPDocker(configuration))
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
@@ -1,60 +0,0 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
const { execShellAsync } = require('../../libs/common')
const TOML = require('@iarna/toml')

const publishRustDocker = (configuration, custom) => {
  return [
    'FROM rust:latest',
    'WORKDIR /app',
    `COPY --from=${configuration.build.container.name}:cache /app/target target`,
    `COPY --from=${configuration.build.container.name}:cache /usr/local/cargo /usr/local/cargo`,
    'COPY . .',
    `RUN cargo build --release --bin ${custom.name}`,
    'FROM debian:buster-slim',
    'WORKDIR /app',
    'RUN apt-get update -y && apt-get install -y --no-install-recommends openssl libcurl4 ca-certificates && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*',
    'RUN update-ca-certificates',
    `COPY --from=${configuration.build.container.name}:cache /app/target/release/${custom.name} ${custom.name}`,
    `EXPOSE ${configuration.publish.port}`,
    `CMD ["/app/${custom.name}"]`
  ].join('\n')
}

const cacheRustDocker = (configuration, custom) => {
  return [
    `FROM rust:latest AS planner-${configuration.build.container.name}`,
    'WORKDIR /app',
    'RUN cargo install cargo-chef',
    'COPY . .',
    'RUN cargo chef prepare --recipe-path recipe.json',
    'FROM rust:latest',
    'WORKDIR /app',
    'RUN cargo install cargo-chef',
    `COPY --from=planner-${configuration.build.container.name} /app/recipe.json recipe.json`,
    'RUN cargo chef cook --release --recipe-path recipe.json'
  ].join('\n')
}

module.exports = async function (configuration) {
  const cargoToml = await execShellAsync(`cat ${configuration.general.workdir}/Cargo.toml`)
  const parsedToml = TOML.parse(cargoToml)
  const custom = {
    name: parsedToml.package.name
  }
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, cacheRustDocker(configuration, custom))

  let stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:cache` }
  )
  await streamEvents(stream, configuration)

  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishRustDocker(configuration, custom))

  stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
@@ -1,28 +0,0 @@
const fs = require('fs').promises
const { buildImage } = require('../helpers')
const { streamEvents, docker } = require('../../libs/docker')

// 'HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost/ || exit 1',
const publishStaticDocker = (configuration) => {
  return [
    'FROM nginx:stable-alpine',
    'COPY nginx.conf /etc/nginx/nginx.conf',
    'WORKDIR /usr/share/nginx/html',
    configuration.build.command.build
      ? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} .`
      : `COPY ./${configuration.build.directory} .`,
    'EXPOSE 80',
    'CMD ["nginx", "-g", "daemon off;"]'
  ].join('\n')
}

module.exports = async function (configuration) {
  if (configuration.build.command.build) await buildImage(configuration)
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishStaticDocker(configuration))

  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
@@ -1,36 +0,0 @@
const mongoose = require('mongoose')
const { MongoMemoryServer } = require('mongodb-memory-server-core')

const mongoServer = new MongoMemoryServer({
  instance: {
    port: 27017,
    dbName: 'coolify',
    storageEngine: 'wiredTiger'
  },
  binary: {
    version: '4.4.3'
  }
})

mongoose.Promise = Promise
mongoServer.getUri().then((mongoUri) => {
  const mongooseOpts = {
    useNewUrlParser: true,
    useUnifiedTopology: true
  }

  mongoose.connect(mongoUri, mongooseOpts)

  mongoose.connection.on('error', (e) => {
    if (e.message.code === 'ETIMEDOUT') {
      console.log(e)
      mongoose.connect(mongoUri, mongooseOpts)
    }
    console.log(e)
  })

  mongoose.connection.once('open', () => {
    console.log(`Started in-memory mongodb ${mongoUri}`)
  })
})
@@ -1,28 +0,0 @@
const packs = require('../../../buildPacks')
const { saveAppLog } = require('../../logging')
const Deployment = require('../../../models/Deployment')

module.exports = async function (configuration) {
  const { id, organization, name, branch } = configuration.repository
  const { domain } = configuration.publish
  const deployId = configuration.general.deployId

  const execute = packs[configuration.build.pack]
  if (execute) {
    await Deployment.findOneAndUpdate(
      { repoId: id, branch, deployId, organization, name, domain },
      { repoId: id, branch, deployId, organization, name, domain, progress: 'inprogress' })
    await saveAppLog('### Building application.', configuration)
    await execute(configuration)
    await saveAppLog('### Building done.', configuration)
  } else {
    try {
      await Deployment.findOneAndUpdate(
        { repoId: id, branch, deployId, organization, name, domain },
        { repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
    } catch (error) {
      // Hmm.
    }
    throw new Error('No buildpack found.')
  }
}
@@ -1,27 +0,0 @@
const { docker } = require('../../docker')
const { execShellAsync } = require('../../common')
const Deployment = require('../../../models/Deployment')

async function purgeImagesContainers () {
  await execShellAsync('docker container prune -f')
  await execShellAsync('docker image prune -f --filter=label!=coolify-reserve=true')
}

async function cleanupStuckedDeploymentsInDB () {
  // Clean up stuck deployments.
  await Deployment.updateMany(
    { progress: { $in: ['queued', 'inprogress'] } },
    { progress: 'failed' }
  )
}

async function deleteSameDeployments (configuration) {
  await (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application').map(async s => {
    const running = JSON.parse(s.Spec.Labels.configuration)
    if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
      await execShellAsync(`docker stack rm ${s.Spec.Labels['com.docker.stack.namespace']}`)
    }
  })
}

module.exports = { cleanupStuckedDeploymentsInDB, deleteSameDeployments, purgeImagesContainers }
@@ -1,136 +0,0 @@
|
||||
const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
|
||||
const cuid = require('cuid')
|
||||
const crypto = require('crypto')
|
||||
const { docker } = require('../docker')
|
||||
const { execShellAsync } = require('../common')
|
||||
|
||||
function getUniq () {
|
||||
return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 })
|
||||
}
|
||||
|
||||
function setDefaultConfiguration (configuration) {
|
||||
const nickname = getUniq()
|
||||
const deployId = cuid()
|
||||
|
||||
const shaBase = JSON.stringify({ repository: configuration.repository })
|
||||
const sha256 = crypto.createHash('sha256').update(shaBase).digest('hex')
|
||||
|
||||
const baseServiceConfiguration = {
|
||||
replicas: 1,
|
||||
restart_policy: {
|
||||
condition: 'any',
|
||||
max_attempts: 3
|
||||
},
|
||||
update_config: {
|
||||
parallelism: 1,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
},
|
||||
rollback_config: {
|
||||
parallelism: 1,
|
||||
delay: '10s',
|
||||
order: 'start-first',
|
||||
failure_action: 'rollback'
|
||||
}
|
||||
}
|
||||
|
||||
configuration.build.container.name = sha256.slice(0, 15)
|
||||
|
||||
configuration.general.nickname = nickname
|
||||
configuration.general.deployId = deployId
|
||||
configuration.general.workdir = `/tmp/${deployId}`
|
||||
|
||||
if (!configuration.publish.path) configuration.publish.path = '/'
|
||||
if (!configuration.publish.port) {
|
||||
if (configuration.build.pack === 'php') {
|
||||
configuration.publish.port = 80
|
||||
} else if (configuration.build.pack === 'static') {
|
||||
configuration.publish.port = 80
|
||||
} else if (configuration.build.pack === 'nodejs') {
|
||||
configuration.publish.port = 3000
|
||||
} else if (configuration.build.pack === 'rust') {
|
||||
configuration.publish.port = 3000
|
||||
}
|
||||
}
|
||||
|
||||
if (!configuration.build.directory) configuration.build.directory = ''
|
||||
if (configuration.build.directory.startsWith('/')) configuration.build.directory = configuration.build.directory.replace('/', '')
|
||||
|
||||
if (!configuration.publish.directory) configuration.publish.directory = ''
|
||||
if (configuration.publish.directory.startsWith('/')) configuration.publish.directory = configuration.publish.directory.replace('/', '')
|
||||
|
||||
if (configuration.build.pack === 'static' || configuration.build.pack === 'nodejs') {
|
||||
if (!configuration.build.command.installation) configuration.build.command.installation = 'yarn install'
|
||||
}
|
||||
|
||||
configuration.build.container.baseSHA = crypto.createHash('sha256').update(JSON.stringify(baseServiceConfiguration)).digest('hex')
|
||||
configuration.baseServiceConfiguration = baseServiceConfiguration
|
||||
|
||||
return configuration
|
||||
}
|
||||
|
||||
async function updateServiceLabels (configuration) {
|
||||
// In case of any failure during deployment, still update the current configuration.
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
const found = services.find(s => {
|
||||
const config = JSON.parse(s.Spec.Labels.configuration)
|
||||
if (config.repository.id === configuration.repository.id && config.repository.branch === configuration.repository.branch) {
|
||||
return config
|
||||
}
|
||||
return null
|
||||
})
|
||||
if (found) {
|
||||
const { ID } = found
|
||||
const Labels = { ...JSON.parse(found.Spec.Labels.configuration), ...configuration }
|
||||
await execShellAsync(`docker service update --label-add configuration='${JSON.stringify(Labels)}' --label-add com.docker.stack.image='${configuration.build.container.name}:${configuration.build.container.tag}' ${ID}`)
|
||||
}
|
||||
}
|
||||
|
||||
async function precheckDeployment ({ services, configuration }) {
|
||||
let foundService = false
|
||||
let configChanged = false
|
||||
let imageChanged = false
|
||||
|
||||
let forceUpdate = false
|
||||
|
||||
for (const service of services) {
|
||||
const running = JSON.parse(service.Spec.Labels.configuration)
|
||||
if (running) {
|
||||
if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
|
||||
// Base service configuration changed
|
||||
if (!running.build.container.baseSHA || running.build.container.baseSHA !== configuration.build.container.baseSHA) {
|
||||
forceUpdate = true
|
||||
}
|
||||
// If the deployment is in error state, forceUpdate
|
||||
const state = await execShellAsync(`docker stack ps ${running.build.container.name} --format '{{ json . }}'`)
|
||||
const isError = state.split('\n').filter(n => n).map(s => JSON.parse(s)).filter(n => n.DesiredState !== 'Running' && n.Image.split(':')[1] === running.build.container.tag)
|
||||
if (isError.length > 0) forceUpdate = true
|
||||
foundService = true
|
||||
|
||||
const runningWithoutContainer = JSON.parse(JSON.stringify(running))
|
||||
delete runningWithoutContainer.build.container
|
||||
|
||||
const configurationWithoutContainer = JSON.parse(JSON.stringify(configuration))
|
||||
delete configurationWithoutContainer.build.container
|
||||
|
||||
// If only the configuration changed
|
||||
if (JSON.stringify(runningWithoutContainer.build) !== JSON.stringify(configurationWithoutContainer.build) || JSON.stringify(runningWithoutContainer.publish) !== JSON.stringify(configurationWithoutContainer.publish)) configChanged = true
|
||||
// If only the image changed
|
||||
if (running.build.container.tag !== configuration.build.container.tag) imageChanged = true
|
||||
// If build pack changed, forceUpdate the service
|
||||
if (running.build.pack !== configuration.build.pack) forceUpdate = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if (forceUpdate) {
|
||||
imageChanged = false
|
||||
configChanged = false
|
||||
}
|
||||
return {
|
||||
foundService,
|
||||
imageChanged,
|
||||
configChanged,
|
||||
forceUpdate
|
||||
}
|
||||
}
|
||||
module.exports = { setDefaultConfiguration, updateServiceLabels, precheckDeployment }
|
||||
@@ -1,64 +0,0 @@
|
||||
const fs = require('fs').promises
|
||||
module.exports = async function (configuration) {
|
||||
try {
|
||||
// TODO: Write full .dockerignore for all deployments!!
|
||||
if (configuration.build.pack === 'php') {
|
||||
await fs.writeFile(`${configuration.general.workdir}/.htaccess`, `
|
||||
RewriteEngine On
|
||||
RewriteBase /
|
||||
RewriteCond %{REQUEST_FILENAME} !-d
|
||||
RewriteCond %{REQUEST_FILENAME} !-f
|
||||
RewriteRule ^(.+)$ index.php [QSA,L]
|
||||
`)
|
||||
}
|
||||
// await fs.writeFile(`${configuration.general.workdir}/.dockerignore`, 'node_modules')
|
||||
if (configuration.build.pack === 'static') {
|
||||
await fs.writeFile(
|
||||
`${configuration.general.workdir}/nginx.conf`,
|
||||
`user nginx;
|
||||
worker_processes auto;
|
||||
|
||||
error_log /var/log/nginx/error.log warn;
|
||||
pid /var/run/nginx.pid;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
|
||||
access_log off;
|
||||
sendfile on;
|
||||
#tcp_nopush on;
|
||||
keepalive_timeout 65;
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name localhost;
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
index index.html;
|
||||
try_files $uri $uri/index.html $uri/ /index.html =404;
|
||||
}
|
||||
|
||||
error_page 404 /50x.html;
|
||||
|
||||
# redirect server error pages to the static page /50x.html
|
||||
#
|
||||
error_page 500 502 503 504 /50x.html;
|
||||
location = /50x.html {
|
||||
root /usr/share/nginx/html;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
`
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(error)
|
||||
}
|
||||
}
|
||||
@@ -1,76 +0,0 @@
|
||||
const yaml = require('js-yaml')
|
||||
const fs = require('fs').promises
|
||||
const { execShellAsync } = require('../../common')
|
||||
const { docker } = require('../../docker')
|
||||
const { saveAppLog } = require('../../logging')
|
||||
const { deleteSameDeployments } = require('../cleanup')
|
||||
|
||||
module.exports = async function (configuration, imageChanged) {
|
||||
const generateEnvs = {}
|
||||
for (const secret of configuration.publish.secrets) {
|
||||
generateEnvs[secret.name] = secret.value
|
||||
}
|
||||
const containerName = configuration.build.container.name
|
||||
|
||||
// Only save SHA256 of it in the configuration label
|
||||
const baseServiceConfiguration = configuration.baseServiceConfiguration
|
||||
delete configuration.baseServiceConfiguration
|
||||
|
||||
const stack = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[containerName]: {
|
||||
image: `${configuration.build.container.name}:${configuration.build.container.tag}`,
|
||||
networks: [`${docker.network}`],
|
||||
environment: generateEnvs,
|
||||
deploy: {
|
||||
...baseServiceConfiguration,
|
||||
labels: [
|
||||
'managedBy=coolify',
|
||||
'type=application',
|
||||
'configuration=' + JSON.stringify(configuration),
|
||||
'traefik.enable=true',
|
||||
'traefik.http.services.' +
|
||||
configuration.build.container.name +
|
||||
`.loadbalancer.server.port=${configuration.publish.port}`,
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.entrypoints=websecure',
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.rule=Host(`' +
|
||||
configuration.publish.domain +
|
||||
'`) && PathPrefix(`' +
|
||||
configuration.publish.path +
|
||||
'`)',
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.tls.certresolver=letsencrypt',
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.middlewares=global-compress'
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[`${docker.network}`]: {
|
||||
external: true
|
||||
}
|
||||
}
|
||||
}
|
||||
await saveAppLog('### Publishing.', configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
|
||||
if (imageChanged) {
|
||||
// console.log('image changed')
|
||||
await execShellAsync(`docker service update --image ${configuration.build.container.name}:${configuration.build.container.tag} ${configuration.build.container.name}_${configuration.build.container.name}`)
|
||||
} else {
|
||||
// console.log('new deployment or force deployment or config changed')
|
||||
await deleteSameDeployments(configuration)
|
||||
await execShellAsync(
|
||||
`cat ${configuration.general.workdir}/stack.yml | docker stack deploy --prune -c - ${containerName}`
|
||||
)
|
||||
}
|
||||
|
||||
await saveAppLog('### Published done!', configuration)
|
||||
}
|
||||
@@ -1,38 +0,0 @@
const jwt = require('jsonwebtoken')
const axios = require('axios')
const { execShellAsync, cleanupTmp } = require('../../common')

module.exports = async function (configuration) {
  const { workdir } = configuration.general
  const { organization, name, branch } = configuration.repository
  const github = configuration.github

  const githubPrivateKey = process.env.GITHUB_APP_PRIVATE_KEY.replace(/\\n/g, '\n').replace(/"/g, '')

  const payload = {
    iat: Math.round(new Date().getTime() / 1000),
    exp: Math.round(new Date().getTime() / 1000 + 60),
    iss: parseInt(github.app.id)
  }

  const jwtToken = jwt.sign(payload, githubPrivateKey, {
    algorithm: 'RS256'
  })
  const accessToken = await axios({
    method: 'POST',
    url: `https://api.github.com/app/installations/${github.installation.id}/access_tokens`,
    data: {},
    headers: {
      Authorization: 'Bearer ' + jwtToken,
      Accept: 'application/vnd.github.machine-man-preview+json'
    }
  })
  await execShellAsync(
    `mkdir -p ${workdir} && git clone -q -b ${branch} https://x-access-token:${accessToken.data.token}@github.com/${organization}/${name}.git ${workdir}/`
  )
  configuration.build.container.tag = (
    await execShellAsync(`cd ${configuration.general.workdir}/ && git rev-parse HEAD`)
  )
    .replace('\n', '')
    .slice(0, 7)
}
@@ -1,32 +0,0 @@
const dayjs = require('dayjs')

const { cleanupTmp } = require('../common')

const { saveAppLog } = require('../logging')
const copyFiles = require('./deploy/copyFiles')
const buildContainer = require('./build/container')
const deploy = require('./deploy/deploy')
const Deployment = require('../../models/Deployment')
const { purgeImagesContainers } = require('./cleanup')
const { updateServiceLabels } = require('./configuration')

async function queueAndBuild (configuration, imageChanged) {
  const { id, organization, name, branch } = configuration.repository
  const { domain } = configuration.publish
  const { deployId, nickname, workdir } = configuration.general
  await new Deployment({
    repoId: id, branch, deployId, domain, organization, name, nickname
  }).save()
  await saveAppLog(`${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} Queued.`, configuration)
  await copyFiles(configuration)
  await buildContainer(configuration)
  await deploy(configuration, imageChanged)
  await Deployment.findOneAndUpdate(
    { repoId: id, branch, deployId, organization, name, domain },
    { repoId: id, branch, deployId, organization, name, domain, progress: 'done' })
  await updateServiceLabels(configuration)
  cleanupTmp(workdir)
  await purgeImagesContainers()
}

module.exports = { queueAndBuild }
@@ -1,98 +0,0 @@
|
||||
const crypto = require('crypto')
|
||||
const shell = require('shelljs')
|
||||
const jsonwebtoken = require('jsonwebtoken')
|
||||
const { docker } = require('./docker')
|
||||
const User = require('../models/User')
|
||||
const algorithm = 'aes-256-cbc'
|
||||
const key = process.env.SECRETS_ENCRYPTION_KEY
|
||||
|
||||
function delay (t) {
|
||||
return new Promise(function (resolve) {
|
||||
setTimeout(function () {
|
||||
resolve('OK')
|
||||
}, t)
|
||||
})
|
||||
}
|
||||
|
||||
async function verifyUserId (authorization) {
|
||||
try {
|
||||
const token = authorization.split(' ')[1]
|
||||
const verify = jsonwebtoken.verify(token, process.env.JWT_SIGN_KEY)
|
||||
const found = await User.findOne({ uid: verify.jti })
|
||||
if (found) {
|
||||
return true
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
} catch (error) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
function execShellAsync (cmd, opts = {}) {
|
||||
try {
|
||||
return new Promise(function (resolve, reject) {
|
||||
shell.config.silent = true
|
||||
shell.exec(cmd, opts, function (code, stdout, stderr) {
|
||||
if (code !== 0) return reject(new Error(stderr))
|
||||
return resolve(stdout)
|
||||
})
|
||||
})
|
||||
} catch (error) {
|
||||
return new Error('Oops')
|
||||
}
|
||||
}
|
||||
function cleanupTmp (dir) {
|
||||
if (dir !== '/') shell.rm('-fr', dir)
|
||||
}
|
||||
|
||||
async function checkImageAvailable (name) {
|
||||
let cacheAvailable = false
|
||||
try {
|
||||
await docker.engine.getImage(name).get()
|
||||
cacheAvailable = true
|
||||
} catch (e) {
|
||||
// Cache image not found
|
||||
}
|
||||
return cacheAvailable
|
||||
}
|
||||
|
||||
function encryptData (text) {
|
||||
const iv = crypto.randomBytes(16)
|
||||
const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)
|
||||
let encrypted = cipher.update(text)
|
||||
encrypted = Buffer.concat([encrypted, cipher.final()])
|
||||
return { iv: iv.toString('hex'), encryptedData: encrypted.toString('hex') }
|
||||
}
|
||||
|
||||
function decryptData (text) {
|
||||
const iv = Buffer.from(text.iv, 'hex')
|
||||
const encryptedText = Buffer.from(text.encryptedData, 'hex')
|
||||
const decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
|
||||
let decrypted = decipher.update(encryptedText)
|
||||
decrypted = Buffer.concat([decrypted, decipher.final()])
|
||||
return decrypted.toString()
|
||||
}
|
||||
|
||||
function createToken (payload) {
|
||||
const { uuid } = payload
|
||||
return jsonwebtoken.sign({}, process.env.JWT_SIGN_KEY, {
|
||||
expiresIn: 15778800,
|
||||
algorithm: 'HS256',
|
||||
audience: 'coolify',
|
||||
issuer: 'coolify',
|
||||
jwtid: uuid,
|
||||
subject: `User:${uuid}`,
|
||||
notBefore: -1000
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
delay,
|
||||
createToken,
|
||||
execShellAsync,
|
||||
cleanupTmp,
|
||||
checkImageAvailable,
|
||||
encryptData,
|
||||
decryptData,
|
||||
verifyUserId
|
||||
}
|
||||
@@ -1,28 +0,0 @@
const Dockerode = require('dockerode')
const { saveAppLog } = require('./logging')

const docker = {
  engine: new Dockerode({
    socketPath: process.env.DOCKER_ENGINE
  }),
  network: process.env.DOCKER_NETWORK
}
async function streamEvents (stream, configuration) {
  await new Promise((resolve, reject) => {
    docker.engine.modem.followProgress(stream, onFinished, onProgress)
    function onFinished (err, res) {
      if (err) reject(err)
      resolve(res)
    }
    function onProgress (event) {
      if (event.error) {
        saveAppLog(event.error, configuration, true)
        reject(event.error)
      } else if (event.stream) {
        saveAppLog(event.stream, configuration)
      }
    }
  })
}

module.exports = { streamEvents, docker }
@@ -1,75 +0,0 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.handleErrors = exports.handleValidationError = exports.handleNotFoundError = void 0;
|
||||
const http_errors_enhanced_1 = require("http-errors-enhanced");
|
||||
const interfaces_1 = require("./interfaces");
|
||||
const utils_1 = require("./utils");
|
||||
const validation_1 = require("./validation");
|
||||
function handleNotFoundError(request, reply) {
|
||||
handleErrors(new http_errors_enhanced_1.NotFoundError('Not found.'), request, reply);
|
||||
}
|
||||
exports.handleNotFoundError = handleNotFoundError;
|
||||
function handleValidationError(error, request) {
|
||||
/*
|
||||
As seen in https://github.com/fastify/fastify/blob/master/lib/validation.js
|
||||
the error.message will always start with the relative section (params, querystring, headers, body)
|
||||
and fastify throws on first failing section.
|
||||
*/
|
||||
const section = error.message.match(/^\w+/)[0];
|
||||
return new http_errors_enhanced_1.BadRequestError('One or more validations failed trying to process your request.', {
|
||||
failedValidations: validation_1.convertValidationErrors(section, Reflect.get(request, section), error.validation)
|
||||
});
|
||||
}
|
||||
exports.handleValidationError = handleValidationError;
|
||||
function handleErrors(error, request, reply) {
|
||||
var _a, _b;
|
||||
// It is a generic error, handle it
|
||||
const code = error.code;
|
||||
if (!('statusCode' in error)) {
|
||||
if ('validation' in error && ((_a = request[interfaces_1.kHttpErrorsEnhancedConfiguration]) === null || _a === void 0 ? void 0 : _a.convertValidationErrors)) {
|
||||
// If it is a validation error, convert errors to human friendly format
|
||||
error = handleValidationError(error, request);
|
||||
}
|
||||
else if ((_b = request[interfaces_1.kHttpErrorsEnhancedConfiguration]) === null || _b === void 0 ? void 0 : _b.hideUnhandledErrors) {
|
||||
// It is requested to hide the error, just log it and then create a generic one
|
||||
request.log.error({ error: http_errors_enhanced_1.serializeError(error) });
|
||||
error = new http_errors_enhanced_1.InternalServerError('An error occurred trying to process your request.');
|
||||
}
|
||||
else {
|
||||
// Wrap in an HttpError, making the stack explicitly available
|
||||
error = new http_errors_enhanced_1.InternalServerError(http_errors_enhanced_1.serializeError(error));
|
||||
Object.defineProperty(error, 'stack', { enumerable: true });
|
||||
}
|
||||
}
|
||||
else if (code === 'INVALID_CONTENT_TYPE' || code === 'FST_ERR_CTP_INVALID_MEDIA_TYPE') {
|
||||
error = new http_errors_enhanced_1.UnsupportedMediaTypeError(utils_1.upperFirst(validation_1.validationMessagesFormatters.contentType()));
|
||||
}
|
||||
else if (code === 'FST_ERR_CTP_EMPTY_JSON_BODY') {
|
||||
error = new http_errors_enhanced_1.BadRequestError(utils_1.upperFirst(validation_1.validationMessagesFormatters.jsonEmpty()));
|
||||
}
|
||||
else if (code === 'MALFORMED_JSON' || error.message === 'Invalid JSON' || error.stack.includes('at JSON.parse')) {
|
||||
error = new http_errors_enhanced_1.BadRequestError(utils_1.upperFirst(validation_1.validationMessagesFormatters.json()));
|
||||
}
|
||||
// Get the status code
|
||||
let { statusCode, headers } = error;
|
||||
// Code outside HTTP range
|
||||
if (statusCode < 100 || statusCode > 599) {
|
||||
statusCode = http_errors_enhanced_1.INTERNAL_SERVER_ERROR;
|
||||
}
|
||||
// Create the body
|
||||
const body = {
|
||||
statusCode,
|
||||
error: http_errors_enhanced_1.messagesByCodes[statusCode],
|
||||
message: error.message
|
||||
};
|
||||
http_errors_enhanced_1.addAdditionalProperties(body, error);
|
||||
// Send the error back
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
reply
|
||||
.code(statusCode)
|
||||
.headers(headers !== null && headers !== void 0 ? headers : {})
|
||||
.type('application/json')
|
||||
.send(body);
|
||||
}
|
||||
exports.handleErrors = handleErrors;
|
||||
@@ -1,58 +0,0 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.plugin = exports.validationMessagesFormatters = exports.niceJoin = exports.convertValidationErrors = void 0;
|
||||
const fastify_plugin_1 = __importDefault(require("fastify-plugin"));
|
||||
const handlers_1 = require("./handlers");
|
||||
const interfaces_1 = require("./interfaces");
|
||||
const validation_1 = require("./validation");
|
||||
__exportStar(require("./handlers"), exports);
|
||||
__exportStar(require("./interfaces"), exports);
|
||||
var validation_2 = require("./validation");
|
||||
Object.defineProperty(exports, "convertValidationErrors", { enumerable: true, get: function () { return validation_2.convertValidationErrors; } });
|
||||
Object.defineProperty(exports, "niceJoin", { enumerable: true, get: function () { return validation_2.niceJoin; } });
|
||||
Object.defineProperty(exports, "validationMessagesFormatters", { enumerable: true, get: function () { return validation_2.validationMessagesFormatters; } });
|
||||
exports.plugin = fastify_plugin_1.default(function (instance, options, done) {
|
||||
var _a, _b, _c, _d;
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
const convertResponsesValidationErrors = (_a = options.convertResponsesValidationErrors) !== null && _a !== void 0 ? _a : !isProduction;
|
||||
const configuration = {
|
||||
hideUnhandledErrors: (_b = options.hideUnhandledErrors) !== null && _b !== void 0 ? _b : isProduction,
|
||||
convertValidationErrors: (_c = options.convertValidationErrors) !== null && _c !== void 0 ? _c : true,
|
||||
responseValidatorCustomizer: options.responseValidatorCustomizer,
|
||||
allowUndeclaredResponses: (_d = options.allowUndeclaredResponses) !== null && _d !== void 0 ? _d : false
|
||||
};
|
||||
instance.decorate(interfaces_1.kHttpErrorsEnhancedConfiguration, null);
|
||||
instance.decorateRequest(interfaces_1.kHttpErrorsEnhancedConfiguration, null);
|
||||
instance.addHook('onRequest', async (request) => {
|
||||
request[interfaces_1.kHttpErrorsEnhancedConfiguration] = configuration;
|
||||
});
|
||||
instance.setErrorHandler(handlers_1.handleErrors);
|
||||
// instance.setNotFoundHandler(handlers_1.handleNotFoundError);
|
||||
if (convertResponsesValidationErrors) {
|
||||
instance.decorate(interfaces_1.kHttpErrorsEnhancedResponseValidations, []);
|
||||
instance.addHook('onRoute', validation_1.addResponseValidation);
|
||||
instance.addHook('onReady', validation_1.compileResponseValidationSchema.bind(instance, configuration));
|
||||
}
|
||||
done();
|
||||
}, { name: 'fastify-http-errors-enhanced' });
|
||||
exports.default = exports.plugin;
|
||||
// Fix CommonJS exporting
|
||||
/* istanbul ignore else */
|
||||
if (typeof module !== 'undefined') {
|
||||
module.exports = exports.plugin;
|
||||
Object.assign(module.exports, exports);
|
||||
}
|
||||
@@ -1,6 +0,0 @@
/* eslint-disable */
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.kHttpErrorsEnhancedResponseValidations = exports.kHttpErrorsEnhancedConfiguration = void 0;
exports.kHttpErrorsEnhancedConfiguration = Symbol('fastify-http-errors-enhanced-configuration');
exports.kHttpErrorsEnhancedResponseValidations = Symbol('fastify-http-errors-enhanced-response-validation');
@@ -1,31 +0,0 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.get = exports.upperFirst = void 0;
|
||||
function upperFirst(source) {
|
||||
if (typeof source !== 'string' || !source.length) {
|
||||
return source;
|
||||
}
|
||||
return source[0].toUpperCase() + source.substring(1);
|
||||
}
|
||||
exports.upperFirst = upperFirst;
|
||||
function get(target, path) {
|
||||
var _a;
|
||||
const tokens = path.split('.').map((t) => t.trim());
|
||||
for (const token of tokens) {
|
||||
if (typeof target === 'undefined' || target === null) {
|
||||
// We're supposed to be still iterating, but the chain is over - Return undefined
|
||||
target = undefined;
|
||||
break;
|
||||
}
|
||||
const index = token.match(/^(\d+)|(?:\[(\d+)\])$/);
|
||||
if (index) {
|
||||
target = target[parseInt((_a = index[1]) !== null && _a !== void 0 ? _a : index[2], 10)];
|
||||
}
|
||||
else {
|
||||
target = target[token];
|
||||
}
|
||||
}
|
||||
return target;
|
||||
}
|
||||
exports.get = get;
|
||||
@@ -1,239 +0,0 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.compileResponseValidationSchema = exports.addResponseValidation = exports.convertValidationErrors = exports.validationMessagesFormatters = exports.niceJoin = void 0;
|
||||
const ajv_1 = __importDefault(require("ajv"));
|
||||
const http_errors_enhanced_1 = require("http-errors-enhanced");
|
||||
const interfaces_1 = require("./interfaces");
|
||||
const utils_1 = require("./utils");
|
||||
function niceJoin(array, lastSeparator = ' and ', separator = ', ') {
|
||||
switch (array.length) {
|
||||
case 0:
|
||||
return '';
|
||||
case 1:
|
||||
return array[0];
|
||||
case 2:
|
||||
return array.join(lastSeparator);
|
||||
default:
|
||||
return array.slice(0, array.length - 1).join(separator) + lastSeparator + array[array.length - 1];
|
||||
}
|
||||
}
|
||||
exports.niceJoin = niceJoin;
|
||||
exports.validationMessagesFormatters = {
|
||||
contentType: () => 'only JSON payloads are accepted. Please set the "Content-Type" header to start with "application/json"',
|
||||
json: () => 'the body payload is not a valid JSON',
|
||||
jsonEmpty: () => 'the JSON body payload cannot be empty if the "Content-Type" header is set',
|
||||
missing: () => 'must be present',
|
||||
unknown: () => 'is not a valid property',
|
||||
uuid: () => 'must be a valid GUID (UUID v4)',
|
||||
timestamp: () => 'must be a valid ISO 8601 / RFC 3339 timestamp (example: 2018-07-06T12:34:56Z)',
|
||||
date: () => 'must be a valid ISO 8601 / RFC 3339 date (example: 2018-07-06)',
|
||||
time: () => 'must be a valid ISO 8601 / RFC 3339 time (example: 12:34:56)',
|
||||
uri: () => 'must be a valid URI',
|
||||
hostname: () => 'must be a valid hostname',
|
||||
ipv4: () => 'must be a valid IPv4',
|
||||
ipv6: () => 'must be a valid IPv6',
|
||||
paramType: (type) => {
|
||||
switch (type) {
|
||||
case 'integer':
|
||||
return 'must be a valid integer number';
|
||||
case 'number':
|
||||
return 'must be a valid number';
|
||||
case 'boolean':
|
||||
return 'must be a valid boolean (true or false)';
|
||||
case 'object':
|
||||
return 'must be a object';
|
||||
case 'array':
|
||||
return 'must be an array';
|
||||
default:
|
||||
return 'must be a string';
|
||||
}
|
||||
},
|
||||
presentString: () => 'must be a non empty string',
|
||||
minimum: (min) => `must be a number greater than or equal to ${min}`,
|
||||
maximum: (max) => `must be a number less than or equal to ${max}`,
|
||||
minimumProperties(min) {
|
||||
return min === 1 ? 'cannot be a empty object' : `must be a object with at least ${min} properties`;
|
||||
},
|
||||
maximumProperties(max) {
|
||||
return max === 0 ? 'must be a empty object' : `must be a object with at most ${max} properties`;
|
||||
},
|
||||
minimumItems(min) {
|
||||
return min === 1 ? 'cannot be a empty array' : `must be an array with at least ${min} items`;
|
||||
},
|
||||
maximumItems(max) {
|
||||
return max === 0 ? 'must be a empty array' : `must be an array with at most ${max} items`;
|
||||
},
|
||||
enum: (values) => `must be one of the following values: ${niceJoin(values.map((f) => `"${f}"`), ' or ')}`,
|
||||
pattern: (pattern) => `must match pattern "${pattern.replace(/\(\?:/g, '(')}"`,
|
||||
invalidResponseCode: (code) => `This endpoint cannot respond with HTTP status ${code}.`,
|
||||
invalidResponse: (code) => `The response returned from the endpoint violates its specification for the HTTP status ${code}.`,
|
||||
invalidFormat: (format) => `must match format "${format}" (format)`
|
||||
};
|
||||
function convertValidationErrors(section, data, validationErrors) {
|
||||
const errors = {};
|
||||
if (section === 'querystring') {
|
||||
section = 'query';
|
||||
}
|
||||
// For each error
|
||||
for (const e of validationErrors) {
|
||||
let message = '';
|
||||
let pattern;
|
||||
let value;
|
||||
let reason;
|
||||
// Normalize the key
|
||||
let key = e.dataPath;
|
||||
if (key.startsWith('.')) {
|
||||
key = key.substring(1);
|
||||
}
|
||||
// Remove useless quotes
|
||||
/* istanbul ignore next */
|
||||
if (key.startsWith('[') && key.endsWith(']')) {
|
||||
key = key.substring(1, key.length - 1);
|
||||
}
|
||||
// Depending on the type
|
||||
switch (e.keyword) {
|
||||
case 'required':
|
||||
case 'dependencies':
|
||||
key = e.params.missingProperty;
|
||||
message = exports.validationMessagesFormatters.missing();
|
||||
break;
|
||||
case 'additionalProperties':
|
||||
key = e.params.additionalProperty;
|
||||
message = exports.validationMessagesFormatters.unknown();
|
||||
break;
|
||||
case 'type':
|
||||
message = exports.validationMessagesFormatters.paramType(e.params.type);
|
||||
break;
|
||||
case 'minProperties':
|
||||
message = exports.validationMessagesFormatters.minimumProperties(e.params.limit);
|
||||
break;
|
||||
case 'maxProperties':
|
||||
message = exports.validationMessagesFormatters.maximumProperties(e.params.limit);
|
||||
break;
|
||||
case 'minItems':
|
||||
message = exports.validationMessagesFormatters.minimumItems(e.params.limit);
|
||||
break;
|
||||
case 'maxItems':
|
||||
message = exports.validationMessagesFormatters.maximumItems(e.params.limit);
|
||||
break;
|
||||
case 'minimum':
|
||||
message = exports.validationMessagesFormatters.minimum(e.params.limit);
|
||||
break;
|
||||
case 'maximum':
|
||||
message = exports.validationMessagesFormatters.maximum(e.params.limit);
|
||||
break;
|
||||
case 'enum':
|
||||
message = exports.validationMessagesFormatters.enum(e.params.allowedValues);
|
||||
break;
|
||||
case 'pattern':
|
||||
pattern = e.params.pattern;
|
||||
value = utils_1.get(data, key);
|
||||
if (pattern === '.+' && !value) {
|
||||
message = exports.validationMessagesFormatters.presentString();
|
||||
}
|
||||
else {
|
||||
message = exports.validationMessagesFormatters.pattern(e.params.pattern);
|
||||
}
|
||||
break;
|
||||
case 'format':
|
||||
reason = e.params.format;
|
||||
// Normalize the key
|
||||
if (reason === 'date-time') {
|
||||
reason = 'timestamp';
|
||||
}
|
||||
message = (exports.validationMessagesFormatters[reason] || exports.validationMessagesFormatters.invalidFormat)(reason);
|
||||
break;
|
||||
}
|
||||
// No custom message was found, default to input one replacing the starting verb and adding some path info
|
||||
if (!message.length) {
|
||||
message = `${e.message.replace(/^should/, 'must')} (${e.keyword})`;
|
||||
}
|
||||
// Remove useless quotes
|
||||
/* istanbul ignore next */
|
||||
if (key.match(/(?:^['"])(?:[^.]+)(?:['"]$)/)) {
|
||||
key = key.substring(1, key.length - 1);
|
||||
}
|
||||
// Fix empty properties
|
||||
if (!key) {
|
||||
key = '$root';
|
||||
}
|
||||
key = key.replace(/^\//, '');
|
||||
errors[key] = message;
|
||||
}
|
||||
return { [section]: errors };
|
||||
}
|
||||
exports.convertValidationErrors = convertValidationErrors;
|
||||
function addResponseValidation(route) {
|
||||
var _a;
|
||||
if (!((_a = route.schema) === null || _a === void 0 ? void 0 : _a.response)) {
|
||||
return;
|
||||
}
|
||||
const validators = {};
|
||||
/*
|
||||
Add these validators to the list of the one to compile once the server is started.
|
||||
This makes possible to handle shared schemas.
|
||||
*/
|
||||
this[interfaces_1.kHttpErrorsEnhancedResponseValidations].push([
|
||||
this,
|
||||
validators,
|
||||
Object.entries(route.schema.response)
|
||||
]);
|
||||
// Note that this hook is not called for non JSON payloads therefore validation is not possible in such cases
|
||||
route.preSerialization = async function (request, reply, payload) {
|
||||
const statusCode = reply.raw.statusCode;
|
||||
// Never validate error 500
|
||||
if (statusCode === http_errors_enhanced_1.INTERNAL_SERVER_ERROR) {
|
||||
return payload;
|
||||
}
|
||||
// No validator, it means the HTTP status is not allowed
|
||||
const validator = validators[statusCode];
|
||||
if (!validator) {
|
||||
if (request[interfaces_1.kHttpErrorsEnhancedConfiguration].allowUndeclaredResponses) {
|
||||
return payload;
|
||||
}
|
||||
throw new http_errors_enhanced_1.InternalServerError(exports.validationMessagesFormatters.invalidResponseCode(statusCode));
|
||||
}
|
||||
// Now validate the payload
|
||||
const valid = validator(payload);
|
||||
if (!valid) {
|
||||
throw new http_errors_enhanced_1.InternalServerError(exports.validationMessagesFormatters.invalidResponse(statusCode), {
|
||||
failedValidations: convertValidationErrors('response', payload, validator.errors)
|
||||
});
|
||||
}
|
||||
return payload;
|
||||
};
|
||||
}
|
||||
exports.addResponseValidation = addResponseValidation;
|
||||
function compileResponseValidationSchema(configuration) {
|
||||
// Fix CJS/ESM interoperability
|
||||
// @ts-expect-error
|
||||
let AjvConstructor = ajv_1.default;
|
||||
/* istanbul ignore next */
|
||||
if (AjvConstructor.default) {
|
||||
AjvConstructor = AjvConstructor.default;
|
||||
}
|
||||
const hasCustomizer = typeof configuration.responseValidatorCustomizer === 'function';
|
||||
for (const [instance, validators, schemas] of this[interfaces_1.kHttpErrorsEnhancedResponseValidations]) {
|
||||
// @ts-expect-error
|
||||
const compiler = new AjvConstructor({
|
||||
// The fastify defaults, with the exception of removeAdditional and coerceTypes, which have been reversed
|
||||
removeAdditional: false,
|
||||
useDefaults: true,
|
||||
coerceTypes: false,
|
||||
allErrors: true
|
||||
});
|
||||
compiler.addSchema(Object.values(instance.getSchemas()));
|
||||
compiler.addKeyword('example');
|
||||
if (hasCustomizer) {
|
||||
configuration.responseValidatorCustomizer(compiler);
|
||||
}
|
||||
for (const [code, schema] of schemas) {
|
||||
validators[code] = compiler.compile(schema);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.compileResponseValidationSchema = compileResponseValidationSchema;
|
||||
@@ -1,48 +0,0 @@
|
||||
const dayjs = require('dayjs')
|
||||
const axios = require('axios')
|
||||
|
||||
const ApplicationLog = require('../models/Logs/Application')
|
||||
const ServerLog = require('../models/Logs/Server')
|
||||
const Settings = require('../models/Settings')
|
||||
const { version } = require('../../package.json')
|
||||
|
||||
function generateTimestamp () {
|
||||
return `${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} `
|
||||
}
|
||||
const patterns = [
|
||||
'[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
|
||||
'(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
|
||||
].join('|')
|
||||
|
||||
async function saveAppLog (event, configuration, isError) {
|
||||
try {
|
||||
const deployId = configuration.general.deployId
|
||||
const repoId = configuration.repository.id
|
||||
const branch = configuration.repository.branch
|
||||
if (isError) {
|
||||
const clearedEvent = '[ERROR 😱] ' + generateTimestamp() + event.replace(new RegExp(patterns, 'g'), '').replace(/(\r\n|\n|\r)/gm, '')
|
||||
await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
|
||||
} else {
|
||||
if (event && event !== '\n') {
|
||||
const clearedEvent = '[INFO] ' + generateTimestamp() + event.replace(new RegExp(patterns, 'g'), '').replace(/(\r\n|\n|\r)/gm, '')
|
||||
await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
return error
|
||||
}
|
||||
}
|
||||
|
||||
async function saveServerLog (error) {
|
||||
const settings = await Settings.findOne({ applicationName: 'coolify' })
|
||||
const payload = { message: error.message, stack: error.stack, type: error.type || 'spaghetticode', version }
|
||||
|
||||
const found = await ServerLog.find(payload)
|
||||
if (found.length === 0 && error.message) await new ServerLog(payload).save()
|
||||
if (settings && settings.sendErrors && process.env.NODE_ENV === 'production') await axios.post('https://errors.coollabs.io/api/error', payload)
|
||||
}
|
||||
module.exports = {
|
||||
saveAppLog,
|
||||
saveServerLog
|
||||
}
|
||||
@@ -1,16 +0,0 @@
const mongoose = require('mongoose')
const deploymentSchema = mongoose.Schema(
  {
    deployId: { type: String, required: true },
    nickname: { type: String, required: true },
    repoId: { type: Number, required: true },
    organization: { type: String, required: true },
    name: { type: String, required: true },
    branch: { type: String, required: true },
    domain: { type: String, required: true },
    progress: { type: String, required: true, default: 'queued' }
  },
  { timestamps: true }
)

module.exports = mongoose.model('deployment', deploymentSchema)
@@ -1,10 +0,0 @@
const mongoose = require('mongoose')
const logSchema = mongoose.Schema(
  {
    deployId: { type: String, required: true },
    event: { type: String, required: true }
  },
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)

module.exports = mongoose.model('logs-application', logSchema)
@@ -1,14 +0,0 @@
const mongoose = require('mongoose')
const { version } = require('../../../package.json')
const logSchema = mongoose.Schema(
  {
    version: { type: String, default: version },
    type: { type: String, required: true },
    message: { type: String, required: true },
    stack: { type: String },
    seen: { type: Boolean, default: false }
  },
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)

module.exports = mongoose.model('logs-server', logSchema)
@@ -1,12 +0,0 @@
const mongoose = require('mongoose')

const settingsSchema = mongoose.Schema(
  {
    applicationName: { type: String, required: true, default: 'coolify' },
    allowRegistration: { type: Boolean, required: true, default: false },
    sendErrors: { type: Boolean, required: true, default: true }
  },
  { timestamps: true }
)

module.exports = mongoose.model('settings', settingsSchema)
@@ -1,12 +0,0 @@
const mongoose = require('mongoose')

const userSchema = mongoose.Schema(
  {
    email: { type: String, required: true },
    avatar: { type: String },
    uid: { type: String, required: true }
  },
  { timestamps: true }
)

module.exports = mongoose.model('user', userSchema)
@@ -1,21 +0,0 @@
const fp = require('fastify-plugin')
const User = require('../models/User')
module.exports = fp(async function (fastify, options, next) {
  fastify.register(require('fastify-jwt'), {
    secret: fastify.config.JWT_SIGN_KEY
  })
  fastify.addHook('onRequest', async (request, reply) => {
    try {
      const { jti } = await request.jwtVerify()
      const found = await User.findOne({ uid: jti })
      if (found) {
        return true
      } else {
        reply.code(401).send('Unauthorized')
      }
    } catch (err) {
      reply.code(401).send('Unauthorized')
    }
  })
  next()
})
@@ -1,37 +0,0 @@
|
||||
|
||||
const { setDefaultConfiguration } = require('../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.post('/', async (request, reply) => {
|
||||
try {
|
||||
const configuration = setDefaultConfiguration(request.body)
|
||||
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
let foundDomain = false
|
||||
|
||||
for (const service of services) {
|
||||
const running = JSON.parse(service.Spec.Labels.configuration)
|
||||
if (running) {
|
||||
if (
|
||||
running.publish.domain === configuration.publish.domain &&
|
||||
running.repository.id !== configuration.repository.id &&
|
||||
running.publish.path === configuration.publish.path
|
||||
) {
|
||||
foundDomain = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if (fastify.config.DOMAIN === configuration.publish.domain) foundDomain = true
|
||||
if (foundDomain) {
|
||||
reply.code(500).send({ message: 'Domain already in use.' })
|
||||
return
|
||||
}
|
||||
return { message: 'OK' }
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,66 +0,0 @@
|
||||
|
||||
const Deployment = require('../../../../models/Deployment')
|
||||
const ApplicationLog = require('../../../../models/Logs/Application')
|
||||
const { verifyUserId, cleanupTmp } = require('../../../../libs/common')
|
||||
const { queueAndBuild } = require('../../../../libs/applications')
|
||||
const { setDefaultConfiguration, precheckDeployment } = require('../../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../../libs/docker')
|
||||
const { saveServerLog } = require('../../../../libs/logging')
|
||||
const cloneRepository = require('../../../../libs/applications/github/cloneRepository')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.post('/', async (request, reply) => {
|
||||
let configuration
|
||||
try {
|
||||
await verifyUserId(request.headers.authorization)
|
||||
} catch (error) {
|
||||
reply.code(500).send({ error: 'Invalid request' })
|
||||
return
|
||||
}
|
||||
try {
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
configuration = setDefaultConfiguration(request.body)
|
||||
if (!configuration) {
|
||||
throw new Error('Whaat?')
|
||||
}
|
||||
await cloneRepository(configuration)
|
||||
const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({ services, configuration })
|
||||
|
||||
if (foundService && !forceUpdate && !imageChanged && !configChanged) {
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
|
||||
return
|
||||
}
|
||||
|
||||
const alreadyQueued = await Deployment.find({
|
||||
repoId: configuration.repository.id,
|
||||
branch: configuration.repository.branch,
|
||||
organization: configuration.repository.organization,
|
||||
name: configuration.repository.name,
|
||||
domain: configuration.publish.domain,
|
||||
progress: { $in: ['queued', 'inprogress'] }
|
||||
})
|
||||
|
||||
if (alreadyQueued.length > 0) {
|
||||
reply.code(200).send({ message: 'Already in the queue.' })
|
||||
return
|
||||
}
|
||||
|
||||
reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name, deployId: configuration.general.deployId })
|
||||
await queueAndBuild(configuration, imageChanged)
|
||||
} catch (error) {
|
||||
const { id, organization, name, branch } = configuration.repository
|
||||
const { domain } = configuration.publish
|
||||
const { deployId } = configuration.general
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
if (error.name) {
|
||||
if (error.message && error.stack) await saveServerLog(error)
|
||||
if (reply.sent) await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
|
||||
}
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,66 +0,0 @@
const ApplicationLog = require('../../../../models/Logs/Application')
const Deployment = require('../../../../models/Deployment')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const relativeTime = require('dayjs/plugin/relativeTime')
dayjs.extend(utc)
dayjs.extend(relativeTime)

module.exports = async function (fastify) {
  const getLogSchema = {
    querystring: {
      type: 'object',
      properties: {
        repoId: { type: 'string' },
        branch: { type: 'string' }
      },
      required: ['repoId', 'branch']
    }
  }
  fastify.get('/', { schema: getLogSchema }, async (request, reply) => {
    try {
      const { repoId, branch, page } = request.query
      const onePage = 5
      const show = Number(page) * onePage || 5
      const deploy = await Deployment.find({ repoId, branch })
        .select('-_id -__v -repoId')
        .sort({ createdAt: 'desc' })
        .limit(show)

      const finalLogs = deploy.map(d => {
        const finalLogs = { ...d._doc }

        const updatedAt = dayjs(d.updatedAt).utc()

        finalLogs.took = updatedAt.diff(dayjs(d.createdAt)) / 1000
        finalLogs.since = updatedAt.fromNow()

        return finalLogs
      })
      return finalLogs
    } catch (error) {
      throw new Error(error)
    }
  })

  fastify.get('/:deployId', async (request, reply) => {
    const { deployId } = request.params
    try {
      const logs = await ApplicationLog.find({ deployId })
        .select('-_id -__v')
        .sort({ createdAt: 'asc' })

      const deploy = await Deployment.findOne({ deployId })
        .select('-_id -__v')
        .sort({ createdAt: 'desc' })

      const finalLogs = {}
      finalLogs.progress = deploy.progress
      finalLogs.events = logs.map(log => log.event)
      finalLogs.human = dayjs(deploy.updatedAt).from(dayjs(deploy.updatedAt))
      return finalLogs
    } catch (e) {
      throw new Error('No logs found')
    }
  })
}
@@ -1,16 +0,0 @@
const { docker } = require('../../../libs/docker')
const { saveServerLog } = require('../../../libs/logging')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    try {
      const { name } = request.query
      const service = await docker.engine.getService(`${name}_${name}`)
      const logs = (await service.logs({ stdout: true, stderr: true, timestamps: true })).toString().split('\n').map(l => l.slice(8)).filter((a) => a)
      return { logs }
    } catch (error) {
      await saveServerLog(error)
      throw new Error(error)
    }
  })
}
@@ -1,35 +0,0 @@
const { docker } = require('../../../libs/docker')
const { execShellAsync } = require('../../../libs/common')
const ApplicationLog = require('../../../models/Logs/Application')
const Deployment = require('../../../models/Deployment')

module.exports = async function (fastify) {
  fastify.post('/', async (request, reply) => {
    const { organization, name, branch } = request.body
    let found = false
    try {
      (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application').map(s => {
        const running = JSON.parse(s.Spec.Labels.configuration)
        if (running.repository.organization === organization &&
          running.repository.name === name &&
          running.repository.branch === branch) {
          found = running
        }
        return null
      })
      if (found) {
        const deploys = await Deployment.find({ organization, branch, name })
        for (const deploy of deploys) {
          await ApplicationLog.deleteMany({ deployId: deploy.deployId })
          await Deployment.deleteMany({ deployId: deploy.deployId })
        }
        await execShellAsync(`docker stack rm ${found.build.container.name}`)
        reply.code(200).send({ organization, name, branch })
      } else {
        reply.code(500).send({ message: 'Nothing to do.' })
      }
    } catch (error) {
      reply.code(500).send({ message: 'Nothing to do.' })
    }
  })
}
@@ -1,28 +0,0 @@
const { docker } = require('../../libs/docker')

module.exports = async function (fastify) {
  fastify.post('/', async (request, reply) => {
    const { name, organization, branch } = request.body
    const services = await docker.engine.listServices()
    const applications = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')

    const found = applications.find(r => {
      const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
      if (branch) {
        if (configuration.repository.name === name && configuration.repository.organization === organization && configuration.repository.branch === branch) {
          return r
        }
      } else {
        if (configuration.repository.name === name && configuration.repository.organization === organization) {
          return r
        }
      }
      return null
    })
    if (found) {
      return JSON.parse(found.Spec.Labels.configuration)
    } else {
      reply.code(500).send({ message: 'No configuration found.' })
    }
  })
}
@@ -1,63 +0,0 @@
const { docker } = require('../../../libs/docker')
const Deployment = require('../../../models/Deployment')
const ServerLog = require('../../../models/Logs/Server')
const { saveServerLog } = require('../../../libs/logging')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    try {
      const latestDeployments = await Deployment.aggregate([
        {
          $sort: { createdAt: -1 }
        },
        {
          $group:
          {
            _id: {
              repoId: '$repoId',
              branch: '$branch'
            },
            createdAt: { $last: '$createdAt' },
            progress: { $first: '$progress' }
          }
        }
      ])
      const serverLogs = await ServerLog.find()
      const services = await docker.engine.listServices()
      let applications = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application' && r.Spec.Labels.configuration)
      let databases = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && r.Spec.Labels.configuration)
      applications = applications.map(r => {
        if (JSON.parse(r.Spec.Labels.configuration)) {
          const configuration = JSON.parse(r.Spec.Labels.configuration)
          const status = latestDeployments.find(l => configuration.repository.id === l._id.repoId && configuration.repository.branch === l._id.branch)
          if (status && status.progress) r.progress = status.progress
          r.Spec.Labels.configuration = configuration
          return r
        }
        return {}
      })
      databases = databases.map(r => {
        const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
        r.Spec.Labels.configuration = configuration
        return r
      })
      applications = [...new Map(applications.map(item => [item.Spec.Labels.configuration.publish.domain + item.Spec.Labels.configuration.publish.path, item])).values()]
      return {
        serverLogs,
        applications: {
          deployed: applications
        },
        databases: {
          deployed: databases
        }
      }
    } catch (error) {
      if (error.code === 'ENOENT' && error.errno === -2) {
        throw new Error(`Docker service unavailable at ${error.address}.`)
      } else {
        await saveServerLog(error)
        throw new Error(error)
      }
    }
  })
}
@@ -1,179 +0,0 @@
|
||||
const yaml = require('js-yaml')
|
||||
const fs = require('fs').promises
|
||||
const cuid = require('cuid')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { execShellAsync } = require('../../../libs/common')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
|
||||
const generator = require('generate-password')
|
||||
|
||||
function getUniq () {
|
||||
return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 })
|
||||
}
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/:deployId', async (request, reply) => {
|
||||
const { deployId } = request.params
|
||||
try {
|
||||
const database = (await docker.engine.listServices()).find(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && JSON.parse(r.Spec.Labels.configuration).general.deployId === deployId)
|
||||
if (database) {
|
||||
const jsonEnvs = {}
|
||||
for (const d of database.Spec.TaskTemplate.ContainerSpec.Env) {
|
||||
const s = d.split('=')
|
||||
jsonEnvs[s[0]] = s[1]
|
||||
}
|
||||
const payload = {
|
||||
config: JSON.parse(database.Spec.Labels.configuration),
|
||||
envs: jsonEnvs
|
||||
}
|
||||
reply.code(200).send(payload)
|
||||
} else {
|
||||
throw new Error()
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error('No database found?')
|
||||
}
|
||||
})
|
||||
|
||||
const postSchema = {
|
||||
body: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
type: { type: 'string', enum: ['mongodb', 'postgresql', 'mysql', 'couchdb'] }
|
||||
},
|
||||
required: ['type']
|
||||
}
|
||||
}
|
||||
|
||||
fastify.post('/deploy', { schema: postSchema }, async (request, reply) => {
|
||||
try {
|
||||
let { type, defaultDatabaseName } = request.body
|
||||
const passwords = generator.generateMultiple(2, {
|
||||
length: 24,
|
||||
numbers: true,
|
||||
strict: true
|
||||
})
|
||||
const usernames = generator.generateMultiple(2, {
|
||||
length: 10,
|
||||
numbers: true,
|
||||
strict: true
|
||||
})
|
||||
// TODO: Query for existing db with the same name
|
||||
const nickname = getUniq()
|
||||
|
||||
if (!defaultDatabaseName) defaultDatabaseName = nickname
|
||||
|
||||
reply.code(201).send({ message: 'Deploying.' })
|
||||
// TODO: Persistent volume, custom inputs
|
||||
const deployId = cuid()
|
||||
const configuration = {
|
||||
general: {
|
||||
workdir: `/tmp/${deployId}`,
|
||||
deployId,
|
||||
nickname,
|
||||
type
|
||||
},
|
||||
database: {
|
||||
usernames,
|
||||
passwords,
|
||||
defaultDatabaseName
|
||||
},
|
||||
deploy: {
|
||||
name: nickname
|
||||
}
|
||||
}
|
||||
let generateEnvs = {}
|
||||
let image = null
|
||||
let volume = null
|
||||
if (type === 'mongodb') {
|
||||
generateEnvs = {
|
||||
MONGODB_ROOT_PASSWORD: passwords[0],
|
||||
MONGODB_USERNAME: usernames[0],
|
||||
MONGODB_PASSWORD: passwords[1],
|
||||
MONGODB_DATABASE: defaultDatabaseName
|
||||
}
|
||||
image = 'bitnami/mongodb:4.4'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/mongodb`
|
||||
} else if (type === 'postgresql') {
|
||||
generateEnvs = {
|
||||
POSTGRESQL_PASSWORD: passwords[0],
|
||||
POSTGRESQL_USERNAME: usernames[0],
|
||||
POSTGRESQL_DATABASE: defaultDatabaseName
|
||||
}
|
||||
image = 'bitnami/postgresql:13.2.0'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/postgresql`
|
||||
} else if (type === 'couchdb') {
|
||||
generateEnvs = {
|
||||
COUCHDB_PASSWORD: passwords[0],
|
||||
COUCHDB_USER: usernames[0]
|
||||
}
|
||||
image = 'bitnami/couchdb:3'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/couchdb`
|
||||
} else if (type === 'mysql') {
|
||||
generateEnvs = {
|
||||
MYSQL_ROOT_PASSWORD: passwords[0],
|
||||
MYSQL_ROOT_USER: usernames[0],
|
||||
MYSQL_USER: usernames[1],
|
||||
MYSQL_PASSWORD: passwords[1],
|
||||
MYSQL_DATABASE: defaultDatabaseName
|
||||
}
|
||||
image = 'bitnami/mysql:8.0'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/mysql/data`
|
||||
}
|
||||
|
||||
const stack = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[configuration.general.deployId]: {
|
||||
image,
|
||||
networks: [`${docker.network}`],
|
||||
environment: generateEnvs,
|
||||
volumes: [volume],
|
||||
deploy: {
|
||||
replicas: 1,
|
||||
update_config: {
|
||||
parallelism: 0,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
},
|
||||
rollback_config: {
|
||||
parallelism: 0,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
},
|
||||
labels: [
|
||||
'managedBy=coolify',
|
||||
'type=database',
|
||||
'configuration=' + JSON.stringify(configuration)
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[`${docker.network}`]: {
|
||||
external: true
|
||||
}
|
||||
},
|
||||
volumes: {
|
||||
[`${configuration.general.deployId}-${type}-data`]: {
|
||||
external: true
|
||||
}
|
||||
}
|
||||
}
|
||||
await execShellAsync(`mkdir -p ${configuration.general.workdir}`)
|
||||
await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
|
||||
await execShellAsync(
|
||||
`cat ${configuration.general.workdir}/stack.yml | docker stack deploy -c - ${configuration.general.deployId}`
|
||||
)
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
|
||||
fastify.delete('/:dbName', async (request, reply) => {
|
||||
const { dbName } = request.params
|
||||
await execShellAsync(`docker stack rm ${dbName}`)
|
||||
reply.code(200).send({})
|
||||
})
|
||||
}
|
||||
@@ -1,127 +0,0 @@
|
||||
const axios = require('axios')
|
||||
const User = require('../../../models/User')
|
||||
const Settings = require('../../../models/Settings')
|
||||
const cuid = require('cuid')
|
||||
const mongoose = require('mongoose')
|
||||
const jwt = require('jsonwebtoken')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
const githubCodeSchema = {
|
||||
schema: {
|
||||
querystring: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
code: { type: 'string' }
|
||||
},
|
||||
required: ['code']
|
||||
}
|
||||
}
|
||||
}
|
||||
fastify.get('/app', { schema: githubCodeSchema }, async (request, reply) => {
|
||||
const { code } = request.query
|
||||
try {
|
||||
const { data } = await axios({
|
||||
method: 'post',
|
||||
url: `https://github.com/login/oauth/access_token?client_id=${fastify.config.VITE_GITHUB_APP_CLIENTID}&client_secret=${fastify.config.GITHUB_APP_CLIENT_SECRET}&code=${code}`,
|
||||
headers: {
|
||||
accept: 'application/json'
|
||||
}
|
||||
})
|
||||
|
||||
const token = data.access_token
|
||||
const githubAxios = axios.create({
|
||||
baseURL: 'https://api.github.com'
|
||||
})
|
||||
|
||||
githubAxios.defaults.headers.common.Accept = 'Application/json'
|
||||
githubAxios.defaults.headers.common.Authorization = `token ${token}`
|
||||
|
||||
try {
|
||||
let uid = cuid()
|
||||
const { avatar_url } = (await githubAxios.get('/user')).data // eslint-disable-line
|
||||
const email = (await githubAxios.get('/user/emails')).data.filter(
|
||||
(e) => e.primary
|
||||
)[0].email
|
||||
const settings = await Settings.findOne({ applicationName: 'coolify' })
|
||||
const registeredUsers = await User.find().countDocuments()
|
||||
const foundUser = await User.findOne({ email })
|
||||
if (foundUser) {
|
||||
await User.findOneAndUpdate(
|
||||
{ email },
|
||||
{ avatar: avatar_url },
|
||||
{ upsert: true, new: true }
|
||||
)
|
||||
uid = foundUser.uid
|
||||
} else {
|
||||
if (registeredUsers === 0) {
|
||||
const newUser = new User({
|
||||
_id: new mongoose.Types.ObjectId(),
|
||||
email,
|
||||
avatar: avatar_url,
|
||||
uid
|
||||
})
|
||||
const defaultSettings = new Settings({
|
||||
_id: new mongoose.Types.ObjectId()
|
||||
})
|
||||
try {
|
||||
await newUser.save()
|
||||
await defaultSettings.save()
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
reply.code(500).send({ success: false, error: e })
|
||||
return
|
||||
}
|
||||
} else {
|
||||
if (!settings && registeredUsers > 0) {
|
||||
reply.code(500).send('Registration disabled, enable it in settings.')
|
||||
} else {
|
||||
if (!settings.allowRegistration) {
|
||||
reply.code(500).send('You are not allowed here!')
|
||||
} else {
|
||||
const newUser = new User({
|
||||
_id: new mongoose.Types.ObjectId(),
|
||||
email,
|
||||
avatar: avatar_url,
|
||||
uid
|
||||
})
|
||||
try {
|
||||
await newUser.save()
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
reply.code(500).send({ success: false, error: e })
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const jwtToken = jwt.sign({}, fastify.config.JWT_SIGN_KEY, {
|
||||
expiresIn: 15778800,
|
||||
algorithm: 'HS256',
|
||||
audience: 'coolLabs',
|
||||
issuer: 'coolLabs',
|
||||
jwtid: uid,
|
||||
subject: `User:${uid}`,
|
||||
notBefore: -1000
|
||||
})
|
||||
reply
|
||||
.code(200)
|
||||
.redirect(
|
||||
302,
|
||||
`/api/v1/login/github/success?jwtToken=${jwtToken}&ghToken=${token}`
|
||||
)
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
reply.code(500).send({ success: false, error: e })
|
||||
return
|
||||
}
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
fastify.get('/success', async (request, reply) => {
|
||||
return reply.sendFile('bye.html')
|
||||
})
|
||||
}
|
||||
@@ -1,14 +0,0 @@
const Server = require('../../../models/Logs/Server')
module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    try {
      const serverLogs = await Server.find().select('-_id -__v')
      // TODO: Should do better
      return {
        serverLogs
      }
    } catch (error) {
      throw new Error(error)
    }
  })
}
@@ -1,49 +0,0 @@
|
||||
const Settings = require('../../../models/Settings')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
const applicationName = 'coolify'
|
||||
const postSchema = {
|
||||
body: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
allowRegistration: { type: 'boolean' },
|
||||
sendErrors: { type: 'boolean' }
|
||||
},
|
||||
required: []
|
||||
}
|
||||
}
|
||||
|
||||
fastify.get('/', async (request, reply) => {
|
||||
try {
|
||||
let settings = await Settings.findOne({ applicationName }).select('-_id -__v')
|
||||
// TODO: Should do better
|
||||
if (!settings) {
|
||||
settings = {
|
||||
applicationName,
|
||||
allowRegistration: false
|
||||
}
|
||||
}
|
||||
return {
|
||||
settings
|
||||
}
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
|
||||
fastify.post('/', { schema: postSchema }, async (request, reply) => {
|
||||
try {
|
||||
const settings = await Settings.findOneAndUpdate(
|
||||
{ applicationName },
|
||||
{ applicationName, ...request.body },
|
||||
{ upsert: true, new: true }
|
||||
).select('-_id -__v')
|
||||
reply.code(201).send({ settings })
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,5 +0,0 @@
module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    reply.code(200).send('NO')
  })
}
@@ -1,12 +0,0 @@
const { execShellAsync } = require('../../../libs/common')
const { saveServerLog } = require('../../../libs/logging')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    const upgradeP1 = await execShellAsync('bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p1.sh)"')
    await saveServerLog({ message: upgradeP1, type: 'UPGRADE-P-1' })
    reply.code(200).send('I\'m trying, okay?')
    const upgradeP2 = await execShellAsync('docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -u root coolify bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p2.sh)"')
    await saveServerLog({ message: upgradeP2, type: 'UPGRADE-P-2' })
  })
}
@@ -1,20 +0,0 @@
const User = require('../../models/User')
const jwt = require('jsonwebtoken')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    try {
      const { authorization } = request.headers
      if (!authorization) {
        reply.code(401).send({})
        return
      }
      const token = authorization.split(' ')[1]
      const verify = jwt.verify(token, fastify.config.JWT_SIGN_KEY)
      const found = await User.findOne({ uid: verify.jti })
      found ? reply.code(200).send({}) : reply.code(401).send({})
    } catch (error) {
      reply.code(401).send({})
    }
  })
}
@@ -1,119 +0,0 @@
|
||||
const crypto = require('crypto')
|
||||
const { cleanupTmp } = require('../../../libs/common')
|
||||
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
const ApplicationLog = require('../../../models/Logs/Application')
|
||||
const ServerLog = require('../../../models/Logs/Server')
|
||||
|
||||
const { queueAndBuild } = require('../../../libs/applications')
|
||||
const { setDefaultConfiguration, precheckDeployment } = require('../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const cloneRepository = require('../../../libs/applications/github/cloneRepository')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
// TODO: Add this to fastify plugin
|
||||
const postSchema = {
|
||||
body: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
ref: { type: 'string' },
|
||||
repository: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'number' },
|
||||
full_name: { type: 'string' }
|
||||
},
|
||||
required: ['id', 'full_name']
|
||||
},
|
||||
installation: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'number' }
|
||||
},
|
||||
required: ['id']
|
||||
}
|
||||
},
|
||||
required: ['ref', 'repository', 'installation']
|
||||
}
|
||||
}
|
||||
fastify.post('/', { schema: postSchema }, async (request, reply) => {
|
||||
let configuration
|
||||
const hmac = crypto.createHmac('sha256', fastify.config.GITHUP_APP_WEBHOOK_SECRET)
|
||||
const digest = Buffer.from('sha256=' + hmac.update(JSON.stringify(request.body)).digest('hex'), 'utf8')
|
||||
const checksum = Buffer.from(request.headers['x-hub-signature-256'], 'utf8')
|
||||
if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
|
||||
reply.code(500).send({ error: 'Invalid request' })
|
||||
return
|
||||
}
|
||||
|
||||
if (request.headers['x-github-event'] !== 'push') {
|
||||
reply.code(500).send({ error: 'Not a push event.' })
|
||||
return
|
||||
}
|
||||
try {
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
|
||||
configuration = services.find(r => {
|
||||
if (request.body.ref.startsWith('refs')) {
|
||||
const branch = request.body.ref.split('/')[2]
|
||||
if (
|
||||
JSON.parse(r.Spec.Labels.configuration).repository.id === request.body.repository.id &&
|
||||
JSON.parse(r.Spec.Labels.configuration).repository.branch === branch
|
||||
) {
|
||||
return r
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
})
|
||||
if (!configuration) {
|
||||
reply.code(500).send({ error: 'No configuration found.' })
|
||||
return
|
||||
}
|
||||
|
||||
configuration = setDefaultConfiguration(JSON.parse(configuration.Spec.Labels.configuration))
|
||||
await cloneRepository(configuration)
|
||||
const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({ services, configuration })
|
||||
|
||||
if (foundService && !forceUpdate && !imageChanged && !configChanged) {
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
|
||||
return
|
||||
}
|
||||
const alreadyQueued = await Deployment.find({
|
||||
repoId: configuration.repository.id,
|
||||
branch: configuration.repository.branch,
|
||||
organization: configuration.repository.organization,
|
||||
name: configuration.repository.name,
|
||||
domain: configuration.publish.domain,
|
||||
progress: { $in: ['queued', 'inprogress'] }
|
||||
})
|
||||
|
||||
if (alreadyQueued.length > 0) {
|
||||
reply.code(200).send({ message: 'Already in the queue.' })
|
||||
return
|
||||
}
|
||||
queueAndBuild(configuration, imageChanged)
|
||||
|
||||
reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name })
|
||||
} catch (error) {
|
||||
const { id, organization, name, branch } = configuration.repository
|
||||
const { domain } = configuration.publish
|
||||
const { deployId } = configuration.general
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
if (error.name === 'Error') {
|
||||
// Error during runtime
|
||||
await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
|
||||
} else {
|
||||
// Error in my code
|
||||
const payload = { message: error.message, stack: error.stack, type: 'spaghetticode' }
|
||||
if (error.message && error.stack) await new ServerLog(payload).save()
|
||||
if (reply.sent) await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
|
||||
}
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
const schema = {
|
||||
type: 'object',
|
||||
required: [
|
||||
'DOMAIN',
|
||||
'EMAIL',
|
||||
'VITE_GITHUB_APP_CLIENTID',
|
||||
'GITHUB_APP_CLIENT_SECRET',
|
||||
'GITHUB_APP_PRIVATE_KEY',
|
||||
'GITHUP_APP_WEBHOOK_SECRET',
|
||||
'JWT_SIGN_KEY',
|
||||
'SECRETS_ENCRYPTION_KEY'
|
||||
],
|
||||
properties: {
|
||||
DOMAIN: {
|
||||
type: 'string'
|
||||
},
|
||||
EMAIL: {
|
||||
type: 'string'
|
||||
},
|
||||
VITE_GITHUB_APP_CLIENTID: {
|
||||
type: 'string'
|
||||
},
|
||||
GITHUB_APP_CLIENT_SECRET: {
|
||||
type: 'string'
|
||||
},
|
||||
GITHUB_APP_PRIVATE_KEY: {
|
||||
type: 'string'
|
||||
},
|
||||
GITHUP_APP_WEBHOOK_SECRET: {
|
||||
type: 'string'
|
||||
},
|
||||
JWT_SIGN_KEY: {
|
||||
type: 'string'
|
||||
},
|
||||
DOCKER_ENGINE: {
|
||||
type: 'string',
|
||||
default: '/var/run/docker.sock'
|
||||
},
|
||||
DOCKER_NETWORK: {
|
||||
type: 'string',
|
||||
default: 'coollabs'
|
||||
},
|
||||
SECRETS_ENCRYPTION_KEY: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { schema }
|
||||
api/server.js
@@ -1,117 +0,0 @@
|
||||
require('dotenv').config()
|
||||
const fs = require('fs')
|
||||
const util = require('util')
|
||||
const axios = require('axios')
|
||||
const mongoose = require('mongoose')
|
||||
const path = require('path')
|
||||
const { saveServerLog } = require('./libs/logging')
|
||||
const { execShellAsync } = require('./libs/common')
|
||||
const { purgeImagesContainers, cleanupStuckedDeploymentsInDB } = require('./libs/applications/cleanup')
|
||||
const fastify = require('fastify')({
|
||||
trustProxy: true,
|
||||
logger: {
|
||||
level: 'error'
|
||||
}
|
||||
})
|
||||
fastify.register(require('../api/libs/http-error'))
|
||||
|
||||
const { schema } = require('./schema')
|
||||
|
||||
process.on('unhandledRejection', async (reason, p) => {
|
||||
await saveServerLog({ message: reason.message, type: 'unhandledRejection' })
|
||||
})
|
||||
|
||||
fastify.register(require('fastify-env'), {
|
||||
schema,
|
||||
dotenv: true
|
||||
})
|
||||
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
fastify.register(require('fastify-static'), {
|
||||
root: path.join(__dirname, '../dist/')
|
||||
})
|
||||
|
||||
fastify.setNotFoundHandler(function (request, reply) {
|
||||
reply.sendFile('index.html')
|
||||
})
|
||||
} else {
|
||||
fastify.register(require('fastify-static'), {
|
||||
root: path.join(__dirname, '../public/')
|
||||
})
|
||||
}
|
||||
|
||||
fastify.register(require('./app'), { prefix: '/api/v1' })
|
||||
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
mongoose.connect(
|
||||
`mongodb://${process.env.MONGODB_USER}:${process.env.MONGODB_PASSWORD}@${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}/${process.env.MONGODB_DB}?authSource=${process.env.MONGODB_DB}&readPreference=primary&ssl=false`,
|
||||
{ useNewUrlParser: true, useUnifiedTopology: true, useFindAndModify: false }
|
||||
)
|
||||
} else {
|
||||
mongoose.connect(
|
||||
'mongodb://localhost:27017/coolify?&readPreference=primary&ssl=false',
|
||||
{ useNewUrlParser: true, useUnifiedTopology: true, useFindAndModify: false }
|
||||
)
|
||||
}
|
||||
|
||||
mongoose.connection.on(
|
||||
'error',
|
||||
console.error.bind(console, 'connection error:')
|
||||
)
|
||||
mongoose.connection.once('open', async function () {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
fastify.listen(3000, '0.0.0.0')
|
||||
console.log('Coolify API is up and running in production.')
|
||||
} else {
|
||||
const logFile = fs.createWriteStream('api/development/console.log', { flags: 'w' })
|
||||
const logStdout = process.stdout
|
||||
|
||||
console.log = function (d) {
|
||||
logFile.write(`[INFO]: ${util.format(d)}\n`)
|
||||
logStdout.write(util.format(d) + '\n')
|
||||
}
|
||||
|
||||
console.error = function (d) {
|
||||
logFile.write(`[ERROR]: ${util.format(d)}\n`)
|
||||
logStdout.write(util.format(d) + '\n')
|
||||
}
|
||||
|
||||
console.warn = function (d) {
|
||||
logFile.write(`[WARN]: ${util.format(d)}\n`)
|
||||
logStdout.write(util.format(d) + '\n')
|
||||
}
|
||||
|
||||
fastify.listen(3001)
|
||||
console.log('Coolify API is up and running in development.')
|
||||
}
|
||||
try {
|
||||
const { main } = (await axios.get('https://get.coollabs.io/version.json')).data.coolify
|
||||
if (main.clearServerLogs) {
|
||||
await mongoose.connection.db.dropCollection('logs-servers')
|
||||
}
|
||||
} catch (error) {
|
||||
// Could not cleanup logs-servers collection
|
||||
}
|
||||
// On start cleanup inprogress/queued deployments.
|
||||
try {
|
||||
await cleanupStuckedDeploymentsInDB()
|
||||
} catch (error) {
|
||||
// Could not cleanup DB 🤔
|
||||
}
|
||||
try {
|
||||
// Doing because I do not want to prune these images. Prune skips coolify-reserve labeled images.
|
||||
const basicImages = ['nginx:stable-alpine', 'node:lts', 'ubuntu:20.04', 'php:apache', 'rust:latest']
|
||||
for (const image of basicImages) {
|
||||
await execShellAsync(`echo "FROM ${image}" | docker build --label coolify-reserve=true -t ${image} -`)
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Could not pull some basic images from Docker Hub.')
|
||||
console.log(error)
|
||||
}
|
||||
try {
|
||||
await purgeImagesContainers()
|
||||
} catch (error) {
|
||||
console.log('Could not purge containers/images.')
|
||||
console.log(error)
|
||||
}
|
||||
})
|
||||
data/docker/daemon.json (new file)
@@ -0,0 +1,11 @@
{
  "log-driver": "json-file",
  "log-opts": {
    "max-size": "100m",
    "max-file": "5"
  },
  "features": {
    "buildkit": true
  },
  "live-restore": true
}
data/haproxy/dataplaneapi.hcl (new file)
@@ -0,0 +1,29 @@
config_version = 2
name = "easy_gar"
mode = "single"
status = "null"

dataplaneapi {
  host = "0.0.0.0"
  port = 5555

  transaction {
    transaction_dir = "/tmp/haproxy"
  }

  advertised {
    api_address = ""
    api_port = 0
  }
}

haproxy {
  config_file = "/usr/local/etc/haproxy/haproxy.cfg"
  haproxy_bin = "/usr/local/sbin/haproxy"

  reload {
    reload_delay = 2
    reload_cmd = "kill -HUP 1"
    restart_cmd = "kill -SIGUSR2 1"
  }
}
data/haproxy/haproxy.cfg-http.template (new file)
@@ -0,0 +1,19 @@
global
  log stdout format raw local0 debug

defaults
  mode http
  log global
  timeout http-request 60s
  timeout connect 10s
  timeout client 60s
  timeout server 60s

frontend "${APP}"
  mode http
  bind *:"${PORT}" name "${APP}"
  default_backend "${APP}"

backend "${APP}"
  mode http
  server "${APP}" "${APP}":"${PRIVATE_PORT}" check
data/haproxy/haproxy.cfg-tcp.template (new file)
@@ -0,0 +1,15 @@
global
  log stdout format raw local0 debug

defaults
  mode tcp
  log global

frontend "${APP}"
  mode tcp
  bind *:"${PORT}" name "${APP}"
  default_backend "${APP}"

backend "${APP}"
  mode tcp
  server "${APP}" "${APP}":"${PRIVATE_PORT}" check
data/haproxy/haproxy.cfg.template (new file)
@@ -0,0 +1,38 @@
global
  stats socket /var/run/api.sock user haproxy group haproxy mode 660 level admin expose-fd listeners
  log stdout format raw local0 debug

defaults
  mode http
  log global
  timeout http-request 60s
  timeout connect 10s
  timeout client 60s
  timeout server 60s

userlist haproxy-dataplaneapi
  user admin insecure-password "${HAPROXY_PASSWORD}"

frontend http
  mode http
  bind :80
  bind :443 ssl crt /usr/local/etc/haproxy/ssl/ alpn h2,http/1.1
  acl is_certbot path_beg /.well-known/acme-challenge/
  use_backend backend-certbot if is_certbot
  use_backend %[req.hdr(host),lower]

frontend stats
  bind *:8404
  stats enable
  stats uri /
  stats refresh 5s
  stats admin if TRUE
  stats auth "${HAPROXY_USERNAME}:${HAPROXY_PASSWORD}"

backend backend-certbot
  mode http
  server certbot host.docker.internal:9080

program api
  command /usr/bin/dataplaneapi -f /usr/local/etc/haproxy/dataplaneapi.hcl --userlist haproxy-dataplaneapi
  no option start-on-reload
data/haproxy/ssl/default.pem (new file)
@@ -0,0 +1,81 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFETCCAvkCFE/5JtU5geT5hOjFuQPiLgCYHwsOMA0GCSqGSIb3DQEBCwUAMEUx
|
||||
CzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRl
|
||||
cm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMjExMDExMDkwNzQ1WhcNMzExMDA5MDkw
|
||||
NzQ1WjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UE
|
||||
CgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOC
|
||||
Ag8AMIICCgKCAgEArEZDhvc3ew2Gb8pvJlUrh5x+L5iqNxDYU3cOcOgeELCmOyAS
|
||||
cH+/1xrsHQI05xWPpz6VAja2NKl4OP90getPPkiQV1xAg5/gsOsRL8Pi/MwvQKfZ
|
||||
ObyW3t+sfFb1K3sVnm8bgk5F9OIVyAtzAx+Y53muEJsHOHpaEidnwbY2VE0zQB/G
|
||||
DBQovrMefAwmH4RPqFor6NzFMKVRi33pQjYmcfCVFZylrDeCn8T7llV0lrnWqv6z
|
||||
sGKfL3E4nHvyh/RsGNOXy+XQMxB9SA3j6hFTNtgQIPO/lxptz/+BLZoUt48nHZtr
|
||||
sc5j+3sn8c1O9e6MjI/1q8lvZsk7ZsWCGSwCOvJ9LnxCWOEQUUfqIvGLsk7NJQgf
|
||||
IkodZH9sW5Sjlro21+WBf3nvqlZ8g7r6K1RJOA8AtUiCaN/+o65t86WkwCSwQXcm
|
||||
+nArcwddOx2HN9sFrjJ59N1eYEDGmyK3BdppYuVXay705PmxotR1hCBvnXOb34dn
|
||||
gZxsxFTohr97JvEdNtGSNz4USyZPjgIMF/Gu8ruh0gQ1byhmayRqMGEqMAh58Lvb
|
||||
3HYsd3Bf+LB9PpaXLAdKzsTZ8a28zyDYo8a70h7iBRxhmFwa+Df+pSmUEdzhejfx
|
||||
7jEslhBQSQDmllaHrHc1G6H/w/u+04vi1joaLeLEGQclinKLeU88s9j3zzUCAwEA
|
||||
ATANBgkqhkiG9w0BAQsFAAOCAgEAGQED96wBGzbMUlk9mIvZeLerzEAB3YfgfAYa
|
||||
EAi79QHxM8UX06xmA2xtGvJSvlU8Xods9vxpBmIUnbDRTIAHNDApT19+vPg/iSfQ
|
||||
1J9Fo4b5kjmWL6SalEdYcxqH9V/QndHta4MXP91u/ZsJ/exwDTZFatXsfGkPjUmN
|
||||
Xp+Ip6iQg7+kV3JpRnMSbevj2Oujs7qTAdQedH38ZTNS0AaM5gvZyQkccCTKNBQ4
|
||||
3O8MhCau7U0EUirndqsQXa0D3o78FpKztLNXSM7919jU2y36kMrWXfArfrBKHJ9b
|
||||
nZeO7nkbHgvmVS8NTg9pR7L7u+YXTa2p1H2ZnpMQvruV7iL/Pb1H2N68UdvnQScL
|
||||
sgacGSzM6b6PVdWRbECiuzC0UyWLZo/LoU3DQFGoiDQ4e/B3+TMrvgFI0CnpAQ4w
|
||||
qiaVFJlRQeF4GaS4qHsN28OBliFATB3TXONFnz1aVkQlEHuh2+JbuL1b1lxvlX5t
|
||||
gBbu/GgAcP4Uy2z4PoDmempAvNi2kCcLB98m+jbFSMSB3nkrdj6MzyN7kW9bhk3T
|
||||
ClimxDmc23seprwLcxJUPP5q+HRB1VLKXLwIYxu+Up3g29d4k1Iy9nUUP9lITLTk
|
||||
blJxZ2BPuQqTLzyqmAEWa1HxljFC1b7oMp9a98PbxC3MxUggM7zx/rgXWxM8osib
|
||||
uwSZmw0=
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIJKAIBAAKCAgEArEZDhvc3ew2Gb8pvJlUrh5x+L5iqNxDYU3cOcOgeELCmOyAS
|
||||
cH+/1xrsHQI05xWPpz6VAja2NKl4OP90getPPkiQV1xAg5/gsOsRL8Pi/MwvQKfZ
|
||||
ObyW3t+sfFb1K3sVnm8bgk5F9OIVyAtzAx+Y53muEJsHOHpaEidnwbY2VE0zQB/G
|
||||
DBQovrMefAwmH4RPqFor6NzFMKVRi33pQjYmcfCVFZylrDeCn8T7llV0lrnWqv6z
|
||||
sGKfL3E4nHvyh/RsGNOXy+XQMxB9SA3j6hFTNtgQIPO/lxptz/+BLZoUt48nHZtr
|
||||
sc5j+3sn8c1O9e6MjI/1q8lvZsk7ZsWCGSwCOvJ9LnxCWOEQUUfqIvGLsk7NJQgf
|
||||
IkodZH9sW5Sjlro21+WBf3nvqlZ8g7r6K1RJOA8AtUiCaN/+o65t86WkwCSwQXcm
|
||||
+nArcwddOx2HN9sFrjJ59N1eYEDGmyK3BdppYuVXay705PmxotR1hCBvnXOb34dn
|
||||
gZxsxFTohr97JvEdNtGSNz4USyZPjgIMF/Gu8ruh0gQ1byhmayRqMGEqMAh58Lvb
|
||||
3HYsd3Bf+LB9PpaXLAdKzsTZ8a28zyDYo8a70h7iBRxhmFwa+Df+pSmUEdzhejfx
|
||||
7jEslhBQSQDmllaHrHc1G6H/w/u+04vi1joaLeLEGQclinKLeU88s9j3zzUCAwEA
|
||||
AQKCAgEAm1/z33Jwk4crTQAjJ0uBqxm1pW/ndSq4MO8cEzEGjL8F7iWK+/P8LiGV
|
||||
+sPWuuRzX7/N3OVDiFOgnqeniNWV7vK7XE9T0GMN4ALiyVW/D4mIxKOeA7jXycOq
|
||||
aap0DPdCFFbZVLkL10Vhp77LyHFjEsJn/4oTBRk0y1LG/as9bOMD6j29/X7hEL20
|
||||
LOU4LQzEW26YU7lqD+nKlijFjHYSTolRrOBPe/fE1BxxXLFOKfMKbcaygc8xCzTu
|
||||
fhQ8Nep45BtSuQ9Yq/WfSLFecemWR8yvH0k37yxjBknHVD23maZ+/PEEPKWM/2+g
|
||||
IzGsmZrBILVmOb2/v9CWxqY0JEfQ6aU/nLW1ZiXSOIPmKEooK/hPVxFIyQ1yET4G
|
||||
kQZ5RroY/QDrI14ms8P0iDzZ8K3EFKUyjiBbc83Mb0YIZ4hKd76gioOUIPeEQB+y
|
||||
QLZ8Cb9YS3V8uIOJg+F4xlpJSePAZphfSxRLojSiKUeCs8gNUxGz0zwiMNf1p76F
|
||||
8CaLgvSwT/cgQjWitMeeE1Ha+8lY8VzESmd10gPk2uES/qdrmMhwFwovPqqrtMqj
|
||||
kMrFKNy7Crka6me3dhKEtryRTk5ho2IS/VCy/eXQ7lUW8Cl4uFxmjpHYSJMqDWvC
|
||||
vu1p3/B1psSZIy2V2M9QqwZCysHqvGJMOCvYmnc6T62+kDRKQ7ECggEBAOS/1ptA
|
||||
75OBAsHLovkspiCvn3gb/VTvH6LOvxYTohjr5iBeX137vg0aR1rg0jwcdf8EEJYw
|
||||
4YxOid7KmV7O25ujzduQgwpVgujnJAeBLeLDC5dVbq3PQah61AvR2O/7t+Ls3oxi
|
||||
cWh/OHC6SeZ/n406cxSCCUpVwtgHTaNFzaSmpDdEOSbjvXjQQjiRsG7j/1u64riq
|
||||
RlJ/hIUlcys0g94yeN/5lPaNfsq0+vTSAYuTVVXVbEntwWcZVZxnQJviZVgJ99zM
|
||||
RzE3sprvvr+I5QQ4FRMn0W9U7gblSJd5FGEL8gye4SRd+LxoUL4DR6pfuwd0vlXA
|
||||
g+dgiOKoHm2Bb8cCggEBAMDMHMNR6uipdMivPjBTlklnaYd9SY3c5x65yNtx4CNh
|
||||
rXyvy/6YvME7PPnKQZ8TQ4DkbVDUCAF7wnyAJJ7eWMav3bNlqWWjzaBvQz4Fn0XG
|
||||
/1W5R1CoJ9DW5FY3f9efJzQTmfn6dIlCx1gW7XfVBZQqI1LORMWUYenk0KAvjlg2
|
||||
UHYYl/BT1RhtYyzOJHto1PaUvCNDiOiDWAkTpLigYm7hGgVmcSwbo4F54SNUHdV7
|
||||
yz3CorCM4VsEYYSL80WHYxf/Zc+mcIDoWOdog0iEeK/Zu++yG5lPRxC1862GmsZA
|
||||
J06BMqX+NVGOfiGcVaGH+SZJXeFcrr7F8ZWp6y38QSMCggEAJwzo4hAv1gqMIfFV
|
||||
nRwWMDZLDwIYOUupJu4MiQRJA+AhpRz3QuAbDbmSvNzshv6E1kgnXLxzhLRTrQkB
|
||||
LcI6k1NfbUA6XqVCd+gdqnpPDwslC2y2PE3Jc62kTXBBjJZ4SfEN/QFBQwmU5Qmo
|
||||
XAUlg8KaqsGYPGxvmtmEU38zIAyitByddRoj2mATLf0RFZ0ulsZMtiG7Z5IFWYWP
|
||||
J60LZf9Py0ycNYrqPkivHuRLBzzbsI+CsQw5nBQjHVQzH2mCy4jIG5V0Ad70Sqbq
|
||||
9V+1WQcJ8f82Lb9d8ydpQRKWfArCA42L+d1g/SkBv65nqZo2H4u6goEfA3zjYW45
|
||||
44/ZOQKCAQB440MhwYqe6ioc76z51l+ElUAZQZjOR/XvUSS9XHDjHosOhJhPgmvQ
|
||||
aZl5MrXkzcpk1lYo+Vovu+8d66eKqfZWVs2XgCYwYf48G6e5CwNsWDOgB7XMwDN/
|
||||
Ak9YNCKIC/Yj9Cp3EPDjZCjkdjPeEIcX+Tf+4vFCRiEC7INX/ZmufBgFhLQ4cAhM
|
||||
8cHexT8g1oG6P1acces1h626u0NstLwjtCeBvVM3CfmC5O4jHco7Iw00I4epVhyz
|
||||
2lJfLvWR4itjT7QB+OXQHmAocWLoJJAcC1WJHU+q2IfB1aT+aElCB9XdpqsgY/4A
|
||||
rm0uG/2hdEXoGNaxyVCUtD8fzdR2GBarAoIBACCYXXREMYb6i1TbR5Q2LvVQUPsO
|
||||
Hgnbr+PLmx93rfUzDcr5r+cJgryjYQDKJTRleDJhg80M3RYOq+IOdl6yxOmRATmJ
|
||||
ZDgwRVD1F6VFxBJePcAW30FI5CoBogsHaZQDKGsopEaDRLK5E3QHUVG5qj323RdI
|
||||
Unf1++wI4nw+qwsVf1gSTcAdzq29v3NIWUyvvrmTNO4MxFTt0/lqkCsdT/2EFQDB
|
||||
/yQ1HCtQQjXE1xlYh0BnMZp9+4FmrlMC9Oj5H0dDSWmInPION0ft8/SjBj4TQ5Qi
|
||||
2DUo1WOWQnVR8Bxz0B8McXS+dOmgLe8ws4/ez7DoEVqHTgirKqBg5qRFQKw=
|
||||
-----END RSA PRIVATE KEY-----
|
||||
db/.gitkeep (new, empty file)
docker-compose-dev.yaml (new file)
@@ -0,0 +1,20 @@
version: '3.8'

services:
  redis:
    image: 'bitnami/redis:6.2'
    container_name: coolify-redis
    environment:
      - ALLOW_EMPTY_PASSWORD=yes
    networks:
      - coolify-infra
    ports:
      - target: 6379
        published: 6379
        protocol: tcp
        mode: host

networks:
  coolify-infra:
    attachable: true
    name: coolify-infra
docker-compose-haproxy.yaml (new file)
@@ -0,0 +1,23 @@
version: '3.8'

services:
  haproxy:
    image: coollabsio/coolify-haproxy-alpine:latest
    container_name: coolify-haproxy
    extra_hosts:
      - 'host.docker.internal:host-gateway'
    networks:
      - coolify
    volumes:
      - './data/haproxy/:/usr/local/etc/haproxy/'
    ports:
      - '80:80'
      - '443:443'
      - '8404:8404'
      - '5555:5555'
      - '3306:3306'

networks:
  coolify:
    attachable: true
    name: coolify
docker-compose.yaml (new file)
@@ -0,0 +1,43 @@
version: '3.8'

services:
  coolify:
    image: coollabsio/coolify:${TAG:-latest}
    restart: always
    container_name: coolify
    ports:
      - target: 3000
        published: 3000
        protocol: tcp
        mode: host
    volumes:
      - 'coolify-db:/app/db'
      - 'coolify-ssl-certs:/app/ssl'
      - 'coolify-letsencrypt:/etc/letsencrypt'
      - '/var/run/docker.sock:/var/run/docker.sock'
    env_file:
      - '.env'
    networks:
      - coolify-infra
    depends_on: ['redis']
  redis:
    image: bitnami/redis:6.2
    restart: always
    container_name: coolify-redis
    environment:
      - ALLOW_EMPTY_PASSWORD=yes
    networks:
      - coolify-infra

networks:
  coolify-infra:
    attachable: true
    name: coolify-infra

volumes:
  coolify-db:
    name: coolify-db
  coolify-ssl-certs:
    name: coolify-ssl-certs
  coolify-letsencrypt:
    name: coolify-letsencrypt
haproxy-http.Dockerfile (new file)
@@ -0,0 +1,6 @@
FROM haproxytech/haproxy-alpine:2.5
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe

COPY data/haproxy/haproxy.cfg-http.template /usr/local/etc/haproxy/haproxy.cfg
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem
haproxy-tcp.Dockerfile (new file)
@@ -0,0 +1,6 @@
FROM haproxytech/haproxy-alpine:2.5
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe

COPY data/haproxy/haproxy.cfg-tcp.template /usr/local/etc/haproxy/haproxy.cfg
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem
haproxy.Dockerfile (new file)
@@ -0,0 +1,6 @@
FROM haproxytech/haproxy-alpine:2.5
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe

COPY data/haproxy/haproxy.cfg.template /usr/local/etc/haproxy/haproxy.cfg
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem
index.html
@@ -1,20 +0,0 @@
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="UTF-8" />
  <link rel="icon" href="/favicon.png" />
  <link rel="preload" as="image" href="/favicon.png">
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>coolify: Heroku & Netlify alternative</title>
  <link rel="dns-prefetch" href="https://cdn.coollabs.io/" />
  <link rel="preconnect" href="https://cdn.coollabs.io/" crossorigin="" />
  <link rel="stylesheet" href="https://cdn.coollabs.io/fonts/montserrat/montserrat.css" />
  <link rel="stylesheet" href="https://cdn.coollabs.io/css/microtip-0.2.2.min.css" />
</head>

<body>
  <script type="module" src="/src/index.js"></script>
</body>

</html>
install.sh
@@ -1,88 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
preTasks() {
|
||||
echo '
|
||||
##############################
|
||||
#### Pulling Git Updates #####
|
||||
##############################'
|
||||
GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" git pull
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo '
|
||||
####################################
|
||||
#### Ooops something not okay! #####
|
||||
####################################'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo '
|
||||
##############################
|
||||
#### Building Base Image #####
|
||||
##############################'
|
||||
docker build --label coolify-reserve=true -t coolify-base -f install/Dockerfile-base .
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo '
|
||||
####################################
|
||||
#### Ooops something not okay! #####
|
||||
####################################'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo '
|
||||
##################################
|
||||
#### Checking configuration. #####
|
||||
##################################'
|
||||
docker run --rm -w /usr/src/app coolify-base node install/install.js --check
|
||||
if [ $? -ne 0 ]; then
|
||||
echo '
|
||||
##################################
|
||||
#### Missing configuration ! #####
|
||||
##################################'
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
case "$1" in
|
||||
"all")
|
||||
preTasks
|
||||
echo '
|
||||
#################################
|
||||
#### Rebuilding everything. #####
|
||||
#################################'
|
||||
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type all
|
||||
;;
|
||||
"coolify")
|
||||
preTasks
|
||||
echo '
|
||||
##############################
|
||||
#### Rebuilding Coolify. #####
|
||||
##############################'
|
||||
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type coolify
|
||||
;;
|
||||
"proxy")
|
||||
preTasks
|
||||
echo '
|
||||
############################
|
||||
#### Rebuilding Proxy. #####
|
||||
############################'
|
||||
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type proxy
|
||||
;;
|
||||
"upgrade-phase-1")
|
||||
preTasks
|
||||
echo '
|
||||
################################
|
||||
#### Upgrading Coolify P1. #####
|
||||
################################'
|
||||
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type upgrade
|
||||
;;
|
||||
"upgrade-phase-2")
|
||||
echo '
|
||||
################################
|
||||
#### Upgrading Coolify P2. #####
|
||||
################################'
|
||||
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/update.js --type upgrade
|
||||
;;
|
||||
*)
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
@@ -1,5 +0,0 @@
|
||||
FROM coolify-base
|
||||
WORKDIR /usr/src/app
|
||||
RUN pnpm build
|
||||
CMD ["pnpm", "start"]
|
||||
EXPOSE 3000
|
||||
@@ -1,19 +0,0 @@
|
||||
FROM ubuntu:20.04 as binaries
|
||||
RUN apt update && apt install -y curl gnupg2 ca-certificates
|
||||
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
|
||||
RUN echo 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' >> /etc/apt/sources.list
|
||||
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
|
||||
RUN chmod +x /usr/bin/envsubst
|
||||
RUN apt update && apt install -y docker-ce-cli && apt clean all
|
||||
|
||||
FROM node:14 as modules
|
||||
COPY --from=binaries /usr/bin/docker /usr/bin/docker
|
||||
COPY --from=binaries /usr/bin/envsubst /usr/bin/envsubst
|
||||
RUN curl -L https://pnpm.js.org/pnpm.js | node - add --global pnpm
|
||||
WORKDIR /usr/src/app
|
||||
COPY ./package*.json .
|
||||
RUN pnpm install
|
||||
|
||||
FROM modules
|
||||
WORKDIR /usr/src/app
|
||||
COPY . .
|
||||
@@ -1,15 +0,0 @@
|
||||
FROM node:lts
|
||||
LABEL coolify-preserve=true
|
||||
WORKDIR /usr/src/app
|
||||
RUN curl -fsSL https://download.docker.com/linux/static/stable/x86_64/docker-20.10.6.tgz | tar -xzvf - docker/docker -C . --strip-components 1
|
||||
RUN mv /usr/src/app/docker /usr/bin/docker
|
||||
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
|
||||
RUN curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -o /usr/bin/jq
|
||||
RUN chmod +x /usr/bin/envsubst /usr/bin/jq /usr/bin/docker
|
||||
RUN curl -f https://get.pnpm.io/v6.js | node - add --global pnpm
|
||||
COPY ./*package.json .
|
||||
RUN pnpm install
|
||||
COPY . .
|
||||
RUN pnpm build
|
||||
CMD ["pnpm", "start"]
|
||||
EXPOSE 3000
|
||||
@@ -1,10 +0,0 @@
Some of these files are kept only for backwards compatibility.

After 2 months they will be cleaned up:

- rm ./install.js and ./update.js
- rm ../install.sh
- rm ./Dockerfile-base
- rm ./obs
- rm ./check.js ("No need to check the env file. During installation it is checked by the installer. If you change it between two upgrades: 🤷♂️")
- Rename Dockerfile-new to Dockerfile
@@ -1,24 +0,0 @@
|
||||
require('dotenv').config()
|
||||
const fastify = require('fastify')()
|
||||
const { schema } = require('../api/schema')
|
||||
|
||||
checkConfig().then(() => {
|
||||
console.log('Config: OK')
|
||||
}).catch((err) => {
|
||||
console.log('Config: NOT OK')
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
|
||||
function checkConfig () {
|
||||
return new Promise((resolve, reject) => {
|
||||
fastify.register(require('fastify-env'), {
|
||||
schema,
|
||||
dotenv: true
|
||||
})
|
||||
.ready((err) => {
|
||||
if (err) reject(err)
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -1,73 +0,0 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
proxy:
|
||||
image: traefik:v2.4
|
||||
hostname: coollabs-proxy
|
||||
ports:
|
||||
- target: 80
|
||||
published: 80
|
||||
protocol: tcp
|
||||
mode: host
|
||||
- target: 443
|
||||
published: 443
|
||||
protocol: tcp
|
||||
mode: host
|
||||
- target: 8080
|
||||
published: 8080
|
||||
protocol: tcp
|
||||
mode: host
|
||||
command:
|
||||
- --api.insecure=true
|
||||
- --api.dashboard=true
|
||||
- --api.debug=true
|
||||
- --log.level=ERROR
|
||||
- --providers.docker=true
|
||||
- --providers.docker.swarmMode=true
|
||||
- --providers.docker.exposedbydefault=false
|
||||
- --providers.docker.network=${DOCKER_NETWORK}
|
||||
- --providers.docker.swarmModeRefreshSeconds=1s
|
||||
- --entrypoints.web.address=:80
|
||||
- --entrypoints.websecure.address=:443
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
networks:
|
||||
- ${DOCKER_NETWORK}
|
||||
deploy:
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
replicas: 1
|
||||
placement:
|
||||
constraints:
|
||||
- node.role == manager
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.api.entrypoints=websecure"
|
||||
- "traefik.http.routers.api.service=api@internal"
|
||||
- "traefik.http.routers.api.middlewares=auth"
|
||||
- "traefik.http.services.traefik.loadbalancer.server.port=80"
|
||||
- "traefik.http.services.traefik.loadbalancer.server.port=443"
|
||||
|
||||
# Global redirect www to non-www
|
||||
- "traefik.http.routers.www-catchall.rule=hostregexp(`{host:www.(.+)}`)"
|
||||
- "traefik.http.routers.www-catchall.entrypoints=web"
|
||||
- "traefik.http.routers.www-catchall.middlewares=redirect-www-to-nonwww"
|
||||
- "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.regex=^http://(?:www\\.)?(.+)"
|
||||
- "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.replacement=http://$$$${1}"
|
||||
|
||||
# Global redirect http to https
|
||||
- "traefik.http.routers.http-catchall.rule=hostregexp(`{host:.+}`)"
|
||||
- "traefik.http.routers.http-catchall.entrypoints=web"
|
||||
- "traefik.http.routers.http-catchall.middlewares=redirect-to-https"
|
||||
|
||||
- "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
|
||||
- "traefik.http.middlewares.global-compress.compress=true"
|
||||
|
||||
networks:
|
||||
${DOCKER_NETWORK}:
|
||||
driver: overlay
|
||||
name: ${DOCKER_NETWORK}
|
||||
external: true
|
||||
|
||||
@@ -1,98 +0,0 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
proxy:
|
||||
image: traefik:v2.4
|
||||
hostname: coollabs-proxy
|
||||
ports:
|
||||
- target: 80
|
||||
published: 80
|
||||
protocol: tcp
|
||||
mode: host
|
||||
- target: 443
|
||||
published: 443
|
||||
protocol: tcp
|
||||
mode: host
|
||||
command:
|
||||
- --api.insecure=false
|
||||
- --api.dashboard=false
|
||||
- --api.debug=false
|
||||
- --log.level=ERROR
|
||||
- --providers.docker=true
|
||||
- --providers.docker.swarmMode=true
|
||||
- --providers.docker.exposedbydefault=false
|
||||
- --providers.docker.network=${DOCKER_NETWORK}
|
||||
- --providers.docker.swarmModeRefreshSeconds=1s
|
||||
- --entrypoints.web.address=:80
|
||||
- --entrypoints.websecure.address=:443
|
||||
- --certificatesresolvers.letsencrypt.acme.httpchallenge=true
|
||||
- --certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web
|
||||
- --certificatesresolvers.letsencrypt.acme.email=${EMAIL}
|
||||
- --certificatesresolvers.letsencrypt.acme.storage=/data/coolify/acme.json
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
- /data/coolify:/data/coolify
|
||||
networks:
|
||||
- ${DOCKER_NETWORK}
|
||||
deploy:
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
replicas: 1
|
||||
placement:
|
||||
constraints:
|
||||
- node.role == manager
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.api.entrypoints=websecure"
|
||||
- "traefik.http.routers.api.service=api@internal"
|
||||
- "traefik.http.routers.api.middlewares=auth"
|
||||
- "traefik.http.services.traefik.loadbalancer.server.port=80"
|
||||
- "traefik.http.services.traefik.loadbalancer.server.port=443"
|
||||
|
||||
# Global redirect www to non-www
|
||||
- "traefik.http.routers.www-catchall.rule=hostregexp(`{host:www.(.+)}`)"
|
||||
- "traefik.http.routers.www-catchall.entrypoints=web"
|
||||
- "traefik.http.routers.www-catchall.middlewares=redirect-www-to-nonwww"
|
||||
- "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.regex=^http://(?:www\\.)?(.+)"
|
||||
- "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.replacement=http://$$$${1}"
|
||||
|
||||
# Global redirect http to https
|
||||
- "traefik.http.routers.http-catchall.rule=hostregexp(`{host:.+}`)"
|
||||
- "traefik.http.routers.http-catchall.entrypoints=web"
|
||||
- "traefik.http.routers.http-catchall.middlewares=redirect-to-https"
|
||||
|
||||
- "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
|
||||
- "traefik.http.middlewares.global-compress.compress=true"
|
||||
|
||||
coolify:
|
||||
image: coolify
|
||||
hostname: coollabs-coolify
|
||||
env_file:
|
||||
- .env
|
||||
networks:
|
||||
- ${DOCKER_NETWORK}
|
||||
command: "yarn start"
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
deploy:
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
replicas: 1
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.coolify.entrypoints=websecure"
|
||||
- "traefik.http.routers.coolify.tls.certresolver=letsencrypt"
|
||||
- "traefik.http.routers.coolify.rule=Host(`${DOMAIN}`) && PathPrefix(`/`)"
|
||||
- "traefik.http.services.coolify.loadbalancer.server.port=3000"
|
||||
- "traefik.http.routers.coolify.middlewares=global-compress"
|
||||
|
||||
networks:
|
||||
${DOCKER_NETWORK}:
|
||||
driver: overlay
|
||||
name: ${DOCKER_NETWORK}
|
||||
external: true
|
||||
|
||||
@@ -1,55 +0,0 @@
require('dotenv').config()
const { program } = require('commander')
const fastify = require('fastify')()
const { schema } = require('../api/schema')
const shell = require('shelljs')
const user = shell.exec('whoami', { silent: true }).stdout.replace('\n', '')

program.version('0.0.1')
program
  .option('-d, --debug', 'Debug outputs.')
  .option('-c, --check', 'Only checks configuration.')
  .option('-t, --type <type>', 'Deploy type.')

program.parse(process.argv)

const options = program.opts()
if (options.check) {
  checkConfig().then(() => {
    console.log('Config: OK')
  }).catch((err) => {
    console.log('Config: NOT OK')
    console.error(err)
    process.exit(1)
  })
} else {
  if (user !== 'root') {
    console.error(`Please run as root! Current user: ${user}`)
    process.exit(1)
  }
  shell.exec(`docker network create ${process.env.DOCKER_NETWORK} --driver overlay`, { silent: !options.debug })
  shell.exec('docker build -t coolify -f install/Dockerfile .')
  if (options.type === 'all') {
    shell.exec('docker stack rm coollabs-coolify', { silent: !options.debug })
  } else if (options.type === 'coolify') {
    shell.exec('docker service rm coollabs-coolify_coolify')
  } else if (options.type === 'proxy') {
    shell.exec('docker service rm coollabs-coolify_proxy')
  }
  if (options.type !== 'upgrade') {
    shell.exec('set -a && source .env && set +a && envsubst < install/coolify-template.yml | docker stack deploy -c - coollabs-coolify', { silent: !options.debug, shell: '/bin/bash' })
  }
}

function checkConfig () {
  return new Promise((resolve, reject) => {
    fastify.register(require('fastify-env'), {
      schema,
      dotenv: true
    })
      .ready((err) => {
        if (err) reject(err)
        resolve()
      })
  })
}
@@ -1,4 +0,0 @@
FROM coolify-base-nodejs
WORKDIR /usr/src/app
COPY . .
RUN pnpm install
@@ -1,6 +0,0 @@
FROM node:lts
LABEL coolify-preserve=true
COPY --from=coolify-binaries /usr/bin/docker /usr/bin/docker
COPY --from=coolify-binaries /usr/bin/envsubst /usr/bin/envsubst
COPY --from=coolify-binaries /usr/bin/jq /usr/bin/jq
RUN curl -f https://get.pnpm.io/v6.js | node - add --global pnpm@6
@@ -1,9 +0,0 @@
FROM ubuntu:20.04
LABEL coolify-preserve=true
RUN apt update && apt install -y curl gnupg2 ca-certificates
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN echo 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' >> /etc/apt/sources.list
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
RUN curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -o /usr/bin/jq
RUN chmod +x /usr/bin/envsubst /usr/bin/jq
RUN apt update && apt install -y docker-ce-cli && apt clean all
@@ -1,21 +0,0 @@
require('dotenv').config()
const { program } = require('commander')
const shell = require('shelljs')
const user = shell.exec('whoami', { silent: true }).stdout.replace('\n', '')
program.version('0.0.1')
program
  .option('-d, --debug', 'Debug outputs.')
  .option('-c, --check', 'Only checks configuration.')
  .option('-t, --type <type>', 'Deploy type.')

program.parse(process.argv)
const options = program.opts()
if (user !== 'root') {
  console.error(`Please run as root! Current user: ${user}`)
  process.exit(1)
}

if (options.type === 'upgrade') {
  shell.exec('docker service rm coollabs-coolify_coolify')
  shell.exec('set -a && source .env && set +a && envsubst < install/coolify-template.yml | docker stack deploy -c - coollabs-coolify', { silent: !options.debug, shell: '/bin/bash' })
}
package.json (147 lines changed)
@@ -1,68 +1,83 @@
{
  "name": "coolify",
  "description": "An open-source, hassle-free, self-hostable Heroku & Netlify alternative.",
  "version": "1.0.7",
  "license": "AGPL-3.0",
  "scripts": {
    "lint": "standard",
    "start": "NODE_ENV=production node api/server",
    "dev": "run-p dev:db dev:routify dev:svite dev:server",
    "dev:db": "NODE_ENV=development node api/development/mongodb.js",
    "dev:server": "nodemon -w api api/server",
    "dev:routify": "routify run",
    "dev:svite": "svite",
    "build": "run-s build:routify build:svite",
    "build:routify": "routify run -b",
    "build:svite": "svite build"
  },
  "dependencies": {
    "@iarna/toml": "^2.2.5",
    "@roxi/routify": "^2.15.1",
    "@zerodevx/svelte-toast": "^0.2.1",
    "ajv": "^8.1.0",
    "axios": "^0.21.1",
    "commander": "^7.2.0",
    "compare-versions": "^3.6.0",
    "cuid": "^2.1.8",
    "dayjs": "^1.10.4",
    "deepmerge": "^4.2.2",
    "dockerode": "^3.2.1",
    "dotenv": "^8.2.0",
    "fastify": "^3.14.2",
    "fastify-env": "^2.1.0",
    "fastify-jwt": "^2.4.0",
    "fastify-plugin": "^3.0.0",
    "fastify-static": "^4.0.1",
    "generate-password": "^1.6.0",
    "http-errors-enhanced": "^0.7.0",
    "js-yaml": "^4.0.0",
    "jsonwebtoken": "^8.5.1",
    "mongoose": "^5.12.3",
    "shelljs": "^0.8.4",
    "svelte-select": "^3.17.0",
    "unique-names-generator": "^4.4.0"
  },
  "devDependencies": {
    "mongodb-memory-server-core": "^6.9.6",
    "nodemon": "^2.0.7",
    "npm-run-all": "^4.1.5",
    "postcss": "^8.2.9",
    "postcss-import": "^14.0.1",
    "postcss-load-config": "^3.0.1",
    "postcss-preset-env": "^6.7.0",
    "prettier": "2.2.1",
    "prettier-plugin-svelte": "^2.2.0",
    "standard": "^16.0.3",
    "svelte": "^3.37.0",
    "svelte-hmr": "^0.14.0",
    "svelte-preprocess": "^4.7.0",
    "svite": "0.8.1",
    "tailwindcss": "2.1.1"
  },
  "keywords": [
    "svelte",
    "routify",
    "fastify",
    "tailwind"
  ]
  "name": "coolify",
  "description": "An open-source & self-hostable Heroku / Netlify alternative.",
  "version": "2.0.6",
  "license": "AGPL-3.0",
  "scripts": {
    "dev": "docker compose -f docker-compose-dev.yaml up -d && NODE_ENV=development svelte-kit dev --host 0.0.0.0",
    "dev:stop": "docker compose -f docker-compose-dev.yaml down",
    "dev:logs": "docker compose -f docker-compose-dev.yaml logs -f --tail 10",
    "studio": "npx prisma studio",
    "start": "npx prisma migrate deploy && npx prisma generate && npx prisma db seed && node index.js",
    "build": "svelte-kit build",
    "preview": "svelte-kit preview",
    "check": "svelte-check --tsconfig ./tsconfig.json",
    "check:watch": "svelte-check --tsconfig ./tsconfig.json --watch",
    "db:generate": "prisma generate",
    "db:push": "prisma db push && prisma generate",
    "db:seed": "prisma db seed",
    "release:staging": "cross-var docker build -t coollabsio/coolify:$npm_package_version . && docker push coollabsio/coolify:$npm_package_version",
    "release:coolify": "cross-var yarn prerelease && docker push coollabsio/coolify:$npm_package_version && docker image push coollabsio/coolify:$npm_package_version && docker push coollabsio/coolify:latest",
    "release:haproxy": "docker build -f haproxy.Dockerfile -t coollabsio/coolify-haproxy-alpine:1.0.0 -t coollabsio/coolify-haproxy-alpine:latest . && docker image push --all-tags coollabsio/coolify-haproxy-alpine",
    "release:haproxy:tcp": "docker build -f haproxy-tcp.Dockerfile -t coollabsio/coolify-haproxy-tcp-alpine:1.0.0 -t coollabsio/coolify-haproxy-tcp-alpine:latest . && docker image push --all-tags coollabsio/coolify-haproxy-tcp-alpine",
    "release:haproxy:http": "docker build -f haproxy-http.Dockerfile -t coollabsio/coolify-haproxy-http-alpine:1.0.0 -t coollabsio/coolify-haproxy-http-alpine:latest . && docker image push --all-tags coollabsio/coolify-haproxy-http-alpine",
    "prepare": "husky install"
  },
  "devDependencies": {
    "@sveltejs/adapter-node": "1.0.0-next.67",
    "@sveltejs/adapter-static": "1.0.0-next.27",
    "@sveltejs/kit": "1.0.0-next.259",
    "@types/bcrypt": "5.0.0",
    "@types/js-cookie": "3.0.1",
    "@types/node": "17.0.16",
    "@types/node-forge": "1.0.0",
    "@typescript-eslint/eslint-plugin": "4.31.1",
    "@typescript-eslint/parser": "4.31.1",
    "@zerodevx/svelte-toast": "0.6.3",
    "autoprefixer": "10.4.2",
    "cross-var": "1.1.0",
    "eslint": "7.32.0",
    "eslint-config-prettier": "8.3.0",
    "eslint-plugin-svelte3": "3.2.1",
    "husky": "7.0.4",
    "lint-staged": "12.3.3",
    "postcss": "8.4.6",
    "prettier": "2.5.1",
    "prettier-plugin-svelte": "2.6.0",
    "prettier-plugin-tailwindcss": "0.1.7",
    "prisma": "3.9.1",
    "svelte": "3.46.4",
    "svelte-check": "2.4.3",
    "svelte-preprocess": "4.10.3",
    "tailwindcss": "3.0.19",
    "ts-node": "10.5.0",
    "tslib": "2.3.1",
    "typescript": "4.5.5"
  },
  "type": "module",
  "dependencies": {
    "@iarna/toml": "2.2.5",
    "@prisma/client": "3.9.1",
    "@sentry/node": "6.17.6",
    "bcrypt": "5.0.1",
    "bullmq": "1.69.0",
    "compare-versions": "4.1.3",
    "cookie": "0.4.2",
    "cuid": "2.1.8",
    "dayjs": "1.10.7",
    "dockerode": "3.3.1",
    "dotenv-extended": "2.9.0",
    "generate-password": "1.7.0",
    "get-port": "6.0.0",
    "got": "12.0.1",
    "js-cookie": "3.0.1",
    "js-yaml": "4.1.0",
    "jsonwebtoken": "8.5.1",
    "node-forge": "1.2.1",
    "svelte-kit-cookie-session": "2.0.3",
    "unique-names-generator": "4.6.0"
  },
  "prisma": {
    "seed": "node prisma/seed.cjs"
  }
}

pnpm-lock.yaml (generated, 9242 lines): diff suppressed because it is too large
postcss.config.cjs (new file, 6 lines)
@@ -0,0 +1,6 @@
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {}
  }
};
@@ -1,7 +0,0 @@
module.exports = {
  plugins: [
    require('postcss-import'),
    require('tailwindcss'),
    require('postcss-preset-env')({ stage: 1 })
  ]
}
prisma/migrations/20220131142425_init/migration.sql (new file, 443 lines)
@@ -0,0 +1,443 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "User" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT NOT NULL,
|
||||
"type" TEXT NOT NULL,
|
||||
"password" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Permission" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"userId" TEXT NOT NULL,
|
||||
"teamId" TEXT NOT NULL,
|
||||
"permission" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Permission_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
|
||||
CONSTRAINT "Permission_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Team" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"databaseId" TEXT,
|
||||
"serviceId" TEXT,
|
||||
FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "TeamInvitation" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"uid" TEXT NOT NULL,
|
||||
"email" TEXT NOT NULL,
|
||||
"teamId" TEXT NOT NULL,
|
||||
"teamName" TEXT NOT NULL,
|
||||
"permission" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Application" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"repository" TEXT,
|
||||
"configHash" TEXT,
|
||||
"branch" TEXT,
|
||||
"buildPack" TEXT,
|
||||
"projectId" INTEGER,
|
||||
"port" INTEGER,
|
||||
"installCommand" TEXT,
|
||||
"buildCommand" TEXT,
|
||||
"startCommand" TEXT,
|
||||
"baseDirectory" TEXT,
|
||||
"publishDirectory" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"destinationDockerId" TEXT,
|
||||
"gitSourceId" TEXT,
|
||||
CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Secret" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "BuildLog" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT,
|
||||
"buildId" TEXT NOT NULL,
|
||||
"line" TEXT NOT NULL,
|
||||
"time" INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Build" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"type" TEXT NOT NULL,
|
||||
"applicationId" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"gitSourceId" TEXT,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
"commit" TEXT,
|
||||
"status" TEXT DEFAULT 'queued',
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "DestinationDocker" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"network" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"engine" TEXT NOT NULL,
|
||||
"remoteEngine" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isCoolifyProxyUsed" BOOLEAN DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GitSource" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"type" TEXT,
|
||||
"apiUrl" TEXT,
|
||||
"htmlUrl" TEXT,
|
||||
"organization" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GithubApp" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"appId" INTEGER,
|
||||
"installationId" INTEGER,
|
||||
"clientId" TEXT,
|
||||
"clientSecret" TEXT,
|
||||
"webhookSecret" TEXT,
|
||||
"privateKey" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GitlabApp" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"oauthId" INTEGER NOT NULL,
|
||||
"groupName" TEXT,
|
||||
"deployKeyId" INTEGER,
|
||||
"privateSshKey" TEXT,
|
||||
"publicSshKey" TEXT,
|
||||
"webhookToken" TEXT,
|
||||
"appId" TEXT,
|
||||
"appSecret" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Database" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"publicPort" INTEGER,
|
||||
"defaultDatabase" TEXT,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"dbUser" TEXT,
|
||||
"dbUserPassword" TEXT,
|
||||
"rootUser" TEXT,
|
||||
"rootUserPassword" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Database_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "DatabaseSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"databaseId" TEXT NOT NULL,
|
||||
"isPublic" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Service" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "PlausibleAnalytics" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT,
|
||||
"username" TEXT,
|
||||
"password" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"secretKeyBase" TEXT,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Minio" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"rootUser" TEXT NOT NULL,
|
||||
"rootUserPassword" TEXT NOT NULL,
|
||||
"publicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Minio_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Vscodeserver" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"password" TEXT NOT NULL,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Vscodeserver_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Wordpress" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"extraConfig" TEXT,
|
||||
"tablePrefix" TEXT,
|
||||
"mysqlUser" TEXT NOT NULL,
|
||||
"mysqlPassword" TEXT NOT NULL,
|
||||
"mysqlRootUser" TEXT NOT NULL,
|
||||
"mysqlRootUserPassword" TEXT NOT NULL,
|
||||
"mysqlDatabase" TEXT,
|
||||
"mysqlPublicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_TeamToUser" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_ApplicationToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Application" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GitSourceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GitSource" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GithubAppToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GithubApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GitlabAppToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GitlabApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_DestinationDockerToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "DestinationDocker" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_DatabaseToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Database" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_ServiceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Service" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_id_key" ON "User"("id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Application_fqdn_key" ON "Application"("fqdn");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Secret_name_key" ON "Secret"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DestinationDocker_network_key" ON "DestinationDocker"("network");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GithubApp_name_key" ON "GithubApp"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitlabApp_oauthId_key" ON "GitlabApp"("oauthId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitlabApp_groupName_key" ON "GitlabApp"("groupName");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Minio_serviceId_key" ON "Minio"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Vscodeserver_serviceId_key" ON "Vscodeserver"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_TeamToUser_AB_unique" ON "_TeamToUser"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_TeamToUser_B_index" ON "_TeamToUser"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_ApplicationToTeam_AB_unique" ON "_ApplicationToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_ApplicationToTeam_B_index" ON "_ApplicationToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GitSourceToTeam_AB_unique" ON "_GitSourceToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GitSourceToTeam_B_index" ON "_GitSourceToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GithubAppToTeam_AB_unique" ON "_GithubAppToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GithubAppToTeam_B_index" ON "_GithubAppToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GitlabAppToTeam_AB_unique" ON "_GitlabAppToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GitlabAppToTeam_B_index" ON "_GitlabAppToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_DestinationDockerToTeam_AB_unique" ON "_DestinationDockerToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_DestinationDockerToTeam_B_index" ON "_DestinationDockerToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_DatabaseToTeam_AB_unique" ON "_DatabaseToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_DatabaseToTeam_B_index" ON "_DatabaseToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_ServiceToTeam_AB_unique" ON "_ServiceToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_ServiceToTeam_B_index" ON "_ServiceToTeam"("B");
|
||||
prisma/migrations/20220210104005_redis_aol/migration.sql (new file, 28 lines)
@@ -0,0 +1,28 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Team" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "databaseId" TEXT,
    "serviceId" TEXT
);
INSERT INTO "new_Team" ("createdAt", "databaseId", "id", "name", "serviceId", "updatedAt") SELECT "createdAt", "databaseId", "id", "name", "serviceId", "updatedAt" FROM "Team";
DROP TABLE "Team";
ALTER TABLE "new_Team" RENAME TO "Team";
CREATE TABLE "new_DatabaseSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "databaseId" TEXT NOT NULL,
    "isPublic" BOOLEAN NOT NULL DEFAULT false,
    "appendOnly" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_DatabaseSettings" ("createdAt", "databaseId", "id", "isPublic", "updatedAt") SELECT "createdAt", "databaseId", "id", "isPublic", "updatedAt" FROM "DatabaseSettings";
DROP TABLE "DatabaseSettings";
ALTER TABLE "new_DatabaseSettings" RENAME TO "DatabaseSettings";
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,11 @@
/*
  Warnings:

  - A unique constraint covering the columns `[name,applicationId]` on the table `Secret` will be added. If there are existing duplicate values, this will fail.

*/
-- DropIndex
DROP INDEX "Secret_name_key";

-- CreateIndex
CREATE UNIQUE INDEX "Secret_name_applicationId_key" ON "Secret"("name", "applicationId");
prisma/migrations/migration_lock.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "sqlite"
prisma/schema.prisma (new file, 300 lines)
@@ -0,0 +1,300 @@
|
||||
generator client {
|
||||
provider = "prisma-client-js"
|
||||
}
|
||||
|
||||
datasource db {
|
||||
provider = "sqlite"
|
||||
url = env("COOLIFY_DATABASE_URL")
|
||||
}
|
||||
|
||||
model Setting {
|
||||
id String @id @default(cuid())
|
||||
fqdn String? @unique
|
||||
isRegistrationEnabled Boolean @default(false)
|
||||
proxyPassword String
|
||||
proxyUser String
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model User {
|
||||
id String @id @unique @default(cuid())
|
||||
email String @unique
|
||||
type String
|
||||
password String?
|
||||
teams Team[]
|
||||
permission Permission[]
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model Permission {
|
||||
id String @id @default(cuid())
|
||||
user User @relation(fields: [userId], references: [id])
|
||||
userId String
|
||||
team Team @relation(fields: [teamId], references: [id])
|
||||
teamId String
|
||||
permission String
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model Team {
|
||||
id String @id @default(cuid())
|
||||
users User[]
|
||||
name String?
|
||||
applications Application[]
|
||||
gitSources GitSource[]
|
||||
gitHubApps GithubApp[]
|
||||
gitLabApps GitlabApp[]
|
||||
destinationDocker DestinationDocker[]
|
||||
permissions Permission[]
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
database Database[] @relation(fields: [databaseId], references: [id])
|
||||
databaseId String?
|
||||
service Service[] @relation(fields: [serviceId], references: [id])
|
||||
serviceId String?
|
||||
}
|
||||
|
||||
model TeamInvitation {
|
||||
id String @id @default(cuid())
|
||||
uid String
|
||||
email String
|
||||
teamId String
|
||||
teamName String
|
||||
permission String
|
||||
createdAt DateTime @default(now())
|
||||
}
|
||||
|
||||
model Application {
|
||||
id String @id @default(cuid())
|
||||
name String
|
||||
fqdn String? @unique
|
||||
repository String?
|
||||
configHash String?
|
||||
branch String?
|
||||
buildPack String?
|
||||
projectId Int?
|
||||
port Int?
|
||||
installCommand String?
|
||||
buildCommand String?
|
||||
startCommand String?
|
||||
baseDirectory String?
|
||||
publishDirectory String?
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
settings ApplicationSettings?
|
||||
teams Team[]
|
||||
destinationDockerId String?
|
||||
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
|
||||
gitSourceId String?
|
||||
gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
|
||||
secrets Secret[]
|
||||
}
|
||||
|
||||
model ApplicationSettings {
|
||||
id String @id @default(cuid())
|
||||
application Application @relation(fields: [applicationId], references: [id])
|
||||
applicationId String @unique
|
||||
debug Boolean @default(false)
|
||||
previews Boolean @default(false)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model Secret {
|
||||
id String @id @default(cuid())
|
||||
name String
|
||||
value String
|
||||
isBuildSecret Boolean @default(false)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
application Application @relation(fields: [applicationId], references: [id])
|
||||
applicationId String
|
||||
|
||||
@@unique([name, applicationId])
|
||||
}
|
||||
|
||||
model BuildLog {
|
||||
id String @id @default(cuid())
|
||||
applicationId String?
|
||||
buildId String
|
||||
line String
|
||||
time Int
|
||||
}
|
||||
|
||||
model Build {
|
||||
id String @id @default(cuid())
|
||||
type String
|
||||
applicationId String?
|
||||
destinationDockerId String?
|
||||
gitSourceId String?
|
||||
githubAppId String?
|
||||
gitlabAppId String?
|
||||
commit String?
|
||||
status String? @default("queued")
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model DestinationDocker {
|
||||
id String @id @default(cuid())
|
||||
network String @unique
|
||||
name String
|
||||
engine String
|
||||
remoteEngine Boolean @default(false)
|
||||
isCoolifyProxyUsed Boolean? @default(false)
|
||||
teams Team[]
|
||||
application Application[]
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
database Database[]
|
||||
service Service[]
|
||||
}
|
||||
|
||||
model GitSource {
|
||||
id String @id @default(cuid())
|
||||
name String
|
||||
teams Team[]
|
||||
type String?
|
||||
apiUrl String?
|
||||
htmlUrl String?
|
||||
organization String?
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
githubAppId String? @unique
|
||||
githubApp GithubApp? @relation(fields: [githubAppId], references: [id])
|
||||
application Application[]
|
||||
gitlabAppId String? @unique
|
||||
gitlabApp GitlabApp? @relation(fields: [gitlabAppId], references: [id])
|
||||
}
|
||||
|
||||
model GithubApp {
|
||||
id String @id @default(cuid())
|
||||
name String? @unique
|
||||
teams Team[]
|
||||
appId Int?
|
||||
installationId Int?
|
||||
clientId String?
|
||||
clientSecret String?
|
||||
webhookSecret String?
|
||||
privateKey String?
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
gitSource GitSource?
|
||||
}
|
||||
|
||||
model GitlabApp {
|
||||
id String @id @default(cuid())
|
||||
oauthId Int @unique
|
||||
groupName String? @unique
|
||||
teams Team[]
|
||||
deployKeyId Int?
|
||||
privateSshKey String?
|
||||
publicSshKey String?
|
||||
webhookToken String?
|
||||
appId String?
|
||||
appSecret String?
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
gitSource GitSource?
|
||||
}
|
||||
|
||||
model Database {
|
||||
id String @id @default(cuid())
|
||||
name String
|
||||
publicPort Int?
|
||||
defaultDatabase String?
|
||||
type String?
|
||||
version String?
|
||||
dbUser String?
|
||||
dbUserPassword String?
|
||||
rootUser String?
|
||||
rootUserPassword String?
|
||||
settings DatabaseSettings?
|
||||
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
|
||||
destinationDockerId String?
|
||||
teams Team[]
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model DatabaseSettings {
|
||||
id String @id @default(cuid())
|
||||
database Database @relation(fields: [databaseId], references: [id])
|
||||
databaseId String @unique
|
||||
isPublic Boolean @default(false)
|
||||
appendOnly Boolean @default(true)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model Service {
|
||||
id String @id @default(cuid())
|
||||
name String
|
||||
fqdn String?
|
||||
type String?
|
||||
version String?
|
||||
teams Team[]
|
||||
destinationDockerId String?
|
||||
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
plausibleAnalytics PlausibleAnalytics?
|
||||
minio Minio?
|
||||
vscodeserver Vscodeserver?
|
||||
wordpress Wordpress?
|
||||
}
|
||||
|
||||
model PlausibleAnalytics {
|
||||
id String @id @default(cuid())
|
||||
email String?
|
||||
username String?
|
||||
password String
|
||||
postgresqlUser String
|
||||
postgresqlPassword String
|
||||
postgresqlDatabase String
|
||||
postgresqlPublicPort Int?
|
||||
secretKeyBase String?
|
||||
serviceId String @unique
|
||||
service Service @relation(fields: [serviceId], references: [id])
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model Minio {
|
||||
id String @id @default(cuid())
|
||||
rootUser String
|
||||
rootUserPassword String
|
||||
publicPort Int?
|
||||
serviceId String @unique
|
||||
service Service @relation(fields: [serviceId], references: [id])
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model Vscodeserver {
|
||||
id String @id @default(cuid())
|
||||
password String
|
||||
serviceId String @unique
|
||||
service Service @relation(fields: [serviceId], references: [id])
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model Wordpress {
|
||||
id String @id @default(cuid())
|
||||
extraConfig String?
|
||||
tablePrefix String?
|
||||
mysqlUser String
|
||||
mysqlPassword String
|
||||
mysqlRootUser String
|
||||
mysqlRootUserPassword String
|
||||
mysqlDatabase String?
|
||||
mysqlPublicPort Int?
|
||||
serviceId String @unique
|
||||
service Service @relation(fields: [serviceId], references: [id])
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
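The schema above is consumed through the generated Prisma client; the seed script in the next file shows the write side. As a complementary, hypothetical read-side sketch (not part of the repository), assuming @prisma/client has been generated from this schema and COOLIFY_DATABASE_URL points at the SQLite file:

const { PrismaClient } = require('@prisma/client');
const prisma = new PrismaClient();

async function report() {
  // Relation fields such as Team.users and Team.applications come straight
  // from the schema above and can be eagerly loaded with `include`.
  const teams = await prisma.team.findMany({
    include: { users: true, applications: true }
  });
  for (const team of teams) {
    console.log(team.name, team.users.length, team.applications.length);
  }
}

report().finally(() => prisma.$disconnect());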
prisma/seed.cjs (new file, 80 lines)
@@ -0,0 +1,80 @@
const dotEnvExtended = require('dotenv-extended');
dotEnvExtended.load();
const { PrismaClient } = require('@prisma/client');
const prisma = new PrismaClient();
const crypto = require('crypto');
const generator = require('generate-password');
const cuid = require('cuid');

function generatePassword(length = 24) {
  return generator.generate({
    length,
    numbers: true,
    strict: true
  });
}
const algorithm = 'aes-256-ctr';

async function main() {
  // Enable registration for the first user
  // Set initial HAProxy password
  const settingsFound = await prisma.setting.findFirst({});
  if (!settingsFound) {
    await prisma.setting.create({
      data: {
        isRegistrationEnabled: true,
        proxyPassword: encrypt(generatePassword()),
        proxyUser: cuid()
      }
    });
  }
  const localDocker = await prisma.destinationDocker.findFirst({
    where: { engine: '/var/run/docker.sock' }
  });
  if (!localDocker) {
    await prisma.destinationDocker.create({
      data: {
        engine: '/var/run/docker.sock',
        name: 'Local Docker',
        isCoolifyProxyUsed: true,
        network: 'coolify'
      }
    });
  }
}
main()
  .catch((e) => {
    console.error(e);
    process.exit(1);
  })
  .finally(async () => {
    await prisma.$disconnect();
  });

const encrypt = (text) => {
  if (text) {
    const iv = crypto.randomBytes(16);
    const cipher = crypto.createCipheriv(algorithm, process.env['COOLIFY_SECRET_KEY'], iv);
    const encrypted = Buffer.concat([cipher.update(text), cipher.final()]);
    return JSON.stringify({
      iv: iv.toString('hex'),
      content: encrypted.toString('hex')
    });
  }
};

const decrypt = (hashString) => {
  if (hashString) {
    const hash = JSON.parse(hashString);
    const decipher = crypto.createDecipheriv(
      algorithm,
      process.env['COOLIFY_SECRET_KEY'],
      Buffer.from(hash.iv, 'hex')
    );
    const decrpyted = Buffer.concat([
      decipher.update(Buffer.from(hash.content, 'hex')),
      decipher.final()
    ]);
    return decrpyted.toString();
  }
};
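The seed stores the random IV next to the ciphertext, so anything it encrypts can be read back later with the matching decrypt helper. A minimal, self-contained round-trip sketch under the same assumptions (aes-256-ctr, so COOLIFY_SECRET_KEY must be exactly 32 bytes; the key and sample value below are made up for illustration):

const crypto = require('crypto');
const algorithm = 'aes-256-ctr';
// Hypothetical 32-character key, standing in for process.env.COOLIFY_SECRET_KEY.
const key = '0123456789abcdef0123456789abcdef';

const encrypt = (text) => {
  const iv = crypto.randomBytes(16);
  const cipher = crypto.createCipheriv(algorithm, key, iv);
  const encrypted = Buffer.concat([cipher.update(text), cipher.final()]);
  return JSON.stringify({ iv: iv.toString('hex'), content: encrypted.toString('hex') });
};

const decrypt = (hashString) => {
  const hash = JSON.parse(hashString);
  const decipher = crypto.createDecipheriv(algorithm, key, Buffer.from(hash.iv, 'hex'));
  return Buffer.concat([
    decipher.update(Buffer.from(hash.content, 'hex')),
    decipher.final()
  ]).toString();
};

const stored = encrypt('example-proxy-password');
console.log(decrypt(stored) === 'example-proxy-password'); // true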
@@ -1,3 +0,0 @@
<script>
  window.close();
</script>
@@ -1,5 +0,0 @@
module.exports = {
  routifyDir: '.routify',
  dynamicImports: true,
  extensions: ['svelte']
}
scripts/install.sh (new file, 112 lines)
@@ -0,0 +1,112 @@
#!/usr/bin/env bash
clear
ARG1=$1
WHO=$(whoami)
APP_ID=$(cat /proc/sys/kernel/random/uuid)
RANDOM_SECRET=$(echo $(($(date +%s%N) / 1000000)) | sha256sum | base64 | head -c 32)
SENTRY_DSN="https://9e7a74326f29422584d2d0bebdc8b7d3@o1082494.ingest.sentry.io/6091062"
DOCKER_MAJOR=20
DOCKER_MINOR=10
DOCKER_VERSION_OK="nok"

set -eou pipefail

if [ $ARG1 ] && [ $ARG1 == "-d" ]; then
    set -x
fi

function errorchecker() {
    exitCode=$?
    if [ $exitCode -ne "0" ]; then
        echo "$0 exited unexpectedly with status: $exitCode"
        exit $exitCode
    fi
}
trap 'errorchecker' EXIT

echo -e "Welcome to Coolify installer! \n"
echo "This script will install all the required packages and services to run Coolify."
echo -e "If you want to install Coolify on a different OS, please open an issue on Github to get supported version.\n\n"

echo -e "To see what I'm doing, please check:"
echo -e "https://github.com/coollabsio/get.coollabs.io/blob/main/static/coolify/install_v2.sh\n\n"

if [ $WHO != 'root' ]; then
    echo 'Run as root please: sudo sh -c "$(curl -fsSL https://get.coollabs.io/coolify/install.sh)"'
    exit 1
fi

if [ ! -x "$(command -v docker)" ]; then
    while true; do
        read -p "Docker Engine not found, should I install it automatically? [Yy/Nn] " yn
        case $yn in
        [Yy]*)
            sh -c "$(curl -fsSL https://get.docker.com)"
            break
            ;;
        [Nn]*)
            echo "Please install docker manually and update it to the latest, but at least to $DOCKER_MAJOR.$DOCKER_MINOR"
            exit 0
            ;;
        *) echo "Please answer Y or N." ;;
        esac
    done
fi

SERVER_VERSION=$(docker version -f "{{.Server.Version}}")
SERVER_VERSION_MAJOR=$(echo "$SERVER_VERSION" | cut -d'.' -f 1)
SERVER_VERSION_MINOR=$(echo "$SERVER_VERSION" | cut -d'.' -f 2)

if [ "$SERVER_VERSION_MAJOR" -ge "$DOCKER_MAJOR" ] &&
    [ "$SERVER_VERSION_MINOR" -ge "$DOCKER_MINOR" ]; then
    DOCKER_VERSION_OK="ok"
fi

if [ $DOCKER_VERSION_OK == 'nok' ]; then
    echo "Docker version less than $DOCKER_MAJOR.$DOCKER_MINOR, please update it to at least to $DOCKER_MAJOR.$DOCKER_MINOR"
    exit 1
fi

# Adding docker daemon configuration
cat <<EOF >/etc/docker/daemon.json
{
  "log-driver": "json-file",
  "log-opts": {
    "max-size": "100m",
    "max-file": "5"
  },
  "features": {
    "buildkit": true
  },
  "live-restore": true
}
EOF

# Restarting docker daemon
sh -c "systemctl daemon-reload && systemctl restart docker"

# Downloading docker compose cli plugin
mkdir -p ~/.docker/cli-plugins/
curl -SL https://github.com/docker/compose/releases/download/v2.2.2/docker-compose-linux-x86_64 -o ~/.docker/cli-plugins/docker-compose
chmod +x ~/.docker/cli-plugins/docker-compose

# Making base directory for coolify
if [ ! -d coolify ]; then
    mkdir coolify
fi

if [ -f coolify/.env ]; then
    echo -e "Coolify is already installed, using some of the existing settings."
else
    echo "COOLIFY_APP_ID=$APP_ID
COOLIFY_SECRET_KEY=$RANDOM_SECRET
COOLIFY_DATABASE_URL=file:../db/prod.db
COOLIFY_SENTRY_DSN=$SENTRY_DSN
COOLIFY_HOSTED_ON=docker" > coolify/.env
fi

cd coolify && docker run -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db-sqlite coollabsio/coolify:latest /bin/sh -c "env | grep COOLIFY > .env && docker compose up -d --force-recreate"

echo -e "Congratulations! Your coolify is ready to use.\n"
echo "Please visit http://<Your Public IP Address>:3000/ to get started."
echo "It will take a few minutes to start up, don't worry."
scripts/install_podman_experiment.sh (new file, 114 lines)
@@ -0,0 +1,114 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
clear
|
||||
ARG1=$1
|
||||
WHO=$(whoami)
|
||||
APP_ID=$(cat /proc/sys/kernel/random/uuid)
|
||||
RANDOM_SECRET=$(echo $(($(date +%s%N) / 1000000)) | sha256sum | base64 | head -c 32)
|
||||
SENTRY_DSN="https://9e7a74326f29422584d2d0bebdc8b7d3@o1082494.ingest.sentry.io/6091062"
|
||||
|
||||
UBUNTU_MAJOR_MIN=20
|
||||
UBUNTU_MINOR_MIN=04
|
||||
OS_OK="nok"
|
||||
|
||||
set -eou pipefail
|
||||
|
||||
if [ $ARG1 ] && [ $ARG1 == "-d" ]; then
|
||||
set -x
|
||||
fi
|
||||
|
||||
function errorchecker() {
|
||||
exitCode=$?
|
||||
if [ $exitCode -ne "0" ]; then
|
||||
echo "$0 exited unexpectedly with status: $exitCode"
|
||||
exit $exitCode
|
||||
fi
|
||||
}
|
||||
trap 'errorchecker' EXIT
|
||||
|
||||
if [ $WHO != 'root' ]; then
|
||||
echo 'Run as root please: sudo sh -c "$(curl -fsSL https://get.coollabs.io/coolify/install.sh)"'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
. /etc/lsb-release
|
||||
if [ $DISTRIB_ID != 'Ubuntu' ]; then
|
||||
echo 'Not supported OS, please open an issue on Github to get supported version.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
DISTRIB_RELEASE_MAJOR=$(echo "$DISTRIB_RELEASE" | cut -d'.' -f 1)
|
||||
DISTRIB_RELEASE_MINOR=$(echo "$DISTRIB_RELEASE" | cut -d'.' -f 2)
|
||||
|
||||
if [ "$DISTRIB_RELEASE_MAJOR" -ge "$UBUNTU_MAJOR_MIN" ] &&
|
||||
[ "$DISTRIB_RELEASE_MINOR" -ge "$UBUNTU_MINOR_MIN" ]; then
|
||||
OS_OK="ok"
|
||||
fi
|
||||
|
||||
if [ $OS_OK == 'nok' ]; then
|
||||
echo "Ubuntu version less than $UBUNTU_MAJOR_MIN.$UBUNTU_MINOR_MIN."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
function installPodman() {
|
||||
apt-get update -y
|
||||
apt-get install curl wget gnupg2 -y
|
||||
if [ "$DISTRIB_RELEASE_MAJOR" -eq "20" ] && [ "$DISTRIB_RELEASE_MINOR" -eq "04" ]; then
|
||||
echo 'Installing on 20.04'
|
||||
source /etc/os-release
|
||||
sh -c "echo 'deb http://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/xUbuntu_${VERSION_ID}/ /' > /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list"
|
||||
wget -nv https://download.opensuse.org/repositories/devel:kubic:libcontainers:stable/xUbuntu_${VERSION_ID}/Release.key -O- | apt-key add -
|
||||
apt-get update -y
|
||||
apt-get -y install podman
|
||||
return 0
|
||||
elif [ "$DISTRIB_RELEASE_MAJOR" -eq "20" ] && [ "$DISTRIB_RELEASE_MINOR" -eq "10" ]; then
|
||||
apt-get -y install podman
|
||||
return 0
|
||||
elif [ "$DISTRIB_RELEASE_MAJOR" -gt "20" ]; then
|
||||
apt-get -y install podman
|
||||
return 0
|
||||
else
|
||||
exit 1
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
if [ ! -x "$(command -v podman)" ]; then
|
||||
while true; do
|
||||
read -p "Podman not found, should I install it automatically? [Yy/Nn] " yn
|
||||
case $yn in
|
||||
[Yy]*)
|
||||
installPodman
|
||||
break
|
||||
;;
|
||||
[Nn]*)
|
||||
echo "Please install docker manually and update it to the latest, but at least to $DOCKER_MAJOR.$DOCKER_MINOR"
|
||||
exit 0
|
||||
;;
|
||||
*) echo "Please answer Yy or Nn." ;;
|
||||
esac
|
||||
done
|
||||
fi
|
||||
|
||||
# Making base directory for coolify
|
||||
if [ ! -d coolify ]; then
|
||||
mkdir coolify
|
||||
fi
|
||||
|
||||
echo "COOLIFY_APP_ID=$APP_ID
|
||||
COOLIFY_SECRET_KEY=$RANDOM_SECRET
|
||||
COOLIFY_DATABASE_URL=file:../db/prod.db
|
||||
COOLIFY_SENTRY_DSN=$SENTRY_DSN
|
||||
COOLIFY_HOSTED_ON=docker" >coolify/.env
|
||||
|
||||
systemctl start podman.socket
|
||||
systemctl enable podman.socket
|
||||
|
||||
podman volume create coolify-db
|
||||
podman volume create coolify-ssl-certs
|
||||
podman volume create coolify-letsencrypt
|
||||
|
||||
|
||||
cd coolify && podman run --privileged -tid --env-file .env -v /var/run/podman/podman.sock:/var/run/podman/podman.sock -v coolify-db-sqlite:/app/db docker.io/coollabsio/coolify:latest /bin/sh -c "env | grep COOLIFY > .env && docker-compose up -d --force-recreate"
|
||||
echo "Done"
|
||||
exit 0
|
||||
Some files were not shown because too many files have changed in this diff.