commit adf82c04ad

.github/workflows/pocketbase-release.yml (vendored, new file, 93 lines)
@@ -0,0 +1,93 @@
name: pocketbase-release

on:
  push:
    paths:
      - "others/pocketbase"
      - ".github/workflows/pocketbase-release.yml"
    branches:
      - next

jobs:
  arm64:
    runs-on: [self-hosted, arm64]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: others/pocketbase/
          platforms: linux/arm64
          push: true
          tags: coollabsio/pocketbase:0.8.0-arm64
  amd64:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v3
        with:
          context: others/pocketbase/
          platforms: linux/amd64
          push: true
          tags: coollabsio/pocketbase:0.8.0-amd64
  aarch64:
    runs-on: [self-hosted, arm64]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: others/pocketbase/
          platforms: linux/aarch64
          push: true
          tags: coollabsio/pocketbase:0.8.0-aarch64
  merge-manifest:
    runs-on: ubuntu-latest
    needs: [amd64, arm64, aarch64]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Create & publish manifest
        run: |
          docker manifest create coollabsio/pocketbase:0.8.0 --amend coollabsio/pocketbase:0.8.0-amd64 --amend coollabsio/pocketbase:0.8.0-arm64 --amend coollabsio/pocketbase:0.8.0-aarch64
          docker manifest push coollabsio/pocketbase:0.8.0
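The three build jobs push architecture-specific tags and the merge-manifest job stitches them into the single multi-arch tag coollabsio/pocketbase:0.8.0. Below is a quick sanity check sketched in TypeScript for Node; it is not part of the commit, it assumes a local Docker CLI with manifest support, and it only reuses the tag names from the workflow above.

```ts
// Hedged sketch (not from the commit): verify the merged manifest published by
// the merge-manifest job actually lists the expected architectures.
import { execFileSync } from "node:child_process";

const TAG = "coollabsio/pocketbase:0.8.0";
// buildx normally records the aarch64 build as arm64, so amd64 + arm64 entries are expected.
const expected = ["amd64", "arm64"];

const raw = execFileSync("docker", ["manifest", "inspect", TAG], { encoding: "utf8" });
const archs: string[] = (JSON.parse(raw).manifests ?? []).map(
  (m: { platform: { architecture: string } }) => m.platform.architecture
);

for (const arch of expected) {
  if (!archs.includes(arch)) {
    throw new Error(`${TAG} is missing ${arch}; found: ${archs.join(", ")}`);
  }
}
console.log(`${TAG} covers: ${archs.join(", ")}`);
```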
.github/workflows/staging-release.yml (vendored, 4 changes)
@@ -3,9 +3,11 @@ name: staging-release
 on:
   push:
     paths:
-      - '**'
+      - "**"
       - "!others/fluentbit"
+      - "!others/pocketbase"
       - "!.github/workflows/fluent-bit-release.yml"
+      - "!.github/workflows/pocketbase-release.yml"
     branches:
       - next
.gitignore (vendored, 5 changes)
@@ -10,8 +10,11 @@ package
 dist
 client
 apps/api/db/*.db
-local-serve
 apps/api/db/migration.db-journal
 apps/api/core*
+apps/backup/backups/*
+!apps/backup/backups/.gitkeep
 logs
 others/certificates
+backups/*
+!backups/.gitkeep
.vscode/settings.json (vendored, 21 changes)
@@ -1,11 +1,22 @@
 {
-  "i18n-ally.localesPaths": ["src/lib/locales"],
+  "i18n-ally.localesPaths": [
+    "src/lib/locales"
+  ],
   "i18n-ally.keystyle": "nested",
   "i18n-ally.extract.ignoredByFiles": {
-    "src\\routes\\__layout.svelte": ["Coolify", "coolLabs logo"]
+    "src\\routes\\__layout.svelte": [
+      "Coolify",
+      "coolLabs logo"
+    ]
   },
   "i18n-ally.sourceLanguage": "en",
-  "i18n-ally.enabledFrameworks": ["svelte"],
-  "i18n-ally.enabledParsers": ["js", "ts", "json"],
+  "i18n-ally.enabledFrameworks": [
+    "svelte"
+  ],
+  "i18n-ally.enabledParsers": [
+    "js",
+    "ts",
+    "json"
+  ],
   "i18n-ally.extract.autoDetect": true
 }
@@ -34,7 +34,7 @@ ## 3) Setup Coolify

 ```sh
 # Or... Copy and paste commands bellow:
-cp apps/api/.env.example apps/api.env
+cp apps/api/.env.example apps/api/.env
 pnpm install
 pnpm db:push
 pnpm db:seed
README.md (25 changes)
@@ -77,6 +77,7 @@ ### Databases
 <a href="https://redis.io"><svg style="width:40px;height:40px" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" ><defs ><path id="a" d="m45.536 38.764c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.813s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" /><path id="b" d="m45.536 28.733c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.935c2.332-.837 3.14-.867 5.126-.14s12.35 4.853 14.312 5.57 2.037 1.31.024 2.36z" /></defs ><g transform="matrix(.848327 0 0 .848327 -7.883573 -9.449691)" ><use fill="#a41e11" xlink:href="#a" /><path d="m45.536 34.95c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.936c2.332-.836 3.14-.867 5.126-.14s12.35 4.852 14.31 5.582 2.037 1.31.024 2.36z" fill="#d82c20" /><use fill="#a41e11" xlink:href="#a" y="-6.218" /><use fill="#d82c20" xlink:href="#b" /><path d="m45.536 26.098c-2.013 1.05-12.44 5.337-14.66 6.495s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.815s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" fill="#a41e11" /><use fill="#d82c20" xlink:href="#b" y="-6.449" /><g fill="#fff" ><path d="m29.096 20.712-1.182-1.965-3.774-.34 2.816-1.016-.845-1.56 2.636 1.03 2.486-.814-.672 1.612 2.534.95-3.268.34zm-6.296 3.912 8.74-1.342-2.64 3.872z" /><ellipse cx="20.444" cy="21.402" rx="4.672" ry="1.811" /></g ><path d="m42.132 21.138-5.17 2.042-.004-4.087z" fill="#7a0c00" /><path d="m36.963 23.18-.56.22-5.166-2.042 5.723-2.264z" fill="#ad2115" /></g ></svg ></a>

 ### Services

 - [Appwrite](https://appwrite.io)
 - [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
 - [Ghost](https://ghost.org)
@@ -103,11 +104,29 @@ ## Support
 - Email: [andras@coollabs.io](mailto:andras@coollabs.io)
 - Discord: [Invitation](https://coollabs.io/discord)

-## Development Contributions
+---

-Coolify is developed under the Apache License and you can help to make it grow → [Start coding!](./CONTRIBUTION.md)
+## ⚗️ Expertise Contributions

-## Financial Contributors
+Coolify is developed under the [Apache License](./LICENSE) and you can help to make it grow.
+Our community will be glad to have you on board!
+
+Learn how to contribute to Coolify as as ...
+
+→ [👩🏾‍💻 Software developer](./CONTRIBUTION.md)
+
+→ [🧑🏻‍🏫 Translator](./docs/contribution/Translating.md)
+
+<!--
+→ 🧑🏽‍🎨 Designer
+→ 🙋‍♀️ Community Managemer
+→ 🧙🏻‍♂️ Text Content Creator
+→ 👨🏼‍🎤 Video Content Creator
+-->
+
+---
+
+## 💰 Financial Contributors
+
 Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/coollabsio/contribute)]
@@ -1,10 +1,9 @@
 COOLIFY_APP_ID=local-dev
 # 32 bits long secret key
 COOLIFY_SECRET_KEY=12341234123412341234123412341234
 COOLIFY_DATABASE_URL=file:../db/dev.db
-COOLIFY_SENTRY_DSN=
 COOLIFY_IS_ON=docker
 COOLIFY_WHITE_LABELED=false
 COOLIFY_WHITE_LABELED_ICON=
 COOLIFY_AUTO_UPDATE=
File diff suppressed because one or more lines are too long
@@ -1,3 +1,161 @@
- templateVersion: 1.0.0
  defaultVersion: '0.8.0'
  documentation: https://pocketbase.io/docs/
  type: pocketbase
  name: Pocketbase
  description: "Open Source realtime backend in 1 file"
  services:
    $$id:
      image: coollabsio/pocketbase:$$core_version
      volumes:
        - $$id-data:/app/pb_data
      ports:
        - "8080"
- templateVersion: 1.0.0
  defaultVersion: 1.5.0-rc.0
  documentation: https://plausible.io/doc/
  type: plausibleanalytics-arm
  name: Plausible Analytics (ARM)
  description: A lightweight and open-source website analytics tool.
  labels:
    - analytics
    - statistics
    - plausible
    - gdpr
    - no-cookie
    - google analytics
  services:
    $$id:
      name: Plausible Analytics
      command: >-
        sh -c "sleep 10 && /entrypoint.sh db createdb && /entrypoint.sh db migrate
        && /entrypoint.sh db init-admin && /entrypoint.sh run"
      depends_on:
        - $$id-postgresql
        - $$id-clickhouse
      image: plausible/analytics:$$core_version
      environment:
        - ADMIN_USER_EMAIL=$$config_admin_user_email
        - ADMIN_USER_NAME=$$config_admin_user_name
        - ADMIN_USER_PWD=$$secret_admin_user_pwd
        - BASE_URL=$$config_base_url
        - SECRET_KEY_BASE=$$secret_secret_key_base
        - DISABLE_AUTH=$$config_disable_auth
        - DISABLE_REGISTRATION=$$config_disable_registration
        - DATABASE_URL=$$secret_database_url
        - CLICKHOUSE_DATABASE_URL=$$secret_clickhouse_database_url
      ports:
        - "8000"
    $$id-postgresql:
      name: PostgreSQL
      image: postgres:14-alpine
      volumes:
        - $$id-postgresql-data:/var/lib/postgresql/data
      environment:
        - POSTGRES_PASSWORD=$$secret_postgres_password
        - POSTGRES_USER=$$config_postgres_user
        - POSTGRES_DB=$$config_postgres_db
    $$id-clickhouse:
      name: Clickhouse
      volumes:
        - $$id-clickhouse-data:/var/lib/clickhouse
      image: clickhouse/clickhouse-server:22.6-alpine
      ulimits:
        nofile:
          soft: 262144
          hard: 262144
      files:
        - location: /etc/clickhouse-server/users.d/logging.xml
          content: >-
            <yandex><logger><level>warning</level><console>true</console></logger><query_thread_log
            remove="remove"/><query_log remove="remove"/><text_log
            remove="remove"/><trace_log remove="remove"/><metric_log
            remove="remove"/><asynchronous_metric_log
            remove="remove"/><session_log remove="remove"/><part_log
            remove="remove"/></yandex>
        - location: /etc/clickhouse-server/config.d/logging.xml
          content: >-
            <yandex><profiles><default><log_queries>0</log_queries><log_query_threads>0</log_query_threads></default></profiles></yandex>
        - location: /docker-entrypoint-initdb.d/init.query
          content: CREATE DATABASE IF NOT EXISTS plausible;
        - location: /docker-entrypoint-initdb.d/init-db.sh
          content: >-
            clickhouse client --queries-file
            /docker-entrypoint-initdb.d/init.query
  variables:
    - id: $$config_base_url
      name: BASE_URL
      label: Base URL
      defaultValue: $$generate_fqdn
      description: >-
        You must set this to the FQDN of the Plausible Analytics instance. This is
        used to generate the links to the Plausible Analytics instance.
    - id: $$secret_database_url
      name: DATABASE_URL
      label: Database URL for PostgreSQL
      defaultValue: >-
        postgresql://$$config_postgres_user:$$secret_postgres_password@$$id-postgresql:5432/$$config_postgres_db
      description: ""
    - id: $$secret_clickhouse_database_url
      name: CLICKHOUSE_DATABASE_URL
      label: Database URL for Clickhouse
      defaultValue: http://$$id-clickhouse:8123/plausible
      description: ""
    - id: $$config_admin_user_email
      name: ADMIN_USER_EMAIL
      label: Admin Email Address
      defaultValue: admin@example.com
      description: This is the admin email. Please change it.
    - id: $$config_admin_user_name
      name: ADMIN_USER_NAME
      label: Admin User Name
      defaultValue: $$generate_username
      description: This is the admin username. Please change it.
    - id: $$secret_admin_user_pwd
      name: ADMIN_USER_PWD
      label: Admin User Password
      defaultValue: $$generate_password
      description: This is the admin password. Please change it.
      showOnConfiguration: true
    - id: $$secret_secret_key_base
      name: SECRET_KEY_BASE
      label: Secret Key Base
      defaultValue: $$generate_hex(64)
      description: ""
    - id: $$config_disable_auth
      name: DISABLE_AUTH
      label: Disable Authentication
      defaultValue: "false"
      description: ""
    - id: $$config_disable_registration
      name: DISABLE_REGISTRATION
      label: Disable Registration
      defaultValue: "true"
      description: ""
    - id: $$config_postgres_user
      main: $$id-postgresql
      name: POSTGRES_USER
      label: PostgreSQL Username
      defaultValue: postgresql
      description: ""
    - id: $$secret_postgres_password
      main: $$id-postgresql
      name: POSTGRES_PASSWORD
      label: PostgreSQL Password
      defaultValue: $$generate_password
      description: ""
      showOnConfiguration: true
    - id: $$config_postgres_db
      main: $$id-postgresql
      name: POSTGRES_DB
      label: PostgreSQL Database
      defaultValue: plausible
      description: ""
    - id: $$config_scriptName
      name: SCRIPT_NAME
      label: Custom Script Name
      defaultValue: plausible.js
      description: This is the default script name.
- templateVersion: 1.0.0
  defaultVersion: "1.17"
  documentation: https://docs.gitea.io

@@ -2317,6 +2475,7 @@
   ignore: true
   defaultVersion: latest
   documentation: https://docs.ghost.org
+  arch: amd64
   type: ghost-mariadb
   name: Ghost
   subname: (MariaDB)

@@ -2979,6 +3138,7 @@
 - templateVersion: 1.0.0
   defaultVersion: stable
   documentation: https://plausible.io/doc/
+  arch: amd64
   type: plausibleanalytics
   name: Plausible Analytics
   description: A lightweight and open-source website analytics tool.

@@ -3013,7 +3173,7 @@
         - "8000"
     $$id-postgresql:
       name: PostgreSQL
-      image: "bitnami/postgresql:13.2.0"
+      image: "bitnami/postgresql:13"
       volumes:
         - "$$id-postgresql-data:/bitnami/postgresql"
       environment:

@@ -3024,7 +3184,7 @@
       name: Clickhouse
       volumes:
         - "$$id-clickhouse-data:/var/lib/clickhouse"
-      image: "yandex/clickhouse-server:21.3.2.5"
+      image: "clickhouse/clickhouse-server:22.6-alpine"
       ulimits:
         nofile:
           soft: 262144
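The service templates above are plain YAML that apps/api parses with js-yaml (the same yaml.load call that shows up in the index.ts changes further down). A hedged sketch of a local validity check follows; the file path and the ServiceTemplate shape are assumptions based only on the fields visible in this diff.

```ts
// Illustrative only: load the template list with js-yaml (already an apps/api
// dependency) and assert the fields the new Pocketbase/Plausible entries rely on.
import { promises as fs } from "node:fs";
import yaml from "js-yaml";

type ServiceTemplate = {
  templateVersion: string;
  defaultVersion: string;
  type: string;
  name: string;
  services: Record<string, { image?: string; ports?: string[] }>;
  variables?: { id: string; name: string; defaultValue?: string }[];
};

async function checkTemplates(path: string): Promise<void> {
  const templates = yaml.load(await fs.readFile(path, "utf8")) as ServiceTemplate[];
  for (const t of templates) {
    if (!t.type || !t.services) {
      throw new Error(`Template "${t.name ?? "<unnamed>"}" is missing type or services`);
    }
  }
  console.log(`${templates.length} templates parsed OK`);
}

// The path is a placeholder; the real template file name is not shown in this diff.
checkTemplates("./templates.yaml").catch(console.error);
```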
@@ -1,7 +1,11 @@
 {
-  "watch": ["src"],
-  "ignore": ["src/**/*.test.ts"],
+  "watch": [
+    "src"
+  ],
+  "ignore": [
+    "src/**/*.test.ts"
+  ],
   "ext": "ts,mjs,json,graphql",
-  "exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --minify=true --platform=node --outdir=build --format=cjs && node build",
+  "exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --platform=node --outdir=build --format=cjs && node build",
   "legacyWatch": true
 }
@@ -9,7 +9,7 @@
     "db:studio": "prisma studio",
     "db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
     "dev": "nodemon",
-    "build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --minify=true --platform=node --outdir=build --format=cjs",
+    "build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs",
     "format": "prettier --write 'src/**/*.{js,ts,json,md}'",
     "lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
     "start": "NODE_ENV=production pnpm prisma migrate deploy && pnpm prisma generate && pnpm prisma db seed && node index.js"

@@ -26,9 +26,11 @@
     "@iarna/toml": "2.2.5",
     "@ladjs/graceful": "3.0.2",
     "@prisma/client": "4.6.1",
+    "@sentry/node": "7.21.1",
+    "@sentry/tracing": "7.21.1",
+    "axe": "11.0.0",
     "bcryptjs": "2.4.3",
     "bree": "9.1.2",
-    "axe":"11.0.0",
     "cabin": "11.0.1",
     "compare-versions": "5.0.1",
     "csv-parse": "5.3.2",

@@ -47,6 +49,7 @@
     "is-port-reachable": "4.0.0",
     "js-yaml": "4.1.0",
     "jsonwebtoken": "8.5.1",
+    "minimist": "^1.2.7",
     "node-forge": "1.3.1",
     "node-os-utils": "1.3.7",
     "p-all": "4.0.0",

@@ -54,6 +57,7 @@
     "prisma": "4.6.1",
     "public-ip": "6.0.1",
     "pump": "3.0.0",
+    "shell-quote": "^1.7.4",
     "socket.io": "4.5.3",
     "ssh-config": "4.1.6",
     "strip-ansi": "7.0.1",
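Together with the new @sentry/node and @sentry/tracing 7.21.1 dependencies, the commit drops the COOLIFY_SENTRY_DSN environment default and adds a sentryDSN column to Setting (see the migrations and schema below), so the DSN appears to move from the environment into the database. The sketch below shows a typical @sentry/node v7 initialization driven by that setting; the field names come from this diff, but the wiring is illustrative and not necessarily how apps/api does it.

```ts
// Hedged sketch: standard @sentry/node v7 init keyed off the new Setting.sentryDSN
// column and the new doNotTrack flag; not a copy of Coolify's actual wiring.
import * as Sentry from "@sentry/node";
import "@sentry/tracing"; // side-effect import registers tracing extensions in v7
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

export async function initSentryFromSettings(): Promise<void> {
  // The seed script below pins the singleton Setting row to id '0'.
  const settings = await prisma.setting.findUnique({ where: { id: "0" } });
  if (!settings?.sentryDSN || settings.doNotTrack) return;
  Sentry.init({
    dsn: settings.sentryDSN,
    tracesSampleRate: 0.1, // illustrative sample rate
  });
}
```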
@@ -0,0 +1,37 @@
/*
  Warnings:

  - You are about to drop the column `proxyHash` on the `Setting` table. All the data in the column will be lost.
  - You are about to drop the column `proxyPassword` on the `Setting` table. All the data in the column will be lost.
  - You are about to drop the column `proxyUser` on the `Setting` table. All the data in the column will be lost.

*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "DNSServers" TEXT,
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
    "proxyDefaultRedirect" TEXT,
    "isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,59 @@
-- CreateTable
CREATE TABLE "DockerRegistry" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "url" TEXT NOT NULL,
    "username" TEXT,
    "password" TEXT,
    "isSystemWide" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "teamId" TEXT,
    CONSTRAINT "DockerRegistry_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Application" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "repository" TEXT,
    "configHash" TEXT,
    "branch" TEXT,
    "buildPack" TEXT,
    "projectId" INTEGER,
    "port" INTEGER,
    "exposePort" INTEGER,
    "installCommand" TEXT,
    "buildCommand" TEXT,
    "startCommand" TEXT,
    "baseDirectory" TEXT,
    "publishDirectory" TEXT,
    "deploymentType" TEXT,
    "phpModules" TEXT,
    "pythonWSGI" TEXT,
    "pythonModule" TEXT,
    "pythonVariable" TEXT,
    "dockerFileLocation" TEXT,
    "denoMainFile" TEXT,
    "denoOptions" TEXT,
    "dockerComposeFile" TEXT,
    "dockerComposeFileLocation" TEXT,
    "dockerComposeConfiguration" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "destinationDockerId" TEXT,
    "gitSourceId" TEXT,
    "baseImage" TEXT,
    "baseBuildImage" TEXT,
    "dockerRegistryId" TEXT NOT NULL DEFAULT '0',
    CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "Application_dockerRegistryId_fkey" FOREIGN KEY ("dockerRegistryId") REFERENCES "DockerRegistry" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Application" ("baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "exposePort", "fqdn", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt") SELECT "baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "exposePort", "fqdn", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt" FROM "Application";
DROP TABLE "Application";
ALTER TABLE "new_Application" RENAME TO "Application";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,30 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "DNSServers" TEXT,
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
    "proxyDefaultRedirect" TEXT,
    "doNotTrack" BOOLEAN NOT NULL DEFAULT false,
    "isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,60 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "DNSServers" TEXT,
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
    "proxyDefaultRedirect" TEXT,
    "doNotTrack" BOOLEAN NOT NULL DEFAULT false,
    "isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", coalesce("isAPIDebuggingEnabled", false) AS "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
CREATE TABLE "new_GlitchTip" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "secretKeyBase" TEXT,
    "defaultEmail" TEXT NOT NULL,
    "defaultUsername" TEXT NOT NULL,
    "defaultPassword" TEXT NOT NULL,
    "defaultEmailFrom" TEXT NOT NULL DEFAULT 'glitchtip@domain.tdl',
    "emailSmtpHost" TEXT DEFAULT 'domain.tdl',
    "emailSmtpPort" INTEGER DEFAULT 25,
    "emailSmtpUser" TEXT,
    "emailSmtpPassword" TEXT,
    "emailSmtpUseTls" BOOLEAN NOT NULL DEFAULT false,
    "emailSmtpUseSsl" BOOLEAN NOT NULL DEFAULT false,
    "emailBackend" TEXT,
    "mailgunApiKey" TEXT,
    "sendgridApiKey" TEXT,
    "enableOpenUserRegistration" BOOLEAN NOT NULL DEFAULT true,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "GlitchTip_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_GlitchTip" ("createdAt", "defaultEmail", "defaultEmailFrom", "defaultPassword", "defaultUsername", "emailBackend", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUseSsl", "emailSmtpUseTls", "emailSmtpUser", "enableOpenUserRegistration", "id", "mailgunApiKey", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "sendgridApiKey", "serviceId", "updatedAt") SELECT "createdAt", "defaultEmail", "defaultEmailFrom", "defaultPassword", "defaultUsername", "emailBackend", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", coalesce("emailSmtpUseSsl", false) AS "emailSmtpUseSsl", coalesce("emailSmtpUseTls", false) AS "emailSmtpUseTls", "emailSmtpUser", "enableOpenUserRegistration", "id", "mailgunApiKey", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "sendgridApiKey", "serviceId", "updatedAt" FROM "GlitchTip";
DROP TABLE "GlitchTip";
ALTER TABLE "new_GlitchTip" RENAME TO "GlitchTip";
CREATE UNIQUE INDEX "GlitchTip_serviceId_key" ON "GlitchTip"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "sentryDSN" TEXT;

@@ -0,0 +1,31 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "DNSServers" TEXT NOT NULL DEFAULT '1.1.1.1,8.8.8.8',
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
    "proxyDefaultRedirect" TEXT,
    "doNotTrack" BOOLEAN NOT NULL DEFAULT false,
    "sentryDSN" TEXT,
    "isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "sentryDSN", "updatedAt") SELECT coalesce("DNSServers", '1.1.1.1,8.8.8.8') AS "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "sentryDSN", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,33 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "DNSServers" TEXT NOT NULL DEFAULT '1.1.1.1,8.8.8.8',
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
    "numberOfDockerImagesKeptLocally" INTEGER NOT NULL DEFAULT 3,
    "proxyDefaultRedirect" TEXT,
    "doNotTrack" BOOLEAN NOT NULL DEFAULT false,
    "sentryDSN" TEXT,
    "previewSeparator" TEXT NOT NULL DEFAULT '.',
    "isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "numberOfDockerImagesKeptLocally", "proxyDefaultRedirect", "sentryDSN", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "numberOfDockerImagesKeptLocally", "proxyDefaultRedirect", "sentryDSN", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "gitCommitHash" TEXT;

@@ -0,0 +1,66 @@
/*
  Warnings:

  - You are about to drop the column `isSystemWide` on the `DockerRegistry` table. All the data in the column will be lost.

*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_DockerRegistry" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "url" TEXT NOT NULL,
    "username" TEXT,
    "password" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "teamId" TEXT,
    CONSTRAINT "DockerRegistry_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_DockerRegistry" ("createdAt", "id", "name", "password", "teamId", "updatedAt", "url", "username") SELECT "createdAt", "id", "name", "password", "teamId", "updatedAt", "url", "username" FROM "DockerRegistry";
DROP TABLE "DockerRegistry";
ALTER TABLE "new_DockerRegistry" RENAME TO "DockerRegistry";
CREATE TABLE "new_Application" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "repository" TEXT,
    "configHash" TEXT,
    "branch" TEXT,
    "buildPack" TEXT,
    "projectId" INTEGER,
    "port" INTEGER,
    "exposePort" INTEGER,
    "installCommand" TEXT,
    "buildCommand" TEXT,
    "startCommand" TEXT,
    "baseDirectory" TEXT,
    "publishDirectory" TEXT,
    "deploymentType" TEXT,
    "phpModules" TEXT,
    "pythonWSGI" TEXT,
    "pythonModule" TEXT,
    "pythonVariable" TEXT,
    "dockerFileLocation" TEXT,
    "denoMainFile" TEXT,
    "denoOptions" TEXT,
    "dockerComposeFile" TEXT,
    "dockerComposeFileLocation" TEXT,
    "dockerComposeConfiguration" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "destinationDockerId" TEXT,
    "gitSourceId" TEXT,
    "gitCommitHash" TEXT,
    "baseImage" TEXT,
    "baseBuildImage" TEXT,
    "dockerRegistryId" TEXT,
    CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "Application_dockerRegistryId_fkey" FOREIGN KEY ("dockerRegistryId") REFERENCES "DockerRegistry" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_Application" ("baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "dockerRegistryId", "exposePort", "fqdn", "gitCommitHash", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt") SELECT "baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "dockerRegistryId", "exposePort", "fqdn", "gitCommitHash", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt" FROM "Application";
DROP TABLE "Application";
ALTER TABLE "new_Application" RENAME TO "Application";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "simpleDockerfile" TEXT;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "dockerRegistryImageName" TEXT;
@@ -21,26 +21,27 @@ model Certificate {
 model Setting {
   id String @id @default(cuid())
   fqdn String? @unique
-  isAPIDebuggingEnabled Boolean? @default(false)
-  isRegistrationEnabled Boolean @default(false)
   dualCerts Boolean @default(false)
   minPort Int @default(9000)
   maxPort Int @default(9100)
-  proxyPassword String
-  proxyUser String
-  proxyHash String?
-  proxyDefaultRedirect String?
-  isAutoUpdateEnabled Boolean @default(false)
-  isDNSCheckEnabled Boolean @default(true)
-  DNSServers String?
-  isTraefikUsed Boolean @default(true)
-  createdAt DateTime @default(now())
-  updatedAt DateTime @updatedAt
+  DNSServers String @default("1.1.1.1,8.8.8.8")
   ipv4 String?
   ipv6 String?
   arch String?
   concurrentBuilds Int @default(1)
   applicationStoragePathMigrationFinished Boolean @default(false)
+  numberOfDockerImagesKeptLocally Int @default(3)
+  proxyDefaultRedirect String?
+  doNotTrack Boolean @default(false)
+  sentryDSN String?
+  previewSeparator String @default(".")
+  isAPIDebuggingEnabled Boolean @default(false)
+  isRegistrationEnabled Boolean @default(true)
+  isAutoUpdateEnabled Boolean @default(false)
+  isDNSCheckEnabled Boolean @default(true)
+  isTraefikUsed Boolean @default(true)
+  createdAt DateTime @default(now())
+  updatedAt DateTime @updatedAt
 }

 model User {

@@ -83,6 +84,7 @@ model Team {
   service Service[]
   users User[]
   certificate Certificate[]
+  dockerRegistry DockerRegistry[]
 }

 model TeamInvitation {

@@ -96,7 +98,7 @@ model TeamInvitation {
 }

 model Application {
   id String @id @default(cuid())
   name String
   fqdn String?
   repository String?

@@ -122,20 +124,26 @@ model Application {
   dockerComposeFile String?
   dockerComposeFileLocation String?
   dockerComposeConfiguration String?
   createdAt DateTime @default(now())
   updatedAt DateTime @updatedAt
   destinationDockerId String?
   gitSourceId String?
+  gitCommitHash String?
   baseImage String?
   baseBuildImage String?
-  gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
-  destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
-  persistentStorage ApplicationPersistentStorage[]
   settings ApplicationSettings?
-  secrets Secret[]
-  teams Team[]
-  connectedDatabase ApplicationConnectedDatabase?
-  previewApplication PreviewApplication[]
+  dockerRegistryId String?
+  dockerRegistryImageName String?
+  simpleDockerfile String?
+  persistentStorage ApplicationPersistentStorage[]
+  secrets Secret[]
+  teams Team[]
+  connectedDatabase ApplicationConnectedDatabase?
+  previewApplication PreviewApplication[]
+  gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
+  destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
+  dockerRegistry DockerRegistry? @relation(fields: [dockerRegistryId], references: [id])
 }

 model PreviewApplication {

@@ -296,6 +304,19 @@ model SshKey {
   destinationDocker DestinationDocker[]
 }

+model DockerRegistry {
+  id String @id @default(cuid())
+  name String
+  url String
+  username String?
+  password String?
+  createdAt DateTime @default(now())
+  updatedAt DateTime @updatedAt
+  teamId String?
+  team Team? @relation(fields: [teamId], references: [id])
+  application Application[]
+}
+
 model GitSource {
   id String @id @default(cuid())
   name String

@@ -626,8 +647,8 @@ model GlitchTip {
   emailSmtpPort Int? @default(25)
   emailSmtpUser String?
   emailSmtpPassword String?
-  emailSmtpUseTls Boolean? @default(false)
-  emailSmtpUseSsl Boolean? @default(false)
+  emailSmtpUseTls Boolean @default(false)
+  emailSmtpUseSsl Boolean @default(false)
   emailBackend String?
   mailgunApiKey String?
   sendgridApiKey String?
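The schema now threads DockerRegistry through Team and Application and adds dockerRegistryImageName, simpleDockerfile and gitCommitHash to Application. A short sketch of how the generated Prisma client could use the new relation follows; model and field names are taken from the schema above, everything else (function name, sample values) is illustrative.

```ts
// Illustrative use of the new DockerRegistry model and Application fields
// via the generated Prisma client; sample values are placeholders.
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

export async function attachRegistry(applicationId: string, teamId: string) {
  // DockerRegistry.teamId is nullable, so registries can be team-scoped or global.
  const registry = await prisma.dockerRegistry.create({
    data: { name: "Docker Hub", url: "docker.io", teamId },
  });

  // Point an application at the registry and record the image name to push.
  return prisma.application.update({
    where: { id: applicationId },
    data: {
      dockerRegistryId: registry.id,
      dockerRegistryImageName: "someorg/someapp", // hypothetical image name
    },
  });
}
```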
@@ -1,18 +1,8 @@
 const dotEnvExtended = require('dotenv-extended');
 dotEnvExtended.load();
 const crypto = require('crypto');
-const generator = require('generate-password');
-const cuid = require('cuid');
 const { PrismaClient } = require('@prisma/client');
 const prisma = new PrismaClient();
-
-function generatePassword(length = 24) {
-    return generator.generate({
-        length,
-        numbers: true,
-        strict: true
-    });
-}
 const algorithm = 'aes-256-ctr';

 async function main() {

@@ -21,11 +11,8 @@ async function main() {
     if (!settingsFound) {
         await prisma.setting.create({
             data: {
-                isRegistrationEnabled: true,
-                proxyPassword: encrypt(generatePassword()),
-                proxyUser: cuid(),
+                id: '0',
                 arch: process.arch,
-                DNSServers: '1.1.1.1,8.8.8.8'
             }
         });
     } else {

@@ -34,11 +21,11 @@ async function main() {
                 id: settingsFound.id
             },
             data: {
-                isTraefikUsed: true,
-                proxyHash: null
+                id: '0'
             }
         });
     }
+    // Create local docker engine
     const localDocker = await prisma.destinationDocker.findFirst({
         where: { engine: '/var/run/docker.sock' }
     });

@@ -55,23 +42,18 @@ async function main() {

     // Set auto-update based on env variable
     const isAutoUpdateEnabled = process.env['COOLIFY_AUTO_UPDATE'] === 'true';
-    const settings = await prisma.setting.findFirst({});
-    if (settings) {
-        await prisma.setting.update({
-            where: {
-                id: settings.id
-            },
-            data: {
-                isAutoUpdateEnabled
-            }
-        });
-    }
+    await prisma.setting.update({
+        where: {
+            id: '0'
+        },
+        data: {
+            isAutoUpdateEnabled
+        }
+    });
+    // Create public github source
     const github = await prisma.gitSource.findFirst({
         where: { htmlUrl: 'https://github.com', forPublic: true }
     });
-    const gitlab = await prisma.gitSource.findFirst({
-        where: { htmlUrl: 'https://gitlab.com', forPublic: true }
-    });
     if (!github) {
         await prisma.gitSource.create({
             data: {

@@ -83,6 +65,10 @@ async function main() {
             }
         });
     }
+    // Create public gitlab source
+    const gitlab = await prisma.gitSource.findFirst({
+        where: { htmlUrl: 'https://gitlab.com', forPublic: true }
+    });
     if (!gitlab) {
         await prisma.gitSource.create({
             data: {
@@ -9,7 +9,7 @@ import autoLoad from '@fastify/autoload';
 import socketIO from 'fastify-socket.io'
 import socketIOServer from './realtime'

-import { asyncExecShell, cleanupDockerStorage, createRemoteEngineConfiguration, decrypt, encrypt, executeDockerCmd, executeSSHCmd, generateDatabaseConfiguration, isDev, listSettings, prisma, startTraefikProxy, startTraefikTCPProxy, version } from './lib/common';
+import { cleanupDockerStorage, createRemoteEngineConfiguration, decrypt, executeCommand, generateDatabaseConfiguration, isDev, listSettings, prisma, sentryDSN, startTraefikProxy, startTraefikTCPProxy, version } from './lib/common';
 import { scheduler } from './lib/scheduler';
 import { compareVersions } from 'compare-versions';
 import Graceful from '@ladjs/graceful'

@@ -19,14 +19,13 @@ import { verifyRemoteDockerEngineFn } from './routes/api/v1/destinations/handler
 import { checkContainer } from './lib/docker';
 import { migrateApplicationPersistentStorage, migrateServicesToNewTemplate } from './lib';
 import { refreshTags, refreshTemplates } from './routes/api/v1/handlers';
-
+import * as Sentry from '@sentry/node';
 declare module 'fastify' {
     interface FastifyInstance {
         config: {
             COOLIFY_APP_ID: string,
             COOLIFY_SECRET_KEY: string,
             COOLIFY_DATABASE_URL: string,
-            COOLIFY_SENTRY_DSN: string,
             COOLIFY_IS_ON: string,
             COOLIFY_WHITE_LABELED: string,
             COOLIFY_WHITE_LABELED_ICON: string | null,

@@ -37,6 +36,7 @@ declare module 'fastify' {

 const port = isDev ? 3001 : 3000;
 const host = '0.0.0.0';

 (async () => {
     const settings = await prisma.setting.findFirst()
     const fastify = Fastify({

@@ -58,10 +58,6 @@ const host = '0.0.0.0';
                 type: 'string',
                 default: 'file:../db/dev.db'
             },
-            COOLIFY_SENTRY_DSN: {
-                type: 'string',
-                default: null
-            },
             COOLIFY_IS_ON: {
                 type: 'string',
                 default: 'docker'

@@ -114,7 +110,6 @@ const host = '0.0.0.0';
             origin: isDev ? "*" : ''
         }
     })
-
     // To detect allowed origins
     // fastify.addHook('onRequest', async (request, reply) => {
     //     console.log(request.headers.host)

@@ -182,7 +177,7 @@ const host = '0.0.0.0';

     setInterval(async () => {
         await migrateServicesToNewTemplate()
-    }, isDev ? 1000 : 60000)
+    }, isDev ? 10000 : 60000)

     setInterval(async () => {
         await copySSLCertificates();

@@ -207,14 +202,14 @@ async function getIPAddress() {
     try {
         const settings = await listSettings();
         if (!settings.ipv4) {
-            console.log(`Getting public IPv4 address...`);
             const ipv4 = await publicIpv4({ timeout: 2000 })
+            console.log(`Getting public IPv4 address...`);
             await prisma.setting.update({ where: { id: settings.id }, data: { ipv4 } })
         }

         if (!settings.ipv6) {
-            console.log(`Getting public IPv6 address...`);
             const ipv6 = await publicIpv6({ timeout: 2000 })
+            console.log(`Getting public IPv6 address...`);
             await prisma.setting.update({ where: { id: settings.id }, data: { ipv6 } })
         }

@@ -228,13 +223,13 @@ async function getTagsTemplates() {
         const tags = await fs.readFile('./devTags.json', 'utf8')
         await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates)))
|
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates)))
|
||||||
await fs.writeFile('./tags.json', tags)
|
await fs.writeFile('./tags.json', tags)
|
||||||
console.log('Tags and templates loaded in dev mode...')
|
console.log('[004] Tags and templates loaded in dev mode...')
|
||||||
} else {
|
} else {
|
||||||
const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text()
|
const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text()
|
||||||
const response = await got.get('https://get.coollabs.io/coolify/service-templates.yaml').text()
|
const response = await got.get('https://get.coollabs.io/coolify/service-templates.yaml').text()
|
||||||
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)))
|
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)))
|
||||||
await fs.writeFile('/app/tags.json', tags)
|
await fs.writeFile('/app/tags.json', tags)
|
||||||
console.log('Tags and templates loaded...')
|
console.log('[004] Tags and templates loaded...')
|
||||||
}
|
}
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
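Note: the getIPAddress hunk above only moves the log line after the lookup. A small sketch of the underlying pattern; the 'public-ip' package name is an assumption based on the publicIpv4/publicIpv6 calls and the { timeout: 2000 } option visible in the diff.

import { publicIpv4, publicIpv6 } from 'public-ip';

async function detectPublicIps() {
    // Resolve the host's public addresses, tolerating hosts without one of the families.
    const ipv4 = await publicIpv4({ timeout: 2000 }).catch(() => null);
    const ipv6 = await publicIpv6({ timeout: 2000 }).catch(() => null);
    return { ipv4, ipv6 };
}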
@@ -243,16 +238,44 @@ async function getTagsTemplates() {
 }
 }
 async function initServer() {
+const appId = process.env['COOLIFY_APP_ID'];
+const settings = await prisma.setting.findUnique({ where: { id: '0' } })
 try {
-console.log(`Initializing server...`);
-await asyncExecShell(`docker network create --attachable coolify`);
+if (settings.doNotTrack === true) {
+console.log('[000] Telemetry disabled...')
+} else {
+if (settings.sentryDSN !== sentryDSN) {
+await prisma.setting.update({ where: { id: '0' }, data: { sentryDSN } })
+}
+// Initialize Sentry
+// Sentry.init({
+// dsn: sentryDSN,
+// environment: isDev ? 'development' : 'production',
+// release: version
+// });
+// console.log('[000] Sentry initialized...')
+}
+} catch (error) {
+console.error(error)
+}
+try {
+console.log(`[001] Initializing server...`);
+await executeCommand({ command: `docker network create --attachable coolify` });
 } catch (error) { }
 try {
+console.log(`[002] Cleanup stucked builds...`);
 const isOlder = compareVersions('3.8.1', version);
 if (isOlder === 1) {
 await prisma.build.updateMany({ where: { status: { in: ['running', 'queued'] } }, data: { status: 'failed' } });
 }
 } catch (error) { }
+try {
+console.log('[003] Cleaning up old build sources under /tmp/build-sources/...');
+await fs.rm('/tmp/build-sources', { recursive: true, force: true })
+} catch (error) {
+console.log(error)
+}
 }

 async function getArch() {
@@ -300,14 +323,10 @@ async function autoUpdater() {
 if (!isDev) {
 const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
 if (isAutoUpdateEnabled) {
-await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
-await asyncExecShell(`env | grep '^COOLIFY' > .env`);
-await asyncExecShell(
-`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
-);
-await asyncExecShell(
-`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
-);
+await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` })
+await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` })
+await executeCommand({ command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` })
+await executeCommand({ shell: true, command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"` })
 }
 } else {
 console.log('Updating (not really in dev mode).');
@@ -328,8 +347,8 @@ async function checkFluentBit() {
 });
 const { found } = await checkContainer({ dockerId: id, container: 'coolify-fluentbit', remove: true });
 if (!found) {
-await asyncExecShell(`env | grep '^COOLIFY' > .env`);
-await asyncExecShell(`docker compose up -d fluent-bit`);
+await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` });
+await executeCommand({ command: `docker compose up -d fluent-bit` });
 }
 }
 } catch (error) {
@@ -439,25 +458,25 @@ async function copySSLCertificates() {
 } catch (error) {
 console.log(error)
 } finally {
-await asyncExecShell(`find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete`)
+await executeCommand({ command: `find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete` })
 }
 }

 async function copyRemoteCertificates(id: string, dockerId: string, remoteIpAddress: string) {
 try {
-await asyncExecShell(`scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/`)
-await executeSSHCmd({ dockerId, command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'` })
-await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` })
-await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` })
+await executeCommand({ command: `scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/` })
+await executeCommand({ sshCommand: true, shell: true, dockerId, command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'` })
+await executeCommand({ sshCommand: true, dockerId, command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` })
+await executeCommand({ sshCommand: true, dockerId, command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` })
 } catch (error) {
 console.log({ error })
 }
 }
 async function copyLocalCertificates(id: string) {
 try {
-await asyncExecShell(`docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`)
-await asyncExecShell(`docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/`)
-await asyncExecShell(`docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/`)
+await executeCommand({ command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`, shell: true })
+await executeCommand({ command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` })
+await executeCommand({ command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` })
 } catch (error) {
 console.log({ error })
 }
@@ -475,12 +494,13 @@ async function cleanupStorage() {
 try {
 let stdout = null
 if (!isDev) {
-const output = await executeDockerCmd({ dockerId: destination.id, command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'` })
+const output = await executeCommand({ dockerId: destination.id, command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'`, shell: true })
 stdout = output.stdout;
 } else {
-const output = await asyncExecShell(
-`df -kPT /`
-);
+const output = await executeCommand({
+command:
+`df -kPT /`
+});
 stdout = output.stdout;
 }
 let lines = stdout.trim().split('\n');
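Note: throughout this file the old asyncExecShell, executeDockerCmd and executeSSHCmd helpers are folded into a single executeCommand call. A hedged sketch of what such a unified wrapper could look like, not the implementation in ./lib/common; only the option names visible at the call sites (command, shell, sshCommand, dockerId, stream, debug, buildId, applicationId) are taken from the source, and the sketch only covers the plain local case.

import { exec } from 'node:child_process';
import { promisify } from 'node:util';

const execAsync = promisify(exec);

interface ExecuteCommandOptions {
    command: string;
    shell?: boolean;       // run through a shell so pipes/redirects like "env | grep ... > .env" work
    sshCommand?: boolean;  // run on the remote engine that dockerId points at
    dockerId?: string;     // destination docker engine to run against
    stream?: boolean;      // stream output into the build log instead of buffering it
    debug?: boolean;
    buildId?: string;
    applicationId?: string;
}

async function executeCommandSketch({ command }: ExecuteCommandOptions) {
    // Local, buffered execution only; remote/SSH and streaming branches are omitted.
    const { stdout, stderr } = await execAsync(command);
    return { stdout, stderr };
}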
@@ -3,8 +3,8 @@ import crypto from 'crypto';
 import fs from 'fs/promises';
 import yaml from 'js-yaml';

-import { copyBaseConfigurationFiles, makeLabelForStandaloneApplication, saveBuildLog, setDefaultConfiguration } from '../lib/buildPacks/common';
-import { createDirectories, decrypt, defaultComposeConfiguration, executeDockerCmd, getDomain, prisma, decryptApplication } from '../lib/common';
+import { copyBaseConfigurationFiles, makeLabelForSimpleDockerfile, makeLabelForStandaloneApplication, saveBuildLog, saveDockerRegistryCredentials, setDefaultConfiguration } from '../lib/buildPacks/common';
+import { createDirectories, decrypt, defaultComposeConfiguration, getDomain, prisma, decryptApplication, isDev, pushToRegistry, executeCommand } from '../lib/common';
 import * as importers from '../lib/importers';
 import * as buildpacks from '../lib/buildPacks';

@@ -37,57 +37,257 @@ import * as buildpacks from '../lib/buildPacks';

 for (const queueBuild of queuedBuilds) {
 actions.push(async () => {
-let application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })
-let { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, previewApplicationId = null, forceRebuild, sourceRepository = null } = queueBuild
+let application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { dockerRegistry: true, destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })
+let { id: buildId, type, gitSourceId, sourceBranch = null, pullmergeRequestId = null, previewApplicationId = null, forceRebuild, sourceRepository = null } = queueBuild
 application = decryptApplication(application)
+
+if (!gitSourceId && application.simpleDockerfile) {
+const {
+id: applicationId,
+destinationDocker,
+destinationDockerId,
+secrets,
+port,
+persistentStorage,
+exposePort,
+simpleDockerfile,
+dockerRegistry
+} = application
+const { workdir } = await createDirectories({ repository: applicationId, buildId });
+try {
+if (queueBuild.status === 'running') {
+await saveBuildLog({ line: 'Building halted, restarting...', buildId, applicationId: application.id });
+}
+const volumes =
+persistentStorage?.map((storage) => {
+if (storage.oldPath) {
+return `${applicationId}${storage.path.replace(/\//gi, '-').replace('-app', '')}:${storage.path}`;
+}
+return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
+}) || [];
+
+if (destinationDockerId) {
+await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
+try {
+const { stdout: containers } = await executeCommand({
+dockerId: destinationDockerId,
+command: `docker ps -a --filter 'label=com.docker.compose.service=${applicationId}' --format {{.ID}}`
+})
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
+await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
+}
+}
+}
+} catch (error) {
+//
+}
+const envs = [
+`PORT=${port}`
+];
+if (secrets.length > 0) {
+secrets.forEach((secret) => {
+if (pullmergeRequestId) {
+const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+if (isSecretFound.length > 0) {
+envs.push(`${secret.name}=${isSecretFound[0].value}`);
+} else {
+envs.push(`${secret.name}=${secret.value}`);
+}
+} else {
+if (!secret.isPRMRSecret) {
+envs.push(`${secret.name}=${secret.value}`);
+}
+}
+});
+}
+await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
+
+let envFound = false;
+try {
+envFound = !!(await fs.stat(`${workdir}/.env`));
+} catch (error) {
+//
+}
+
+await fs.writeFile(`${workdir}/Dockerfile`, simpleDockerfile);
+if (dockerRegistry) {
+const { url, username, password } = dockerRegistry
+await saveDockerRegistryCredentials({ url, username, password, workdir })
+}
+
+const labels = makeLabelForSimpleDockerfile({
+applicationId,
+type,
+port: exposePort ? `${exposePort}:${port}` : port,
+});
+try {
+const composeVolumes = volumes.map((volume) => {
+return {
+[`${volume.split(':')[0]}`]: {
+name: volume.split(':')[0]
+}
+};
+});
+const composeFile = {
+version: '3.8',
+services: {
+[applicationId]: {
+build: {
+context: workdir,
+},
+image: `${applicationId}:${buildId}`,
+container_name: applicationId,
+volumes,
+labels,
+env_file: envFound ? [`${workdir}/.env`] : [],
+depends_on: [],
+expose: [port],
+...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
+...defaultComposeConfiguration(destinationDocker.network),
+}
+},
+networks: {
+[destinationDocker.network]: {
+external: true
+}
+},
+volumes: Object.assign({}, ...composeVolumes)
+};
+await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
+await executeCommand({ debug: true, dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
+await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
+} catch (error) {
+await saveBuildLog({ line: error, buildId, applicationId });
+const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
+if (foundBuild) {
+await prisma.build.update({
+where: { id: buildId },
+data: {
+status: 'failed'
+}
+});
+}
+throw new Error(error);
+}
+
+}
+} catch (error) {
+const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
+if (foundBuild) {
+await prisma.build.update({
+where: { id: buildId },
+data: {
+status: 'failed'
+}
+});
+}
+if (error !== 1) {
+await saveBuildLog({ line: error, buildId, applicationId: application.id });
+}
+if (error instanceof Error) {
+await saveBuildLog({ line: error.message, buildId, applicationId: application.id });
+}
+await fs.rm(workdir, { recursive: true, force: true });
+return;
+}
+try {
+if (application.dockerRegistryImageName) {
+const customTag = application.dockerRegistryImageName.split(':')[1] || buildId;
+const imageName = application.dockerRegistryImageName.split(':')[0];
+await saveBuildLog({ line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`, buildId, applicationId: application.id });
+await pushToRegistry(application, workdir, buildId, imageName, customTag)
+await saveBuildLog({ line: "Success", buildId, applicationId: application.id });
+}
+} catch (error) {
+if (error.stdout) {
+await saveBuildLog({ line: error.stdout, buildId, applicationId });
+}
+if (error.stderr) {
+await saveBuildLog({ line: error.stderr, buildId, applicationId });
+}
+} finally {
+await fs.rm(workdir, { recursive: true, force: true });
+await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
+}
+return;
+}
+
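Note: the same PR/MR secret-resolution loop appears several times in the new branch above. A refactoring sketch only, not code from the repository; the Secret shape ({ name, value, isPRMRSecret }) is taken from the diff, the helper itself is hypothetical.

interface Secret { name: string; value: string; isPRMRSecret: boolean; }

function buildEnvList(secrets: Secret[], pullmergeRequestId?: string | null): string[] {
    const envs: string[] = [];
    for (const secret of secrets) {
        if (pullmergeRequestId) {
            // Prefer the PR/MR-scoped value when one exists for this secret name.
            const prSecret = secrets.find((s) => s.name === secret.name && s.isPRMRSecret);
            envs.push(`${secret.name}=${(prSecret ?? secret).value}`);
        } else if (!secret.isPRMRSecret) {
            envs.push(`${secret.name}=${secret.value}`);
        }
    }
    return envs;
}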
 const originalApplicationId = application.id
+const {
+id: applicationId,
+name,
+destinationDocker,
+destinationDockerId,
+gitSource,
+configHash,
+fqdn,
+projectId,
+secrets,
+phpModules,
+settings,
+persistentStorage,
+pythonWSGI,
+pythonModule,
+pythonVariable,
+denoOptions,
+exposePort,
+baseImage,
+baseBuildImage,
+deploymentType,
+gitCommitHash,
+dockerRegistry
+} = application
+
+let {
+branch,
+repository,
+buildPack,
+port,
+installCommand,
+buildCommand,
+startCommand,
+baseDirectory,
+publishDirectory,
+dockerFileLocation,
+dockerComposeFileLocation,
+dockerComposeConfiguration,
+denoMainFile
+} = application
+
+let imageId = applicationId;
+let domain = getDomain(fqdn);
+
+let location = null;
+
+let tag = null;
+let customTag = null;
+let imageName = null;
+
+let imageFoundLocally = false;
+let imageFoundRemotely = false;
+
 if (pullmergeRequestId) {
 const previewApplications = await prisma.previewApplication.findMany({ where: { applicationId: originalApplicationId, pullmergeRequestId } })
 if (previewApplications.length > 0) {
 previewApplicationId = previewApplications[0].id
 }
+// Previews, we need to get the source branch and set subdomain
+branch = sourceBranch;
+domain = `${pullmergeRequestId}.${domain}`;
+imageId = `${applicationId}-${pullmergeRequestId}`;
+repository = sourceRepository || repository;
 }
-const usableApplicationId = previewApplicationId || originalApplicationId
+const { workdir, repodir } = await createDirectories({ repository, buildId });
 try {
 if (queueBuild.status === 'running') {
 await saveBuildLog({ line: 'Building halted, restarting...', buildId, applicationId: application.id });
 }
-const {
-id: applicationId,
-name,
-destinationDocker,
-destinationDockerId,
-gitSource,
-configHash,
-fqdn,
-projectId,
-secrets,
-phpModules,
-settings,
-persistentStorage,
-pythonWSGI,
-pythonModule,
-pythonVariable,
-denoOptions,
-exposePort,
-baseImage,
-baseBuildImage,
-deploymentType,
-} = application
-let {
-branch,
-repository,
-buildPack,
-port,
-installCommand,
-buildCommand,
-startCommand,
-baseDirectory,
-publishDirectory,
-dockerFileLocation,
-dockerComposeConfiguration,
-denoMainFile
-} = application
 const currentHash = crypto
 .createHash('sha256')
 .update(
@@ -113,22 +313,21 @@ import * as buildpacks from '../lib/buildPacks';
 )
 .digest('hex');
 const { debug } = settings;
-let imageId = applicationId;
-let domain = getDomain(fqdn);
+if (!debug) {
+await saveBuildLog({
+line: `Debug logging is disabled. Enable it above if necessary!`,
+buildId,
+applicationId
+});
+}
 const volumes =
 persistentStorage?.map((storage) => {
 if (storage.oldPath) {
-return `${applicationId}${storage.path.replace(/\//gi, '-').replace('-app','')}:${storage.path}`;
+return `${applicationId}${storage.path.replace(/\//gi, '-').replace('-app', '')}:${storage.path}`;
 }
 return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
 }) || [];
-// Previews, we need to get the source branch and set subdomain
-if (pullmergeRequestId) {
-branch = sourceBranch;
-domain = `${pullmergeRequestId}.${domain}`;
-imageId = `${applicationId}-${pullmergeRequestId}`;
-repository = sourceRepository || repository;
-}

 try {
 dockerComposeConfiguration = JSON.parse(dockerComposeConfiguration)
@@ -141,7 +340,7 @@ import * as buildpacks from '../lib/buildPacks';
 }
 if (destinationType === 'docker') {
 await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
-const { workdir, repodir } = await createDirectories({ repository, buildId });
 const configuration = await setDefaultConfiguration(application);

 buildPack = configuration.buildPack;
@@ -152,6 +351,7 @@ import * as buildpacks from '../lib/buildPacks';
 publishDirectory = configuration.publishDirectory;
 baseDirectory = configuration.baseDirectory || '';
 dockerFileLocation = configuration.dockerFileLocation;
+dockerComposeFileLocation = configuration.dockerComposeFileLocation;
 denoMainFile = configuration.denoMainFile;
 const commit = await importers[gitSource.type]({
 applicationId,
@@ -161,6 +361,8 @@ import * as buildpacks from '../lib/buildPacks';
 githubAppId: gitSource.githubApp?.id,
 gitlabAppId: gitSource.gitlabApp?.id,
 customPort: gitSource.customPort,
+gitCommitHash,
+configuration,
 repository,
 branch,
 buildId,
@@ -174,10 +376,21 @@ import * as buildpacks from '../lib/buildPacks';
 if (!commit) {
 throw new Error('No commit found?');
 }
-let tag = commit.slice(0, 7);
+tag = commit.slice(0, 7);
 if (pullmergeRequestId) {
 tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
 }
+if (application.dockerRegistryImageName) {
+imageName = application.dockerRegistryImageName.split(':')[0]
+customTag = application.dockerRegistryImageName.split(':')[1] || tag
+} else {
+customTag = tag
+imageName = applicationId;
+}
+
+if (pullmergeRequestId) {
+customTag = `${customTag}-${pullmergeRequestId}`;
+}
+
 try {
 await prisma.build.update({ where: { id: buildId }, data: { commit } });
@@ -187,7 +400,7 @@ import * as buildpacks from '../lib/buildPacks';
 if (configHash !== currentHash) {
 deployNeeded = true;
 if (configHash) {
-await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
+await saveBuildLog({ line: 'Configuration changed', buildId, applicationId });
 }
 } else {
 deployNeeded = false;
@@ -196,16 +409,33 @@ import * as buildpacks from '../lib/buildPacks';
 deployNeeded = true;
 }

-let imageFound = false;
 try {
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDocker.id,
 command: `docker image inspect ${applicationId}:${tag}`
 })
-imageFound = true;
+imageFoundLocally = true;
 } catch (error) {
 //
 }
+if (dockerRegistry) {
+const { url, username, password } = dockerRegistry
+location = await saveDockerRegistryCredentials({ url, username, password, workdir })
+}
+
+try {
+await executeCommand({
+dockerId: destinationDocker.id,
+command: `docker ${location ? `--config ${location}` : ''} pull ${imageName}:${customTag}`
+})
+imageFoundRemotely = true;
+} catch (error) {
+//
+}
+let imageFound = `${applicationId}:${tag}`
+if (imageFoundRemotely) {
+imageFound = `${imageName}:${customTag}`
+}
 await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
 const labels = makeLabelForStandaloneApplication({
 applicationId,
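Note: the hunks above introduce imageName, customTag, imageFoundLocally and imageFoundRemotely. A compact sketch of how the final image reference ends up being chosen, under the simplifying assumption that the inspect/pull side effects have already set imageFoundRemotely; the helper is hypothetical.

function pickImageReference(opts: {
    applicationId: string;
    tag: string;                       // short git commit, optionally suffixed with the PR id
    dockerRegistryImageName?: string;  // e.g. "user/app:latest"
    imageFoundRemotely: boolean;
}): string {
    const { applicationId, tag, dockerRegistryImageName, imageFoundRemotely } = opts;
    const imageName = dockerRegistryImageName ? dockerRegistryImageName.split(':')[0] : applicationId;
    const customTag = dockerRegistryImageName?.split(':')[1] || tag;
    // Prefer the registry image when it could be pulled; otherwise fall back to the locally built tag.
    return imageFoundRemotely ? `${imageName}:${customTag}` : `${applicationId}:${tag}`;
}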
@@ -226,7 +456,7 @@ import * as buildpacks from '../lib/buildPacks';
 publishDirectory
 });
 if (forceRebuild) deployNeeded = true
-if (!imageFound || deployNeeded) {
+if ((!imageFoundLocally && !imageFoundRemotely) || deployNeeded) {
 if (buildpacks[buildPack])
 await buildpacks[buildPack]({
 dockerId: destinationDocker.id,
@@ -260,6 +490,7 @@ import * as buildpacks from '../lib/buildPacks';
 pythonVariable,
 dockerFileLocation,
 dockerComposeConfiguration,
+dockerComposeFileLocation,
 denoMainFile,
 denoOptions,
 baseImage,
@@ -271,26 +502,37 @@ import * as buildpacks from '../lib/buildPacks';
 throw new Error(`Build pack ${buildPack} not found.`);
 }
 } else {
-await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
+if (imageFoundRemotely || deployNeeded) {
+await saveBuildLog({ line: `Container image ${imageFound} found in Docker Registry - reuising it`, buildId, applicationId });
+} else {
+if (imageFoundLocally || deployNeeded) {
+await saveBuildLog({ line: `Container image ${imageFound} found locally - reuising it`, buildId, applicationId });
+}
+}
 }

 if (buildPack === 'compose') {
 try {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
-})
-await executeDockerCmd({
-dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
+command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}`
 })
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
+await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
+}
+}
+}
 } catch (error) {
 //
 }
 try {
-await executeDockerCmd({ debug, buildId, applicationId, dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
-await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
-await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
+console.log({ debug })
+await executeCommand({ debug, buildId, applicationId, dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
+await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
 await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
 await prisma.application.update({
 where: { id: applicationId },
@@ -312,14 +554,19 @@ import * as buildpacks from '../lib/buildPacks';

 } else {
 try {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.service=${pullmergeRequestId ? imageId : applicationId}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
-})
-await executeDockerCmd({
-dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.service=${pullmergeRequestId ? imageId : applicationId}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
+command: `docker ps -a --filter 'label=com.docker.compose.service=${pullmergeRequestId ? imageId : applicationId}' --format {{.ID}}`
 })
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
+await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
+}
+}
+}
 } catch (error) {
 //
 }
@@ -343,6 +590,10 @@ import * as buildpacks from '../lib/buildPacks';
 });
 }
 await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
+if (dockerRegistry) {
+const { url, username, password } = dockerRegistry
+await saveDockerRegistryCredentials({ url, username, password, workdir })
+}
+
 let envFound = false;
 try {
@@ -351,7 +602,6 @@ import * as buildpacks from '../lib/buildPacks';
 //
 }
 try {
-await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
 const composeVolumes = volumes.map((volume) => {
 return {
 [`${volume.split(':')[0]}`]: {
@@ -363,7 +613,7 @@ import * as buildpacks from '../lib/buildPacks';
 version: '3.8',
 services: {
 [imageId]: {
-image: `${applicationId}:${tag}`,
+image: imageFound,
 container_name: imageId,
 volumes,
 env_file: envFound ? [`${workdir}/.env`] : [],
@@ -382,8 +632,8 @@ import * as buildpacks from '../lib/buildPacks';
 volumes: Object.assign({}, ...composeVolumes)
 };
 await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
-await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
-await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
+await executeCommand({ debug, dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
+await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
 } catch (error) {
 await saveBuildLog({ line: error, buildId, applicationId });
 const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
@@ -397,16 +647,14 @@ import * as buildpacks from '../lib/buildPacks';
 }
 throw new Error(error);
 }
-await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
-await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
 if (!pullmergeRequestId) await prisma.application.update({
 where: { id: applicationId },
 data: { configHash: currentHash }
 });
 }
 }
-}
-catch (error) {
+} catch (error) {
 const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
 if (foundBuild) {
 await prisma.build.update({
@@ -419,6 +667,29 @@ import * as buildpacks from '../lib/buildPacks';
 if (error !== 1) {
 await saveBuildLog({ line: error, buildId, applicationId: application.id });
 }
+if (error instanceof Error) {
+await saveBuildLog({ line: error.message, buildId, applicationId: application.id });
+}
+await fs.rm(workdir, { recursive: true, force: true });
+return;
+}
+try {
+if (application.dockerRegistryImageName && (!imageFoundRemotely || forceRebuild)) {
+await saveBuildLog({ line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`, buildId, applicationId: application.id });
+await pushToRegistry(application, workdir, tag, imageName, customTag)
+await saveBuildLog({ line: "Success", buildId, applicationId: application.id });
+}
+} catch (error) {
+if (error.stdout) {
+await saveBuildLog({ line: error.stdout, buildId, applicationId });
+}
+if (error.stderr) {
+await saveBuildLog({ line: error.stderr, buildId, applicationId });
+}
+
+} finally {
+await fs.rm(workdir, { recursive: true, force: true });
+await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
 }
 });
 }
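Note: the diff above replaces the `docker ps ... | xargs docker stop/rm` pipelines with an explicit loop over container IDs. A hedged sketch of that pattern as a standalone helper; it assumes an executeCommand wrapper shaped like the one used at the call sites and is not code from the repository.

async function removeContainersByLabel(
    executeCommand: (o: { dockerId: string; command: string }) => Promise<{ stdout: string }>,
    dockerId: string,
    label: string
) {
    // List matching container IDs first, then stop and remove each one individually.
    const { stdout } = await executeCommand({
        dockerId,
        command: `docker ps -a --filter 'label=${label}' --format {{.ID}}`
    });
    for (const container of stdout.split('\n').filter(Boolean)) {
        await executeCommand({ dockerId, command: `docker stop -t 0 ${container}` });
        await executeCommand({ dockerId, command: `docker rm --force ${container}` });
    }
}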
@@ -467,7 +467,6 @@ async function plausibleAnalytics(service: any, template: any) {
 // Disconnect old service data
 // await prisma.service.update({ where: { id: service.id }, data: { plausibleAnalytics: { disconnect: true } } })
 }

 async function migrateSettings(settings: any[], service: any, template: any) {
 for (const setting of settings) {
 try {
@@ -528,4 +527,4 @@ async function createVolumes(service: any, template: any) {
 // console.log('Creating volume', volumeName, path, containerId, 'for service', service.id, ', service name:', service.name)
 await prisma.servicePersistentStorage.findFirst({ where: { volumeName, serviceId: service.id } }) || await prisma.servicePersistentStorage.create({ data: { volumeName, path, containerId, predefined: true, service: { connect: { id: service.id } } } })
 }
 }
@@ -1,4 +1,4 @@
-import { base64Encode, encrypt, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
+import { base64Encode, decrypt, encrypt, executeCommand, generateTimestamp, getDomain, isARM, isDev, prisma, version } from "../common";
 import { promises as fs } from 'fs';
 import { day } from "../dayjs";

@@ -52,6 +52,14 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 {
 value: 'webdevops/apache:alpine',
 label: 'webdevops/apache:alpine'
+},
+{
+value: 'nginx:alpine',
+label: 'nginx:alpine'
+},
+{
+value: 'httpd:alpine',
+label: 'httpd:alpine (Apache)'
 }
 ];
 const rustVersions = [
@@ -214,8 +222,20 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 label: 'webdevops/php-apache:7.1-alpine'
 },
 {
-value: 'webdevops/php-nginx:7.1-alpine',
-label: 'webdevops/php-nginx:7.1-alpine'
+value: 'php:8.1-fpm',
+label: 'php:8.1-fpm'
+},
+{
+value: 'php:8.0-fpm',
+label: 'php:8.0-fpm'
+},
+{
+value: 'php:8.1-fpm-alpine',
+label: 'php:8.1-fpm-alpine'
+},
+{
+value: 'php:8.0-fpm-alpine',
+label: 'php:8.0-fpm-alpine'
 }
 ];
 const pythonVersions = [
@@ -306,8 +326,8 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 };
 if (nodeBased.includes(buildPack)) {
 if (deploymentType === 'static') {
-payload.baseImage = 'webdevops/nginx:alpine';
-payload.baseImages = staticVersions;
+payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
+payload.baseImages = isARM(process.arch) ? staticVersions.filter((version) => !version.value.includes('webdevops')) : staticVersions;
 payload.baseBuildImage = 'node:lts';
 payload.baseBuildImages = nodeVersions;
 } else {
@@ -318,8 +338,8 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 }
 }
 if (staticApps.includes(buildPack)) {
-payload.baseImage = 'webdevops/nginx:alpine';
-payload.baseImages = staticVersions;
+payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
+payload.baseImages = isARM(process.arch) ? staticVersions.filter((version) => !version.value.includes('webdevops')) : staticVersions;
 payload.baseBuildImage = 'node:lts';
 payload.baseBuildImages = nodeVersions;
 }
@@ -337,12 +357,12 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 payload.baseImage = 'denoland/deno:latest';
 }
 if (buildPack === 'php') {
-payload.baseImage = 'webdevops/php-apache:8.2-alpine';
-payload.baseImages = phpVersions;
+payload.baseImage = isARM(process.arch) ? 'php:8.1-fpm-alpine' : 'webdevops/php-apache:8.2-alpine';
+payload.baseImages = isARM(process.arch) ? phpVersions.filter((version) => !version.value.includes('webdevops')) : phpVersions
 }
 if (buildPack === 'laravel') {
-payload.baseImage = 'webdevops/php-apache:8.2-alpine';
-payload.baseImages = phpVersions;
+payload.baseImage = isARM(process.arch) ? 'php:8.1-fpm-alpine' : 'webdevops/php-apache:8.2-alpine';
+payload.baseImages = isARM(process.arch) ? phpVersions.filter((version) => !version.value.includes('webdevops')) : phpVersions
 payload.baseBuildImage = 'node:18';
 payload.baseBuildImages = nodeVersions;
 }
@@ -363,6 +383,7 @@ export const setDefaultConfiguration = async (data: any) => {
 publishDirectory,
 baseDirectory,
 dockerFileLocation,
+dockerComposeFileLocation,
 denoMainFile
 } = data;
 //@ts-ignore
@@ -392,6 +413,12 @@ export const setDefaultConfiguration = async (data: any) => {
 } else {
 dockerFileLocation = '/Dockerfile';
 }
+if (dockerComposeFileLocation) {
+if (!dockerComposeFileLocation.startsWith('/')) dockerComposeFileLocation = `/${dockerComposeFileLocation}`;
+if (dockerComposeFileLocation.endsWith('/')) dockerComposeFileLocation = dockerComposeFileLocation.slice(0, -1);
+} else {
+dockerComposeFileLocation = '/Dockerfile';
+}
 if (!denoMainFile) {
 denoMainFile = 'main.ts';
 }
@@ -405,6 +432,7 @@ export const setDefaultConfiguration = async (data: any) => {
 publishDirectory,
 baseDirectory,
 dockerFileLocation,
+dockerComposeFileLocation,
 denoMainFile
 };
 };
|
|||||||
buildId: string;
|
buildId: string;
|
||||||
applicationId: string;
|
applicationId: string;
|
||||||
}): Promise<any> => {
|
}): Promise<any> => {
|
||||||
|
if (buildId === 'undefined' || buildId === 'null' || !buildId) return;
|
||||||
|
if (applicationId === 'undefined' || applicationId === 'null' || !applicationId) return;
|
||||||
const { default: got } = await import('got')
|
const { default: got } = await import('got')
|
||||||
if (typeof line === 'object' && line) {
|
if (typeof line === 'object' && line) {
|
||||||
if (line.shortMessage) {
|
if (line.shortMessage) {
|
||||||
line = line.shortMessage + '\n' + line.stderr;
|
line = line.shortMessage + '\n' + line.stderr;
|
||||||
} else {
|
} else {
|
||||||
line = JSON.stringify(line);
|
line = JSON.stringify(line);
|
||||||
}
|
}
|
||||||
@ -564,6 +594,7 @@ export async function copyBaseConfigurationFiles(
|
|||||||
`
|
`
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
// TODO: Add more configuration files for other buildpacks, like apache2, etc.
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new Error(error);
|
throw new Error(error);
|
||||||
}
|
}
|
||||||
@ -577,6 +608,29 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function saveDockerRegistryCredentials({ url, username, password, workdir }) {
|
||||||
|
if (!username || !password) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
let decryptedPassword = decrypt(password);
|
||||||
|
const location = `${workdir}/.docker`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await fs.mkdir(`${workdir}/.docker`);
|
||||||
|
} catch (error) {
|
||||||
|
console.log(error);
|
||||||
|
}
|
||||||
|
const payload = JSON.stringify({
|
||||||
|
"auths": {
|
||||||
|
[url]: {
|
||||||
|
"auth": Buffer.from(`${username}:${decryptedPassword}`).toString('base64')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
await fs.writeFile(`${location}/config.json`, payload)
|
||||||
|
return location
|
||||||
|
}
|
||||||
export async function buildImage({
|
export async function buildImage({
|
||||||
applicationId,
|
applicationId,
|
||||||
tag,
|
tag,
|
||||||
@ -589,33 +643,36 @@ export async function buildImage({
|
|||||||
commit
|
commit
|
||||||
}) {
|
}) {
|
||||||
if (isCache) {
|
if (isCache) {
|
||||||
await saveBuildLog({ line: `Building cache image started.`, buildId, applicationId });
|
await saveBuildLog({ line: `Building cache image...`, buildId, applicationId });
|
||||||
} else {
|
} else {
|
||||||
await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
|
await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
|
||||||
}
|
|
||||||
if (!debug) {
|
|
||||||
await saveBuildLog({
|
|
||||||
line: `Debug logging is disabled. Enable it above if necessary!`,
|
|
||||||
buildId,
|
|
||||||
applicationId
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`
|
const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`
|
||||||
const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`
|
const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`
|
||||||
|
|
||||||
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker build --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}` })
|
let location = null
|
||||||
|
|
||||||
|
const { dockerRegistry } = await prisma.application.findUnique({ where: { id: applicationId }, select: { dockerRegistry: true } })
|
||||||
|
if (dockerRegistry) {
|
||||||
|
const { url, username, password } = dockerRegistry
|
||||||
|
location = await saveDockerRegistryCredentials({ url, username, password, workdir })
|
||||||
|
}
|
||||||
|
|
||||||
|
await executeCommand({ stream: true, debug, buildId, applicationId, dockerId, command: `docker ${location ? `--config ${location}` : ''} build --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}` })
|
||||||
|
|
||||||
const { status } = await prisma.build.findUnique({ where: { id: buildId } })
|
const { status } = await prisma.build.findUnique({ where: { id: buildId } })
|
||||||
if (status === 'canceled') {
|
if (status === 'canceled') {
|
||||||
throw new Error('Deployment canceled.')
|
throw new Error('Canceled.')
|
||||||
}
|
|
||||||
if (isCache) {
|
|
||||||
await saveBuildLog({ line: `Building cache image successful.`, buildId, applicationId });
|
|
||||||
} else {
|
|
||||||
await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
export function makeLabelForSimpleDockerfile({ applicationId, port, type }) {
|
||||||
|
return [
|
||||||
|
'coolify.managed=true',
|
||||||
|
`coolify.version=${version}`,
|
||||||
|
`coolify.applicationId=${applicationId}`,
|
||||||
|
`coolify.type=standalone-application`
|
||||||
|
];
|
||||||
|
}
|
||||||
export function makeLabelForStandaloneApplication({
	applicationId,
	fqdn,
@ -644,6 +701,7 @@ export function makeLabelForStandaloneApplication({
		`coolify.version=${version}`,
		`coolify.applicationId=${applicationId}`,
		`coolify.type=standalone-application`,
+		`coolify.name=${name}`,
		`coolify.configuration=${base64Encode(
			JSON.stringify({
				applicationId,
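The standalone-application labels now also carry `coolify.name`, alongside the base64-encoded `coolify.configuration` payload. A small sketch of how that payload round-trips; base64Encode is the helper used above and defined elsewhere in common.ts, while the decode side shown here is an assumption for illustration.

// Sketch: encoding and reading back the coolify.configuration label value.
const base64Encode = (text: string): string => Buffer.from(text).toString('base64');
const base64Decode = (text: string): string => Buffer.from(text, 'base64').toString('ascii');

const prefix = 'coolify.configuration=';
const label = `${prefix}${base64Encode(JSON.stringify({ applicationId: 'app-1', fqdn: 'https://example.com' }))}`;

// When a container is inspected later, the label can be turned back into an object:
const configuration = JSON.parse(base64Decode(label.slice(prefix.length)));
console.log(configuration.applicationId); // 'app-1'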
|
@ -1,127 +1,126 @@
|
|||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
import { defaultComposeConfiguration, executeDockerCmd } from '../common';
|
import { defaultComposeConfiguration, executeCommand } from '../common';
|
||||||
import { buildImage, saveBuildLog } from './common';
|
import { saveBuildLog } from './common';
|
||||||
import yaml from 'js-yaml';
|
import yaml from 'js-yaml';
|
||||||
|
|
||||||
export default async function (data) {
|
export default async function (data) {
|
||||||
let {
|
let {
|
||||||
applicationId,
|
applicationId,
|
||||||
debug,
|
debug,
|
||||||
buildId,
|
buildId,
|
||||||
dockerId,
|
dockerId,
|
||||||
network,
|
network,
|
||||||
volumes,
|
volumes,
|
||||||
labels,
|
labels,
|
||||||
workdir,
|
workdir,
|
||||||
baseDirectory,
|
baseDirectory,
|
||||||
secrets,
|
secrets,
|
||||||
pullmergeRequestId,
|
pullmergeRequestId,
|
||||||
port,
|
dockerComposeConfiguration,
|
||||||
dockerComposeConfiguration
|
dockerComposeFileLocation
|
||||||
} = data
|
} = data;
|
||||||
const fileYml = `${workdir}${baseDirectory}/docker-compose.yml`;
|
const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
|
||||||
const fileYaml = `${workdir}${baseDirectory}/docker-compose.yaml`;
|
const dockerComposeRaw = await fs.readFile(fileYaml, 'utf8');
|
||||||
let dockerComposeRaw = null;
|
const dockerComposeYaml = yaml.load(dockerComposeRaw);
|
||||||
let isYml = false;
|
if (!dockerComposeYaml.services) {
|
||||||
try {
|
throw 'No Services found in docker-compose file.';
|
||||||
dockerComposeRaw = await fs.readFile(`${fileYml}`, 'utf8')
|
}
|
||||||
isYml = true
|
const envs = [];
|
||||||
} catch (error) { }
|
if (secrets.length > 0) {
|
||||||
try {
|
secrets.forEach((secret) => {
|
||||||
dockerComposeRaw = await fs.readFile(`${fileYaml}`, 'utf8')
|
if (pullmergeRequestId) {
|
||||||
} catch (error) { }
|
const isSecretFound = secrets.filter((s) => s.name === secret.name && s.isPRMRSecret);
|
||||||
|
if (isSecretFound.length > 0) {
|
||||||
|
envs.push(`${secret.name}=${isSecretFound[0].value}`);
|
||||||
|
} else {
|
||||||
|
envs.push(`${secret.name}=${secret.value}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!secret.isPRMRSecret) {
|
||||||
|
envs.push(`${secret.name}=${secret.value}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
|
||||||
|
let envFound = false;
|
||||||
|
try {
|
||||||
|
envFound = !!(await fs.stat(`${workdir}/.env`));
|
||||||
|
} catch (error) {
|
||||||
|
//
|
||||||
|
}
|
||||||
|
const composeVolumes = [];
|
||||||
|
if (volumes.length > 0) {
|
||||||
|
for (const volume of volumes) {
|
||||||
|
let [v, path] = volume.split(':');
|
||||||
|
composeVolumes[v] = {
|
||||||
|
name: v
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if (!dockerComposeRaw) {
|
let networks = {};
|
||||||
throw ('docker-compose.yml or docker-compose.yaml are not found!');
|
for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
|
||||||
}
|
value['container_name'] = `${applicationId}-${key}`;
|
||||||
const dockerComposeYaml = yaml.load(dockerComposeRaw)
|
value['env_file'] = envFound ? [`${workdir}/.env`] : [];
|
||||||
if (!dockerComposeYaml.services) {
|
value['labels'] = labels;
|
||||||
throw 'No Services found in docker-compose file.'
|
// TODO: If we support separated volume for each service, we need to add it here
|
||||||
}
|
if (value['volumes']?.length > 0) {
|
||||||
const envs = [];
|
value['volumes'] = value['volumes'].map((volume) => {
|
||||||
if (Object.entries(dockerComposeYaml.services).length === 1) {
|
let [v, path, permission] = volume.split(':');
|
||||||
envs.push(`PORT=${port}`)
|
if (!path) {
|
||||||
}
|
path = v;
|
||||||
if (secrets.length > 0) {
|
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
|
||||||
secrets.forEach((secret) => {
|
} else {
|
||||||
if (pullmergeRequestId) {
|
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
|
||||||
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
|
}
|
||||||
if (isSecretFound.length > 0) {
|
composeVolumes[v] = {
|
||||||
envs.push(`${secret.name}=${isSecretFound[0].value}`);
|
name: v
|
||||||
} else {
|
};
|
||||||
envs.push(`${secret.name}=${secret.value}`);
|
return `${v}:${path}${permission ? ':' + permission : ''}`;
|
||||||
}
|
});
|
||||||
} else {
|
}
|
||||||
if (!secret.isPRMRSecret) {
|
if (volumes.length > 0) {
|
||||||
envs.push(`${secret.name}=${secret.value}`);
|
for (const volume of volumes) {
|
||||||
}
|
value['volumes'].push(volume);
|
||||||
}
|
}
|
||||||
});
|
}
|
||||||
}
|
if (dockerComposeConfiguration[key].port) {
|
||||||
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
|
value['expose'] = [dockerComposeConfiguration[key].port];
|
||||||
let envFound = false;
|
}
|
||||||
try {
|
if (value['networks']?.length > 0) {
|
||||||
envFound = !!(await fs.stat(`${workdir}/.env`));
|
value['networks'].forEach((network) => {
|
||||||
} catch (error) {
|
networks[network] = {
|
||||||
//
|
name: network
|
||||||
}
|
};
|
||||||
const composeVolumes = [];
|
});
|
||||||
if (volumes.length > 0) {
|
}
|
||||||
for (const volume of volumes) {
|
value['networks'] = [...(value['networks'] || ''), network];
|
||||||
let [v, path] = volume.split(':');
|
dockerComposeYaml.services[key] = {
|
||||||
composeVolumes[v] = {
|
...dockerComposeYaml.services[key],
|
||||||
name: v,
|
restart: defaultComposeConfiguration(network).restart,
|
||||||
}
|
deploy: defaultComposeConfiguration(network).deploy
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
|
if (Object.keys(composeVolumes).length > 0) {
|
||||||
let networks = {}
|
dockerComposeYaml['volumes'] = { ...composeVolumes };
|
||||||
for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
|
}
|
||||||
value['container_name'] = `${applicationId}-${key}`
|
dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } });
|
||||||
value['env_file'] = envFound ? [`${workdir}/.env`] : []
|
await fs.writeFile(fileYaml, yaml.dump(dockerComposeYaml));
|
||||||
value['labels'] = labels
|
await executeCommand({
|
||||||
// TODO: If we support separated volume for each service, we need to add it here
|
debug,
|
||||||
if (value['volumes']?.length > 0) {
|
buildId,
|
||||||
value['volumes'] = value['volumes'].map((volume) => {
|
applicationId,
|
||||||
let [v, path, permission] = volume.split(':');
|
dockerId,
|
||||||
if (!path) {
|
command: `docker compose --project-directory ${workdir} pull`
|
||||||
path = v;
|
});
|
||||||
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`
|
await saveBuildLog({ line: 'Pulling images from Compose file...', buildId, applicationId });
|
||||||
} else {
|
await executeCommand({
|
||||||
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`
|
debug,
|
||||||
}
|
buildId,
|
||||||
composeVolumes[v] = {
|
applicationId,
|
||||||
name: v
|
dockerId,
|
||||||
}
|
command: `docker compose --project-directory ${workdir} build --progress plain`
|
||||||
return `${v}:${path}${permission ? ':' + permission : ''}`
|
});
|
||||||
})
|
await saveBuildLog({ line: 'Building images from Compose file...', buildId, applicationId });
|
||||||
}
|
|
||||||
if (volumes.length > 0) {
|
|
||||||
for (const volume of volumes) {
|
|
||||||
value['volumes'].push(volume)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (dockerComposeConfiguration[key].port) {
|
|
||||||
value['expose'] = [dockerComposeConfiguration[key].port]
|
|
||||||
}
|
|
||||||
if (value['networks']?.length > 0) {
|
|
||||||
value['networks'].forEach((network) => {
|
|
||||||
networks[network] = {
|
|
||||||
name: network
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
value['networks'] = [...value['networks'] || '', network]
|
|
||||||
dockerComposeYaml.services[key] = { ...dockerComposeYaml.services[key], restart: defaultComposeConfiguration(network).restart, deploy: defaultComposeConfiguration(network).deploy }
|
|
||||||
|
|
||||||
}
|
|
||||||
if (Object.keys(composeVolumes).length > 0) {
|
|
||||||
dockerComposeYaml['volumes'] = { ...composeVolumes }
|
|
||||||
}
|
|
||||||
dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } })
|
|
||||||
await fs.writeFile(`${workdir}/docker-compose.${isYml ? 'yml' : 'yaml'}`, yaml.dump(dockerComposeYaml));
|
|
||||||
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} pull` })
|
|
||||||
await saveBuildLog({ line: 'Pulling images from Compose file.', buildId, applicationId });
|
|
||||||
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} build --progress plain` })
|
|
||||||
await saveBuildLog({ line: 'Building images from Compose file.', buildId, applicationId });
|
|
||||||
}
|
}
|
||||||
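The rewritten compose buildpack above loads the user-supplied file from `dockerComposeFileLocation`, then rewrites every service before running `docker compose pull` and `build`: it pins `container_name`, attaches the generated `.env`, applies the coolify labels, exposes the port chosen in `dockerComposeConfiguration`, and joins the destination network. The sketch below condenses that per-service rewrite; identifiers such as `applicationId`, `network`, and the restart value are illustrative stand-ins, not the exact values used by the buildpack.

// Minimal sketch of the per-service normalization, assuming a js-yaml parsed document.
import yaml from 'js-yaml';

function normalizeService(compose: any, key: string, applicationId: string, network: string, labels: string[], exposePort?: number) {
	const service = compose.services[key];
	service.container_name = `${applicationId}-${key}`;          // predictable container names
	service.env_file = [`/tmp/.env`];                            // one generated env file for every service
	service.labels = labels;                                     // coolify.* labels for later discovery
	if (exposePort) service.expose = [exposePort];               // port picked in dockerComposeConfiguration
	service.networks = [...(service.networks || []), network];   // join the destination network
	compose.services[key] = { ...service, restart: 'unless-stopped' }; // stand-in for defaultComposeConfiguration(network)
	compose.networks = { ...(compose.networks || {}), [network]: { external: true } };
	return compose;
}

// Usage: const doc: any = yaml.load(raw); normalizeService(doc, 'web', 'app-1', 'coolify-net', [], 3000);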
@ -20,7 +20,11 @@ export default async function (data) {
		.toString()
		.trim()
		.split('\n');
-	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
+	Dockerfile.forEach((line, index) => {
+		if (line.startsWith('FROM')) {
+			Dockerfile.splice(index + 1, 0, `LABEL coolify.buildId=${buildId}`);
+		}
+	});
	if (secrets.length > 0) {
		secrets.forEach((secret) => {
			if (secret.isBuildSecret) {
@ -28,11 +32,9 @@ export default async function (data) {
					(pullmergeRequestId && secret.isPRMRSecret) ||
					(!pullmergeRequestId && !secret.isPRMRSecret)
				) {
-					Dockerfile.unshift(`ARG ${secret.name}=${secret.value}`);
					Dockerfile.forEach((line, index) => {
						if (line.startsWith('FROM')) {
-							Dockerfile.splice(index + 1, 0, `ARG ${secret.name}`);
+							Dockerfile.splice(index + 1, 0, `ARG ${secret.name}=${secret.value}`);
						}
					});
				}
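The LABEL and ARG lines are now spliced in after every FROM instruction instead of being pushed or unshifted once, so they also apply to later stages of a multi-stage Dockerfile. A short sketch of the same idea without in-place mutation; the function and variable names are illustrative only.

// Sketch: insert a line after each FROM so every build stage carries it.
function injectAfterFrom(dockerfileLines: string[], injected: string): string[] {
	const result: string[] = [];
	for (const line of dockerfileLines) {
		result.push(line);
		if (line.startsWith('FROM')) result.push(injected);
	}
	return result;
}

// injectAfterFrom(['FROM node:18 AS build', 'RUN npm ci', 'FROM nginx'], 'LABEL coolify.buildId=abc')
// -> the label appears once per stage, which a single push/unshift would not achieve.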
@ -1,17 +1,16 @@
-import { executeDockerCmd, prisma } from "../common"
+import { executeCommand } from "../common"
import { saveBuildLog } from "./common";

export default async function (data: any): Promise<void> {
	const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory, baseImage } = data
	try {
-		await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
-		await executeDockerCmd({
+		await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
+		await executeCommand({
			buildId,
			debug,
			dockerId,
			command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder ${baseImage}`
		})
-		await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
	} catch (error) {
		throw error;
	}
}
@ -1,6 +1,6 @@
import { promises as fs } from 'fs';
import TOML from '@iarna/toml';
-import { asyncExecShell } from '../common';
+import { executeCommand } from '../common';
import { buildCacheImageWithCargo, buildImage } from './common';

const createDockerfile = async (data, image, name): Promise<void> => {
@ -28,7 +28,7 @@ const createDockerfile = async (data, image, name): Promise<void> => {
export default async function (data) {
	try {
		const { workdir, baseImage, baseBuildImage } = data;
-		const { stdout: cargoToml } = await asyncExecShell(`cat ${workdir}/Cargo.toml`);
+		const { stdout: cargoToml } = await executeCommand({ command: `cat ${workdir}/Cargo.toml` });
		const parsedToml: any = TOML.parse(cargoToml);
		const name = parsedToml.package.name;
		await buildCacheImageWithCargo(data, baseBuildImage);
@ -18,7 +18,11 @@ const createDockerfile = async (data, image): Promise<void> => {
	const Dockerfile: Array<string> = [];

	Dockerfile.push(`FROM ${image}`);
-	Dockerfile.push('WORKDIR /app');
+	if (baseImage?.includes('httpd')) {
+		Dockerfile.push('WORKDIR /usr/local/apache2/htdocs/');
+	} else {
+		Dockerfile.push('WORKDIR /app');
+	}
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	if (secrets.length > 0) {
		secrets.forEach((secret) => {
@ -8,18 +8,20 @@ import type { Config } from 'unique-names-generator';
import generator from 'generate-password';
import crypto from 'crypto';
import { promises as dns } from 'dns';
+import * as Sentry from '@sentry/node';
import { PrismaClient } from '@prisma/client';
import os from 'os';
import sshConfig from 'ssh-config';
import jsonwebtoken from 'jsonwebtoken';
import { checkContainer, removeContainer } from './docker';
import { day } from './dayjs';
-import { saveBuildLog } from './buildPacks/common';
+import { saveBuildLog, saveDockerRegistryCredentials } from './buildPacks/common';
import { scheduler } from './scheduler';
+import type { ExecaChildProcess } from 'execa';

-export const version = '3.11.13';
+export const version = '3.12.0';
export const isDev = process.env.NODE_ENV === 'development';
+export const sentryDSN = 'https://409f09bcb7af47928d3e0f46b78987f3@o1082494.ingest.sentry.io/4504236622217216';
const algorithm = 'aes-256-ctr';
const customConfig: Config = {
	dictionaries: [adjectives, colors, animals],
@ -62,7 +64,6 @@ const otherTraefikEndpoint = isDev
	: 'http://coolify:3000/webhooks/traefik/other.json';

export const uniqueName = (): string => uniqueNamesGenerator(customConfig);
-export const asyncExecShell = util.promisify(exec);
export const asyncExecShellStream = async ({
	debug,
	buildId,
@ -302,7 +303,7 @@ export async function isDomainConfigured({

export async function getContainerUsage(dockerId: string, container: string): Promise<any> {
	try {
-		const { stdout } = await executeDockerCmd({
+		const { stdout } = await executeCommand({
			dockerId,
			command: `docker container stats ${container} --no-stream --no-trunc --format "{{json .}}"`
		});
@ -507,36 +508,13 @@ export async function createRemoteEngineConfiguration(id: string) {
		remoteUser
	} = await prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } });
	await fs.writeFile(sshKeyFile, decrypt(privateKey) + '\n', { encoding: 'utf8', mode: 400 });
-	// Needed for remote docker compose
-	// const { stdout: numberOfSSHAgentsRunning } = await asyncExecShell(
-	// 	`ps ax | grep [s]sh-agent | grep coolify-ssh-agent.pid | grep -v grep | wc -l`
-	// );
-	// if (numberOfSSHAgentsRunning !== '' && Number(numberOfSSHAgentsRunning.trim()) == 0) {
-	// 	try {
-	// 		await fs.stat(`/tmp/coolify-ssh-agent.pid`);
-	// 		await fs.rm(`/tmp/coolify-ssh-agent.pid`);
-	// 	} catch (error) { }
-	// 	await asyncExecShell(`eval $(ssh-agent -sa /tmp/coolify-ssh-agent.pid)`);
-	// }
-	// await asyncExecShell(`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh-add -q ${sshKeyFile}`);
-
-	// const { stdout: numberOfSSHTunnelsRunning } = await asyncExecShell(
-	// 	`ps ax | grep 'ssh -F /dev/null -o StrictHostKeyChecking no -fNL ${localPort}:localhost:${remotePort}' | grep -v grep | wc -l`
-	// );
-	// if (numberOfSSHTunnelsRunning !== '' && Number(numberOfSSHTunnelsRunning.trim()) == 0) {
-	// 	try {
-	// 		await asyncExecShell(
-	// 			`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh -F /dev/null -o "StrictHostKeyChecking no" -fNL ${localPort}:localhost:${remotePort} ${remoteUser}@${remoteIpAddress}`
-	// 		);
-	// 	} catch (error) { }
-	// }
	const config = sshConfig.parse('');
	const Host = `${remoteIpAddress}-remote`

	try {
-		await asyncExecShell(`ssh-keygen -R ${Host}`);
-		await asyncExecShell(`ssh-keygen -R ${remoteIpAddress}`);
-		await asyncExecShell(`ssh-keygen -R localhost:${localPort}`);
+		await executeCommand({ command: `ssh-keygen -R ${Host}` });
+		await executeCommand({ command: `ssh-keygen -R ${remoteIpAddress}` });
+		await executeCommand({ command: `ssh-keygen -R localhost:${localPort}` });
	} catch (error) { }

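createRemoteEngineConfiguration keeps writing a `${remoteIpAddress}-remote` Host entry to ~/.ssh/config so that docker can later be driven through `DOCKER_HOST=ssh://<ip>-remote`. The concrete values it appends are in the next hunk of this file; the sketch below only illustrates the mechanism with the ssh-config package, and the option names shown are assumptions.

// Sketch, not the literal implementation: append a Host entry and persist it.
import sshConfig from 'ssh-config';
import { promises as fs } from 'fs';
import os from 'os';

async function writeRemoteHostEntry(remoteIpAddress: string, remoteUser: string, remotePort: number, sshKeyFile: string) {
	const config = sshConfig.parse('');
	config.append({
		Host: `${remoteIpAddress}-remote`,      // referenced later as DOCKER_HOST=ssh://<ip>-remote
		Hostname: remoteIpAddress,
		Port: String(remotePort),
		User: remoteUser,
		IdentityFile: sshKeyFile,
		StrictHostKeyChecking: 'no'             // assumed option; the real entry may differ
	});
	await fs.writeFile(`${os.homedir()}/.ssh/config`, sshConfig.stringify(config));
}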
@ -565,56 +543,130 @@ export async function createRemoteEngineConfiguration(id: string) {
|
|||||||
}
|
}
|
||||||
return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config));
|
return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config));
|
||||||
}
|
}
|
||||||
export async function executeSSHCmd({ dockerId, command }) {
|
export async function executeCommand({ command, dockerId = null, sshCommand = false, shell = false, stream = false, buildId, applicationId, debug }: { command: string, sshCommand?: boolean, shell?: boolean, stream?: boolean, dockerId?: string, buildId?: string, applicationId?: string, debug?: boolean }): Promise<ExecaChildProcess<string>> {
|
||||||
const { execaCommand } = await import('execa')
|
const { execa, execaCommand } = await import('execa')
|
||||||
let { remoteEngine, remoteIpAddress } = await prisma.destinationDocker.findUnique({ where: { id: dockerId } })
|
const { parse } = await import('shell-quote')
|
||||||
if (remoteEngine) {
|
const parsedCommand = parse(command);
|
||||||
await createRemoteEngineConfiguration(dockerId)
|
const dockerCommand = parsedCommand[0];
|
||||||
}
|
const dockerArgs = parsedCommand.slice(1);
|
||||||
if (process.env.CODESANDBOX_HOST) {
|
|
||||||
if (command.startsWith('docker compose')) {
|
if (dockerId) {
|
||||||
command = command.replace(/docker compose/gi, 'docker-compose')
|
let { remoteEngine, remoteIpAddress, engine } = await prisma.destinationDocker.findUnique({ where: { id: dockerId } })
|
||||||
|
if (remoteEngine) {
|
||||||
|
await createRemoteEngineConfiguration(dockerId);
|
||||||
|
engine = `ssh://${remoteIpAddress}-remote`;
|
||||||
|
} else {
|
||||||
|
engine = 'unix:///var/run/docker.sock';
|
||||||
}
|
}
|
||||||
}
|
if (process.env.CODESANDBOX_HOST) {
|
||||||
return await execaCommand(`ssh ${remoteIpAddress}-remote ${command}`)
|
if (command.startsWith('docker compose')) {
|
||||||
}
|
command = command.replace(/docker compose/gi, 'docker-compose');
|
||||||
export async function executeDockerCmd({ debug, buildId, applicationId, dockerId, command }: { debug?: boolean, buildId?: string, applicationId?: string, dockerId: string, command: string }): Promise<any> {
|
}
|
||||||
const { execaCommand } = await import('execa')
|
}
|
||||||
let { remoteEngine, remoteIpAddress, engine } = await prisma.destinationDocker.findUnique({ where: { id: dockerId } })
|
if (sshCommand) {
|
||||||
if (remoteEngine) {
|
if (shell) {
|
||||||
await createRemoteEngineConfiguration(dockerId);
|
return execaCommand(`ssh ${remoteIpAddress}-remote ${command}`);
|
||||||
engine = `ssh://${remoteIpAddress}-remote`;
|
}
|
||||||
|
return await execa('ssh', [`${remoteIpAddress}-remote`, dockerCommand, ...dockerArgs]);
|
||||||
|
}
|
||||||
|
if (stream) {
|
||||||
|
return await new Promise(async (resolve, reject) => {
|
||||||
|
let subprocess = null;
|
||||||
|
if (shell) {
|
||||||
|
subprocess = execaCommand(command, {
|
||||||
|
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
subprocess = execa(dockerCommand, dockerArgs, {
|
||||||
|
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
const logs = [];
|
||||||
|
subprocess.stdout.on('data', async (data) => {
|
||||||
|
const stdout = data.toString();
|
||||||
|
const array = stdout.split('\n');
|
||||||
|
for (const line of array) {
|
||||||
|
if (line !== '\n' && line !== '') {
|
||||||
|
const log = {
|
||||||
|
line: `${line.replace('\n', '')}`,
|
||||||
|
buildId,
|
||||||
|
applicationId
|
||||||
|
}
|
||||||
|
logs.push(log);
|
||||||
|
if (debug) {
|
||||||
|
await saveBuildLog(log);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
subprocess.stderr.on('data', async (data) => {
|
||||||
|
const stderr = data.toString();
|
||||||
|
const array = stderr.split('\n');
|
||||||
|
for (const line of array) {
|
||||||
|
if (line !== '\n' && line !== '') {
|
||||||
|
const log = {
|
||||||
|
line: `${line.replace('\n', '')}`,
|
||||||
|
buildId,
|
||||||
|
applicationId
|
||||||
|
}
|
||||||
|
logs.push(log);
|
||||||
|
if (debug) {
|
||||||
|
await saveBuildLog(log);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
subprocess.on('exit', async (code) => {
|
||||||
|
if (code === 0) {
|
||||||
|
resolve(code);
|
||||||
|
} else {
|
||||||
|
if (!debug) {
|
||||||
|
for (const log of logs) {
|
||||||
|
await saveBuildLog(log);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
reject(code);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
if (shell) {
|
||||||
|
return await execaCommand(command, {
|
||||||
|
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
return await execa(dockerCommand, dockerArgs, {
|
||||||
|
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
engine = 'unix:///var/run/docker.sock';
|
if (shell) {
|
||||||
}
|
return execaCommand(command, { shell: true });
|
||||||
if (process.env.CODESANDBOX_HOST) {
|
|
||||||
if (command.startsWith('docker compose')) {
|
|
||||||
command = command.replace(/docker compose/gi, 'docker-compose');
|
|
||||||
}
|
}
|
||||||
|
return await execa(dockerCommand, dockerArgs);
|
||||||
}
|
}
|
||||||
if (command.startsWith(`docker build`) || command.startsWith(`pack build`) || command.startsWith(`docker compose build`)) {
|
|
||||||
return await asyncExecShellStream({ debug, buildId, applicationId, command, engine });
|
|
||||||
}
|
|
||||||
return await execaCommand(command, { env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine }, shell: true })
|
|
||||||
}
|
}
|
||||||
|
|
||||||
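The hunk above replaces executeSSHCmd and executeDockerCmd with a single executeCommand helper: it resolves the destination's engine (local socket or `ssh://<ip>-remote`), parses the command with shell-quote unless `shell: true` is passed, and, with `stream: true`, pipes stdout/stderr chunks into saveBuildLog. The snippet below is a simplified usage sketch of the non-shell, non-stream path; it skips the dockerId lookup and treats shell-quote's parse result as plain strings, which is a simplification.

// Simplified sketch of the non-shell execution path.
const run = async (command: string, engine: string) => {
	const { execa } = await import('execa');
	const { parse } = await import('shell-quote');
	const [program, ...args] = parse(command) as string[];   // e.g. 'docker', ['ps', '-a']
	return execa(program, args, {
		env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }    // 'unix:///var/run/docker.sock' or 'ssh://<ip>-remote'
	});
};

// const { stdout } = await run(`docker ps -a --format '{{json .}}'`, 'unix:///var/run/docker.sock');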
export async function startTraefikProxy(id: string): Promise<void> {
|
export async function startTraefikProxy(id: string): Promise<void> {
|
||||||
const { engine, network, remoteEngine, remoteIpAddress } = await prisma.destinationDocker.findUnique({ where: { id } })
|
const { engine, network, remoteEngine, remoteIpAddress } = await prisma.destinationDocker.findUnique({ where: { id } })
|
||||||
const { found } = await checkContainer({ dockerId: id, container: 'coolify-proxy', remove: true });
|
const { found } = await checkContainer({ dockerId: id, container: 'coolify-proxy', remove: true });
|
||||||
const { id: settingsId, ipv4, ipv6 } = await listSettings();
|
const { id: settingsId, ipv4, ipv6 } = await listSettings();
|
||||||
|
|
||||||
if (!found) {
|
if (!found) {
|
||||||
const { stdout: coolifyNetwork } = await executeDockerCmd({
|
const { stdout: coolifyNetwork } = await executeCommand({
|
||||||
dockerId: id,
|
dockerId: id,
|
||||||
command: `docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`
|
command: `docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!coolifyNetwork) {
|
if (!coolifyNetwork) {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId: id,
|
dockerId: id,
|
||||||
command: `docker network create --attachable coolify-infra`
|
command: `docker network create --attachable coolify-infra`
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
const { stdout: Config } = await executeDockerCmd({
|
const { stdout: Config } = await executeCommand({
|
||||||
dockerId: id,
|
dockerId: id,
|
||||||
command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
|
command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
|
||||||
});
|
});
|
||||||
@ -629,7 +681,7 @@ export async function startTraefikProxy(id: string): Promise<void> {
|
|||||||
}
|
}
|
||||||
traefikUrl = `${ip}/webhooks/traefik/remote/${id}`;
|
traefikUrl = `${ip}/webhooks/traefik/remote/${id}`;
|
||||||
}
|
}
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId: id,
|
dockerId: id,
|
||||||
command: `docker run --restart always \
|
command: `docker run --restart always \
|
||||||
--add-host 'host.docker.internal:host-gateway' \
|
--add-host 'host.docker.internal:host-gateway' \
|
||||||
@ -654,7 +706,6 @@ export async function startTraefikProxy(id: string): Promise<void> {
|
|||||||
--certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web \
|
--certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web \
|
||||||
--log.level=error`
|
--log.level=error`
|
||||||
});
|
});
|
||||||
await prisma.setting.update({ where: { id: settingsId }, data: { proxyHash: null } });
|
|
||||||
await prisma.destinationDocker.update({
|
await prisma.destinationDocker.update({
|
||||||
where: { id },
|
where: { id },
|
||||||
data: { isCoolifyProxyUsed: true }
|
data: { isCoolifyProxyUsed: true }
|
||||||
@ -678,13 +729,13 @@ export async function startTraefikProxy(id: string): Promise<void> {
|
|||||||
|
|
||||||
export async function configureNetworkTraefikProxy(destination: any): Promise<void> {
|
export async function configureNetworkTraefikProxy(destination: any): Promise<void> {
|
||||||
const { id } = destination;
|
const { id } = destination;
|
||||||
const { stdout: networks } = await executeDockerCmd({
|
const { stdout: networks } = await executeCommand({
|
||||||
dockerId: id,
|
dockerId: id,
|
||||||
command: `docker ps -a --filter name=coolify-proxy --format '{{json .Networks}}'`
|
command: `docker ps -a --filter name=coolify-proxy --format '{{json .Networks}}'`
|
||||||
});
|
});
|
||||||
const configuredNetworks = networks.replace(/"/g, '').replace('\n', '').split(',');
|
const configuredNetworks = networks.replace(/"/g, '').replace('\n', '').split(',');
|
||||||
if (!configuredNetworks.includes(destination.network)) {
|
if (!configuredNetworks.includes(destination.network)) {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId: destination.id,
|
dockerId: destination.id,
|
||||||
command: `docker network connect ${destination.network} coolify-proxy`
|
command: `docker network connect ${destination.network} coolify-proxy`
|
||||||
});
|
});
|
||||||
@ -699,13 +750,12 @@ export async function stopTraefikProxy(
|
|||||||
where: { id },
|
where: { id },
|
||||||
data: { isCoolifyProxyUsed: false }
|
data: { isCoolifyProxyUsed: false }
|
||||||
});
|
});
|
||||||
const { id: settingsId } = await prisma.setting.findFirst({});
|
|
||||||
await prisma.setting.update({ where: { id: settingsId }, data: { proxyHash: null } });
|
|
||||||
try {
|
try {
|
||||||
if (found) {
|
if (found) {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId: id,
|
dockerId: id,
|
||||||
command: `docker stop -t 0 coolify-proxy && docker rm coolify-proxy`
|
command: `docker stop -t 0 coolify-proxy && docker rm coolify-proxy`,
|
||||||
|
shell: true
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@ -714,9 +764,7 @@ export async function stopTraefikProxy(
|
|||||||
}
|
}
|
||||||
|
|
||||||
export async function listSettings(): Promise<any> {
|
export async function listSettings(): Promise<any> {
|
||||||
const settings = await prisma.setting.findFirst({});
|
return await prisma.setting.findUnique({ where: { id: '0' } });
|
||||||
if (settings.proxyPassword) settings.proxyPassword = decrypt(settings.proxyPassword);
|
|
||||||
return settings;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export function generateToken() {
|
export function generateToken() {
|
||||||
@ -1075,6 +1123,7 @@ export async function makeLabelForStandaloneDatabase({ id, image, volume }) {
|
|||||||
'coolify.managed=true',
|
'coolify.managed=true',
|
||||||
`coolify.version=${version}`,
|
`coolify.version=${version}`,
|
||||||
`coolify.type=standalone-database`,
|
`coolify.type=standalone-database`,
|
||||||
|
`coolify.name=${database.name}`,
|
||||||
`coolify.configuration=${base64Encode(
|
`coolify.configuration=${base64Encode(
|
||||||
JSON.stringify({
|
JSON.stringify({
|
||||||
version,
|
version,
|
||||||
@ -1092,7 +1141,7 @@ export const createDirectories = async ({
|
|||||||
repository: string;
|
repository: string;
|
||||||
buildId: string;
|
buildId: string;
|
||||||
}): Promise<{ workdir: string; repodir: string }> => {
|
}): Promise<{ workdir: string; repodir: string }> => {
|
||||||
repository = repository.replaceAll(' ', '')
|
if (repository) repository = repository.replaceAll(' ', '')
|
||||||
const repodir = `/tmp/build-sources/${repository}/`;
|
const repodir = `/tmp/build-sources/${repository}/`;
|
||||||
const workdir = `/tmp/build-sources/${repository}/${buildId}`;
|
const workdir = `/tmp/build-sources/${repository}/${buildId}`;
|
||||||
let workdirFound = false;
|
let workdirFound = false;
|
||||||
@ -1100,9 +1149,9 @@ export const createDirectories = async ({
|
|||||||
workdirFound = !!(await fs.stat(workdir));
|
workdirFound = !!(await fs.stat(workdir));
|
||||||
} catch (error) { }
|
} catch (error) { }
|
||||||
if (workdirFound) {
|
if (workdirFound) {
|
||||||
await asyncExecShell(`rm -fr ${workdir}`);
|
await executeCommand({ command: `rm -fr ${workdir}` });
|
||||||
}
|
}
|
||||||
await asyncExecShell(`mkdir -p ${workdir}`);
|
await executeCommand({ command: `mkdir -p ${workdir}` });
|
||||||
return {
|
return {
|
||||||
workdir,
|
workdir,
|
||||||
repodir
|
repodir
|
||||||
@ -1118,7 +1167,7 @@ export async function stopDatabaseContainer(database: any): Promise<boolean> {
|
|||||||
} = database;
|
} = database;
|
||||||
if (destinationDockerId) {
|
if (destinationDockerId) {
|
||||||
try {
|
try {
|
||||||
const { stdout } = await executeDockerCmd({
|
const { stdout } = await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker inspect --format '{{json .State}}' ${id}`
|
command: `docker inspect --format '{{json .State}}' ${id}`
|
||||||
});
|
});
|
||||||
@ -1146,9 +1195,10 @@ export async function stopTcpHttpProxy(
|
|||||||
const { found } = await checkContainer({ dockerId, container });
|
const { found } = await checkContainer({ dockerId, container });
|
||||||
try {
|
try {
|
||||||
if (found) {
|
if (found) {
|
||||||
return await executeDockerCmd({
|
return await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker stop -t 0 ${container} && docker rm ${container}`
|
command: `docker stop -t 0 ${container} && docker rm ${container}`,
|
||||||
|
shell: true
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@ -1170,34 +1220,34 @@ export async function updatePasswordInDb(database, user, newPassword, isRoot) {
|
|||||||
} = database;
|
} = database;
|
||||||
if (destinationDockerId) {
|
if (destinationDockerId) {
|
||||||
if (type === 'mysql') {
|
if (type === 'mysql') {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"ALTER USER '${user}'@'%' IDENTIFIED WITH caching_sha2_password BY '${newPassword}';\"`
|
command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"ALTER USER '${user}'@'%' IDENTIFIED WITH caching_sha2_password BY '${newPassword}';\"`
|
||||||
});
|
});
|
||||||
} else if (type === 'mariadb') {
|
} else if (type === 'mariadb') {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"SET PASSWORD FOR '${user}'@'%' = PASSWORD('${newPassword}');\"`
|
command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"SET PASSWORD FOR '${user}'@'%' = PASSWORD('${newPassword}');\"`
|
||||||
});
|
});
|
||||||
} else if (type === 'postgresql') {
|
} else if (type === 'postgresql') {
|
||||||
if (isRoot) {
|
if (isRoot) {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker exec ${id} psql postgresql://postgres:${rootUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role postgres WITH PASSWORD '${newPassword}'"`
|
command: `docker exec ${id} psql postgresql://postgres:${rootUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role postgres WITH PASSWORD '${newPassword}'"`
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker exec ${id} psql postgresql://${dbUser}:${dbUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role ${user} WITH PASSWORD '${newPassword}'"`
|
command: `docker exec ${id} psql postgresql://${dbUser}:${dbUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role ${user} WITH PASSWORD '${newPassword}'"`
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} else if (type === 'mongodb') {
|
} else if (type === 'mongodb') {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker exec ${id} mongo 'mongodb://${rootUser}:${rootUserPassword}@${id}:27017/admin?readPreference=primary&ssl=false' --eval "db.changeUserPassword('${user}','${newPassword}')"`
|
command: `docker exec ${id} mongo 'mongodb://${rootUser}:${rootUserPassword}@${id}:27017/admin?readPreference=primary&ssl=false' --eval "db.changeUserPassword('${user}','${newPassword}')"`
|
||||||
});
|
});
|
||||||
} else if (type === 'redis') {
|
} else if (type === 'redis') {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker exec ${id} redis-cli -u redis://${dbUserPassword}@${id}:6379 --raw CONFIG SET requirepass ${newPassword}`
|
command: `docker exec ${id} redis-cli -u redis://${dbUserPassword}@${id}:6379 --raw CONFIG SET requirepass ${newPassword}`
|
||||||
});
|
});
|
||||||
@ -1371,7 +1421,7 @@ export async function startTraefikTCPProxy(
|
|||||||
});
|
});
|
||||||
try {
|
try {
|
||||||
if (foundDependentContainer && !found) {
|
if (foundDependentContainer && !found) {
|
||||||
const { stdout: Config } = await executeDockerCmd({
|
const { stdout: Config } = await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
|
command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
|
||||||
});
|
});
|
||||||
@ -1418,16 +1468,17 @@ export async function startTraefikTCPProxy(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
await fs.writeFile(`/tmp/docker-compose-${id}.yaml`, yaml.dump(tcpProxy));
|
await fs.writeFile(`/tmp/docker-compose-${id}.yaml`, yaml.dump(tcpProxy));
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker compose -f /tmp/docker-compose-${id}.yaml up -d`
|
command: `docker compose -f /tmp/docker-compose-${id}.yaml up -d`
|
||||||
});
|
});
|
||||||
await fs.rm(`/tmp/docker-compose-${id}.yaml`);
|
await fs.rm(`/tmp/docker-compose-${id}.yaml`);
|
||||||
}
|
}
|
||||||
if (!foundDependentContainer && found) {
|
if (!foundDependentContainer && found) {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker stop -t 0 ${container} && docker rm ${container}`
|
command: `docker stop -t 0 ${container} && docker rm ${container}`,
|
||||||
|
shell: true
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@ -1487,12 +1538,17 @@ export function makeLabelForServices(type) {
|
|||||||
}
|
}
|
||||||
export function errorHandler({
|
export function errorHandler({
|
||||||
status = 500,
|
status = 500,
|
||||||
message = 'Unknown error.'
|
message = 'Unknown error.',
|
||||||
|
type = 'normal'
|
||||||
}: {
|
}: {
|
||||||
status: number;
|
status: number;
|
||||||
message: string | any;
|
message: string | any;
|
||||||
|
type?: string | null;
|
||||||
}) {
|
}) {
|
||||||
if (message.message) message = message.message;
|
if (message.message) message = message.message;
|
||||||
|
if (type === 'normal') {
|
||||||
|
Sentry.captureException(message);
|
||||||
|
}
|
||||||
throw { status, message };
|
throw { status, message };
|
||||||
}
|
}
|
||||||
export async function generateSshKeyPair(): Promise<{ publicKey: string; privateKey: string }> {
|
export async function generateSshKeyPair(): Promise<{ publicKey: string; privateKey: string }> {
|
||||||
@ -1531,9 +1587,9 @@ export async function stopBuild(buildId, applicationId) {
|
|||||||
scheduler.workers.get('deployApplication').postMessage('cancel');
|
scheduler.workers.get('deployApplication').postMessage('cancel');
|
||||||
}
|
}
|
||||||
await cleanupDB(buildId, applicationId);
|
await cleanupDB(buildId, applicationId);
|
||||||
return reject(new Error('Deployment canceled.'));
|
return reject(new Error('Canceled.'));
|
||||||
}
|
}
|
||||||
const { stdout: buildContainers } = await executeDockerCmd({
|
const { stdout: buildContainers } = await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'`
|
command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'`
|
||||||
});
|
});
|
||||||
@ -1564,7 +1620,7 @@ async function cleanupDB(buildId: string, applicationId: string) {
|
|||||||
if (data?.status === 'queued' || data?.status === 'running') {
|
if (data?.status === 'queued' || data?.status === 'running') {
|
||||||
await prisma.build.update({ where: { id: buildId }, data: { status: 'canceled' } });
|
await prisma.build.update({ where: { id: buildId }, data: { status: 'canceled' } });
|
||||||
}
|
}
|
||||||
await saveBuildLog({ line: 'Deployment canceled.', buildId, applicationId });
|
await saveBuildLog({ line: 'Canceled.', buildId, applicationId });
|
||||||
}
|
}
|
||||||
|
|
||||||
export function convertTolOldVolumeNames(type) {
|
export function convertTolOldVolumeNames(type) {
|
||||||
@ -1576,36 +1632,60 @@ export function convertTolOldVolumeNames(type) {
|
|||||||
export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) {
|
export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) {
|
||||||
// Cleanup old coolify images
|
// Cleanup old coolify images
|
||||||
try {
|
try {
|
||||||
let { stdout: images } = await executeDockerCmd({
|
let { stdout: images } = await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r`
|
command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r`,
|
||||||
|
shell: true
|
||||||
});
|
});
|
||||||
|
|
||||||
images = images.trim();
|
images = images.trim();
|
||||||
if (images) {
|
if (images) {
|
||||||
await executeDockerCmd({ dockerId, command: `docker rmi -f ${images}" -q | xargs -r` });
|
await executeCommand({ dockerId, command: `docker rmi -f ${images}" -q | xargs -r`, shell: true });
|
||||||
}
|
}
|
||||||
} catch (error) { }
|
} catch (error) { }
|
||||||
if (lowDiskSpace || force) {
|
if (lowDiskSpace || force) {
|
||||||
// if (isDev) {
|
// Cleanup images that are not used
|
||||||
// if (!force) console.log(`[DEV MODE] Low disk space: ${lowDiskSpace}`);
|
|
||||||
// return;
|
|
||||||
// }
|
|
||||||
try {
|
try {
|
||||||
await executeDockerCmd({
|
await executeCommand({ dockerId, command: `docker image prune -f` });
|
||||||
|
} catch (error) { }
|
||||||
|
|
||||||
|
const { numberOfDockerImagesKeptLocally } = await prisma.setting.findUnique({ where: { id: '0' } })
|
||||||
|
const { stdout: images } = await executeCommand({
|
||||||
|
dockerId,
|
||||||
|
command: `docker images | grep -v "<none>" | grep -v REPOSITORY | awk '{print $1, $2}'`,
|
||||||
|
shell: true
|
||||||
|
});
|
||||||
|
const imagesArray = images.trim().replaceAll(' ', ':').split('\n');
|
||||||
|
const imagesSet = new Set(imagesArray.map((image) => image.split(':')[0]));
|
||||||
|
let deleteImage = []
|
||||||
|
for (const image of imagesSet) {
|
||||||
|
let keepImage = []
|
||||||
|
for (const image2 of imagesArray) {
|
||||||
|
if (image2.startsWith(image)) {
|
||||||
|
if (keepImage.length >= numberOfDockerImagesKeptLocally) {
|
||||||
|
deleteImage.push(image2)
|
||||||
|
} else {
|
||||||
|
keepImage.push(image2)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (const image of deleteImage) {
|
||||||
|
await executeCommand({ dockerId, command: `docker image rm -f ${image}` });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prune coolify managed containers
|
||||||
|
try {
|
||||||
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker container prune -f --filter "label=coolify.managed=true"`
|
command: `docker container prune -f --filter "label=coolify.managed=true"`
|
||||||
});
|
});
|
||||||
} catch (error) { }
|
} catch (error) { }
|
||||||
try {
|
|
||||||
await executeDockerCmd({ dockerId, command: `docker image prune -f` });
|
|
||||||
} catch (error) { }
|
|
||||||
try {
|
|
||||||
await executeDockerCmd({ dockerId, command: `docker image prune -a -f` });
|
|
||||||
} catch (error) { }
|
|
||||||
// Cleanup build caches
|
// Cleanup build caches
|
||||||
try {
|
try {
|
||||||
await executeDockerCmd({ dockerId, command: `docker builder prune -a -f` });
|
await executeCommand({ dockerId, command: `docker builder prune -a -f` });
|
||||||
} catch (error) { }
|
} catch (error) { }
|
||||||
}
|
}
|
||||||
}
|
}
|
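The cleanupDockerStorage changes above prune unused images and then keep only `numberOfDockerImagesKeptLocally` tags per repository (a new setting), deleting the rest. The grouping logic is condensed below as a sketch; it keeps the first N entries per repository in listing order, relying on `docker images` listing newer tags first.

// Sketch of the per-repository retention rule applied to 'repo:tag' strings.
function imagesToDelete(imagesArray: string[], keepPerRepository: number): string[] {
	const byRepository = new Map<string, string[]>();
	for (const image of imagesArray) {                        // e.g. 'coollabsio/coolify:3.12.0'
		const repository = image.split(':')[0];
		byRepository.set(repository, [...(byRepository.get(repository) ?? []), image]);
	}
	const toDelete: string[] = [];
	for (const tags of byRepository.values()) {
		toDelete.push(...tags.slice(keepPerRepository));      // everything past the first N is removed
	}
	return toDelete;
}

// imagesToDelete(['a:3', 'a:2', 'a:1', 'b:1'], 2) -> ['a:1']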
||||||
@ -1687,3 +1767,17 @@ export function decryptApplication(application: any) {
|
|||||||
return application;
|
return application;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function pushToRegistry(application: any, workdir: string, tag: string, imageName: string, customTag: string) {
|
||||||
|
const location = `${workdir}/.docker`
|
||||||
|
const tagCommand = `docker tag ${application.id}:${tag} ${imageName}:${customTag}`
|
||||||
|
const pushCommand = `docker --config ${location} push ${imageName}:${customTag}`
|
||||||
|
await executeCommand({
|
||||||
|
dockerId: application.destinationDockerId,
|
||||||
|
command: tagCommand
|
||||||
|
})
|
||||||
|
await executeCommand({
|
||||||
|
dockerId: application.destinationDockerId,
|
||||||
|
command: pushCommand
|
||||||
|
})
|
||||||
|
}
|
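The new pushToRegistry helper added above tags the locally built image and pushes it using the per-build Docker config written by saveDockerRegistryCredentials. A hypothetical call site follows; the image name and tag values are illustrative only.

// Hypothetical usage after buildImage produced `${application.id}:${tag}` locally.
await pushToRegistry(application, workdir, tag, 'registry.example.com/acme/web', 'v1.2.3');
// which runs, via executeCommand against the application's destination docker:
//   docker tag <applicationId>:<tag> registry.example.com/acme/web:v1.2.3
//   docker --config <workdir>/.docker push registry.example.com/acme/web:v1.2.3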
@ -1,4 +1,4 @@
|
|||||||
import { executeDockerCmd } from './common';
|
import { executeCommand } from './common';
|
||||||
|
|
||||||
export function formatLabelsOnDocker(data) {
|
export function formatLabelsOnDocker(data) {
|
||||||
return data.trim().split('\n').map(a => JSON.parse(a)).map((container) => {
|
return data.trim().split('\n').map(a => JSON.parse(a)).map((container) => {
|
||||||
@ -16,7 +16,7 @@ export function formatLabelsOnDocker(data) {
|
|||||||
export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
|
export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
|
||||||
let containerFound = false;
|
let containerFound = false;
|
||||||
try {
|
try {
|
||||||
const { stdout } = await executeDockerCmd({
|
const { stdout } = await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command:
|
command:
|
||||||
`docker inspect --format '{{json .State}}' ${container}`
|
`docker inspect --format '{{json .State}}' ${container}`
|
||||||
@ -28,27 +28,26 @@ export async function checkContainer({ dockerId, container, remove = false }: {
|
|||||||
const isRestarting = status === 'restarting'
|
const isRestarting = status === 'restarting'
|
||||||
const isExited = status === 'exited'
|
const isExited = status === 'exited'
|
||||||
if (status === 'created') {
|
if (status === 'created') {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command:
|
command:
|
||||||
`docker rm ${container}`
|
`docker rm ${container}`
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
if (remove && status === 'exited') {
|
if (remove && status === 'exited') {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command:
|
command:
|
||||||
`docker rm ${container}`
|
`docker rm ${container}`
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
found: containerFound,
|
found: containerFound,
|
||||||
status: {
|
status: {
|
||||||
isRunning,
|
isRunning,
|
||||||
isRestarting,
|
isRestarting,
|
||||||
isExited
|
isExited
|
||||||
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
@ -63,7 +62,7 @@ export async function checkContainer({ dockerId, container, remove = false }: {
|
|||||||
export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {
|
export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {
|
||||||
let isExited = false;
|
let isExited = false;
|
||||||
try {
|
try {
|
||||||
const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
|
const { stdout } = await executeCommand({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
|
||||||
if (stdout.trim() === 'exited') {
|
if (stdout.trim() === 'exited') {
|
||||||
isExited = true;
|
isExited = true;
|
||||||
}
|
}
|
||||||
@ -82,13 +81,13 @@ export async function removeContainer({
|
|||||||
dockerId: string;
|
dockerId: string;
|
||||||
}): Promise<void> {
|
}): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })
|
const { stdout } = await executeCommand({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })
|
||||||
if (JSON.parse(stdout).Running) {
|
if (JSON.parse(stdout).Running) {
|
||||||
await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}` })
|
await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` })
|
||||||
await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
|
await executeCommand({ dockerId, command: `docker rm ${id}` })
|
||||||
}
|
}
|
||||||
if (JSON.parse(stdout).Status === 'exited') {
|
if (JSON.parse(stdout).Status === 'exited') {
|
||||||
await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
|
await executeCommand({ dockerId, command: `docker rm ${id}` })
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw error;
|
throw error;
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
|
|
||||||
import jsonwebtoken from 'jsonwebtoken';
|
import jsonwebtoken from 'jsonwebtoken';
|
||||||
import { saveBuildLog } from '../buildPacks/common';
|
import { saveBuildLog } from '../buildPacks/common';
|
||||||
import { asyncExecShell, decrypt, prisma } from '../common';
|
import { decrypt, executeCommand, prisma } from '../common';
|
||||||
|
|
||||||
export default async function ({
|
export default async function ({
|
||||||
applicationId,
|
applicationId,
|
||||||
@ -9,6 +9,7 @@ export default async function ({
|
|||||||
githubAppId,
|
githubAppId,
|
||||||
repository,
|
repository,
|
||||||
apiUrl,
|
apiUrl,
|
||||||
|
gitCommitHash,
|
||||||
htmlUrl,
|
htmlUrl,
|
||||||
branch,
|
branch,
|
||||||
buildId,
|
buildId,
|
||||||
@ -20,6 +21,7 @@ export default async function ({
|
|||||||
githubAppId: string;
|
githubAppId: string;
|
||||||
repository: string;
|
repository: string;
|
||||||
apiUrl: string;
|
apiUrl: string;
|
||||||
|
gitCommitHash?: string;
|
||||||
htmlUrl: string;
|
htmlUrl: string;
|
||||||
branch: string;
|
branch: string;
|
||||||
buildId: string;
|
buildId: string;
|
||||||
@ -28,16 +30,24 @@ export default async function ({
|
|||||||
}): Promise<string> {
|
}): Promise<string> {
|
||||||
const { default: got } = await import('got')
|
const { default: got } = await import('got')
|
||||||
const url = htmlUrl.replace('https://', '').replace('http://', '');
|
const url = htmlUrl.replace('https://', '').replace('http://', '');
|
||||||
await saveBuildLog({ line: 'GitHub importer started.', buildId, applicationId });
|
|
||||||
if (forPublic) {
|
if (forPublic) {
|
||||||
await saveBuildLog({
|
await saveBuildLog({
|
||||||
line: `Cloning ${repository}:${branch} branch.`,
|
line: `Cloning ${repository}:${branch}...`,
|
||||||
buildId,
|
buildId,
|
||||||
applicationId
|
applicationId
|
||||||
});
|
});
|
||||||
await asyncExecShell(
|
if (gitCommitHash) {
|
||||||
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
|
await saveBuildLog({
|
||||||
);
|
line: `Checking out ${gitCommitHash} commit...`,
|
||||||
|
buildId,
|
||||||
|
applicationId
|
||||||
|
});
|
||||||
|
}
|
||||||
|
await executeCommand({
|
||||||
|
command:
|
||||||
|
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
|
||||||
|
shell: true
|
||||||
|
});
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
|
const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
|
||||||
@ -62,15 +72,23 @@ export default async function ({
|
|||||||
})
|
})
|
||||||
.json();
|
.json();
|
||||||
await saveBuildLog({
|
await saveBuildLog({
|
||||||
line: `Cloning ${repository}:${branch} branch.`,
|
line: `Cloning ${repository}:${branch}...`,
|
||||||
buildId,
|
buildId,
|
||||||
applicationId
|
applicationId
|
||||||
});
|
});
|
||||||
await asyncExecShell(
|
if (gitCommitHash) {
|
||||||
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
|
await saveBuildLog({
|
||||||
);
|
line: `Checking out ${gitCommitHash} commit...`,
|
||||||
|
buildId,
|
||||||
|
applicationId
|
||||||
|
});
|
||||||
|
}
|
||||||
|
await executeCommand({
|
||||||
|
command:
|
||||||
|
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
|
||||||
|
shell: true
|
||||||
|
});
|
||||||
}
|
}
|
||||||
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
|
const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
|
||||||
|
|
||||||
return commit.replace('\n', '');
|
return commit.replace('\n', '');
|
||||||
}
|
}
|
||||||
|
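Both importers now accept an optional gitCommitHash and check it out right after cloning; `git checkout` with an empty argument is a no-op on the branch checkout, so the same command string covers both cases. A sketch of how the command is assembled; the helper name and option object are illustrative, not part of the diff.

// Sketch of the clone-and-checkout command the GitHub importer builds.
function buildCloneCommand(opts: { url: string; repository: string; branch: string; workdir: string; gitCommitHash?: string; token?: string }): string {
	const { url, repository, branch, workdir, gitCommitHash, token } = opts;
	const origin = token
		? `https://x-access-token:${token}@${url}/${repository}.git`  // GitHub App installation token
		: `https://${url}/${repository}.git`;                         // public repository
	return `git clone -q -b ${branch} ${origin} ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ''} && git submodule update --init --recursive && git lfs pull && cd ..`;
}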
@@ -1,11 +1,12 @@
 import { saveBuildLog } from "../buildPacks/common";
-import { asyncExecShell } from "../common";
+import { executeCommand } from "../common";

 export default async function ({
 applicationId,
 workdir,
 repodir,
 htmlUrl,
+gitCommitHash,
 repository,
 branch,
 buildId,
@@ -20,34 +21,43 @@ export default async function ({
 branch: string;
 buildId: string;
 repodir: string;
+gitCommitHash: string;
 privateSshKey: string;
 customPort: number;
 forPublic: boolean;
 }): Promise<string> {
 const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
-await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });

 if (!forPublic) {
-await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
-await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
+await executeCommand({ command: `echo '${privateSshKey}' > ${repodir}/id.rsa`, shell: true });
+await executeCommand({ command: `chmod 600 ${repodir}/id.rsa` });
 }

 await saveBuildLog({
-line: `Cloning ${repository}:${branch} branch.`,
+line: `Cloning ${repository}:${branch}...`,
 buildId,
 applicationId
 });
+if (gitCommitHash) {
+await saveBuildLog({
+line: `Checking out ${gitCommitHash} commit...`,
+buildId,
+applicationId
+});
+}
 if (forPublic) {
-await asyncExecShell(
-`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
+await executeCommand({
+command:
+`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
+}
 );
 } else {
-await asyncExecShell(
-`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
+await executeCommand({
+command:
+`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
+}
 );
 }

-const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
+const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
 return commit.replace('\n', '');
 }
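Both importers now splice an optional `git checkout ${gitCommitHash || ""}` step into the clone pipeline, relying on a bare `git checkout` being harmless on a clean clone when no commit is pinned. A minimal sketch of that command assembly (the function and parameter names here are illustrative, not the project's helpers):

// Illustrative only: builds the same kind of clone-and-checkout pipeline the
// importers above pass to executeCommand with shell: true.
function buildCloneCommand(opts: {
	branch: string;
	cloneUrl: string;   // e.g. https://host/org/repo.git or git@host:org/repo.git
	workdir: string;
	gitCommitHash?: string;
}): string {
	const { branch, cloneUrl, workdir, gitCommitHash } = opts;
	return [
		`git clone -q -b ${branch} ${cloneUrl} ${workdir}/`,
		`cd ${workdir}/`,
		// When no commit is pinned, the interpolation leaves a bare `git checkout`,
		// which the diff relies on not changing the freshly cloned working tree.
		`git checkout ${gitCommitHash || ''}`,
		`git submodule update --init --recursive`,
		`git lfs pull`,
		`cd ..`
	].join(' && ');
}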
@@ -1,12 +1,15 @@
-import { isDev } from "./common";
+import { isARM, isDev } from "./common";
 import fs from 'fs/promises';
 export async function getTemplates() {
 const templatePath = isDev ? './templates.json' : '/app/templates.json';
 const open = await fs.open(templatePath, 'r');
-let data;
 try {
-data = await open.readFile({ encoding: 'utf-8' });
-return JSON.parse(data);
+let data = await open.readFile({ encoding: 'utf-8' });
+let jsonData = JSON.parse(data)
+if (isARM(process.arch)) {
+jsonData = jsonData.filter(d => d.arch !== 'amd64')
+}
+return jsonData;
 } catch (error) {
 return []
 } finally {
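getTemplates now drops amd64-only templates when the host is ARM. A small self-contained sketch of that filtering step; the template shape and the ARM check are illustrative stand-ins for the project's templates.json schema and its isARM helper:

// Hypothetical template shape for illustration; only `arch` matters here.
interface ServiceTemplate {
	type: string;
	arch?: 'amd64' | 'arm64' | string;
}

// Assumed behaviour of isARM from lib/common: true for ARM-family archs.
const isARMArch = (arch: string) => arch === 'arm' || arch === 'arm64' || arch === 'aarch64';

function filterTemplatesForHost(templates: ServiceTemplate[], hostArch = process.arch): ServiceTemplate[] {
	// On ARM hosts, hide templates that are explicitly amd64-only,
	// mirroring the `d.arch !== 'amd64'` filter in the hunk above.
	return isARMArch(hostArch) ? templates.filter((t) => t.arch !== 'amd64') : templates;
}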
@@ -2,7 +2,7 @@ import type { FastifyReply, FastifyRequest } from 'fastify';
 import fs from 'fs/promises';
 import yaml from 'js-yaml';
 import path from 'path';
-import { asyncSleep, ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, errorHandler, executeDockerCmd, getServiceFromDB, isARM, makeLabelForServices, persistentVolumes, prisma, stopTcpHttpProxy } from '../common';
+import { asyncSleep, ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, errorHandler, executeCommand, getServiceFromDB, isARM, makeLabelForServices, persistentVolumes, prisma, stopTcpHttpProxy } from '../common';
 import { parseAndFindServiceTemplates } from '../../routes/api/v1/services/handlers';

 import { ServiceStartStop } from '../../routes/api/v1/services/types';
@@ -15,14 +15,19 @@ export async function stopService(request: FastifyRequest<ServiceStartStop>) {
 const teamId = request.user.teamId;
 const { destinationDockerId } = await getServiceFromDB({ id, teamId });
 if (destinationDockerId) {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
-})
-await executeDockerCmd({
-dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
+command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}`
 })
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
+await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
+}
+}
+}
 return {}
 }
 throw { status: 500, message: 'Could not stop containers.' }
@@ -182,19 +187,36 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
 // Workaround: Stop old minio proxies
 if (service.type === 'minio') {
 try {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: destinationDocker.id,
 command:
-`docker container ls -a --filter 'name=${id}-' --format {{.ID}}|xargs -r -n 1 docker container stop -t 0`
+`docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
 });
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
+await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
+}
+}
+}
 } catch (error) { }
 try {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: destinationDocker.id,
 command:
-`docker container ls -a --filter 'name=${id}-' --format {{.ID}}|xargs -r -n 1 docker container rm -f`
+`docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
 });
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
+await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
+}
+}
+}
 } catch (error) { }
 }
 return {}
@@ -205,16 +227,16 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
 async function startServiceContainers(fastify, id, teamId, dockerId, composeFileDestination) {
 try {
 fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Pulling images...' })
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} pull` })
+await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} pull` })
 } catch (error) { }
 fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Building images...' })
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} build --no-cache` })
+await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} build --no-cache` })
 fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Creating containers...' })
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} create` })
+await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} create` })
 fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Starting containers...' })
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} start` })
+await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} start` })
 await asyncSleep(1000);
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} up -d` })
+await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} up -d` })
 fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 0 })
 }
 export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
@@ -226,7 +248,7 @@ export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply:
 destinationDocker,
 } = await getServiceFromDB({ id, teamId });
 if (destinationDockerId) {
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDocker.id,
 command: `docker exec ${id} migrate`
 })
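The stopService and minio hunks above replace a single `... | xargs docker stop/rm` pipeline with an explicit list-then-loop over container IDs. A compact sketch of that pattern, using a generic runner as a stand-in for the project's executeCommand:

// Sketch of the list-then-loop pattern used above. `run` stands in for
// whatever command runner is available; it only needs to return stdout.
async function stopAndRemoveByLabel(
	run: (command: string) => Promise<{ stdout: string }>,
	composeProject: string
): Promise<void> {
	const { stdout } = await run(
		`docker ps -a --filter 'label=com.docker.compose.project=${composeProject}' --format {{.ID}}`
	);
	// Empty stdout means no containers matched the label filter.
	const containerIds = stdout.split('\n').map((l) => l.trim()).filter(Boolean);
	for (const id of containerIds) {
		await run(`docker stop -t 0 ${id}`);
		await run(`docker rm --force ${id}`);
	}
}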
@@ -7,12 +7,12 @@ import yaml from 'js-yaml';
 import csv from 'csvtojson';

 import { day } from '../../../../lib/dayjs';
-import { setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
-import { checkDomainsIsValidInDNS, checkExposedPort, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeDockerCmd, generateSshKeyPair, getContainerUsage, getDomain, isDev, isDomainConfigured, listSettings, prisma, stopBuild, uniqueName } from '../../../../lib/common';
+import { saveDockerRegistryCredentials, setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
+import { checkDomainsIsValidInDNS, checkExposedPort, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeCommand, generateSshKeyPair, getContainerUsage, getDomain, isDev, isDomainConfigured, listSettings, prisma, stopBuild, uniqueName } from '../../../../lib/common';
 import { checkContainer, formatLabelsOnDocker, removeContainer } from '../../../../lib/docker';

 import type { FastifyRequest } from 'fastify';
-import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication, CheckDomain, StopPreviewApplication, RestartPreviewApplication, GetBuilds } from './types';
+import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication, CheckDomain, StopPreviewApplication, RestartPreviewApplication, GetBuilds, RestartApplication } from './types';
 import { OnlyId } from '../../../../types';

 function filterObject(obj, callback) {
@@ -78,7 +78,7 @@ export async function cleanupUnconfiguredApplications(request: FastifyRequest<an
 for (const application of applications) {
 if (!application.buildPack || !application.destinationDockerId || !application.branch || (!application.settings?.isBot && !application?.fqdn)) {
 if (application?.destinationDockerId && application.destinationDocker?.network) {
-const { stdout: containers } = await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: application.destinationDocker.id,
 command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${application.id} --format '{{json .}}'`
 })
@@ -113,7 +113,7 @@ export async function getApplicationStatus(request: FastifyRequest<OnlyId>) {
 const application: any = await getApplicationFromDB(id, teamId);
 if (application?.destinationDockerId) {
 if (application.buildPack === 'compose') {
-const { stdout: containers } = await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: application.destinationDocker.id,
 command:
 `docker ps -a --filter "label=coolify.applicationId=${id}" --format '{{json .}}'`
@@ -241,7 +241,8 @@ export async function getApplicationFromDB(id: string, teamId: string) {
 secrets: true,
 persistentStorage: true,
 connectedDatabase: true,
-previewApplication: true
+previewApplication: true,
+dockerRegistry: true
 }
 });
 if (!application) {
@@ -280,7 +281,7 @@ export async function getApplicationFromDBWebhook(projectId: number, branch: str
 }
 });
 if (applications.length === 0) {
-throw { status: 500, message: 'Application not configured.' }
+throw { status: 500, message: 'Application not configured.', type: 'webhook' }
 }
 applications = applications.map((application: any) => {
 application = decryptApplication(application);
@@ -302,8 +303,8 @@ export async function getApplicationFromDBWebhook(projectId: number, branch: str

 return applications;

-} catch ({ status, message }) {
-return errorHandler({ status, message })
+} catch ({ status, message, type }) {
+return errorHandler({ status, message, type })
 }
 }
 export async function saveApplication(request: FastifyRequest<SaveApplication>, reply: FastifyReply) {
@@ -326,13 +327,16 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
 dockerFileLocation,
 denoMainFile,
 denoOptions,
+gitCommitHash,
 baseImage,
 baseBuildImage,
 deploymentType,
 baseDatabaseBranch,
 dockerComposeFile,
 dockerComposeFileLocation,
-dockerComposeConfiguration
+dockerComposeConfiguration,
+simpleDockerfile,
+dockerRegistryImageName
 } = request.body
 if (port) port = Number(port);
 if (exposePort) {
@@ -350,6 +354,7 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
 publishDirectory,
 baseDirectory,
 dockerFileLocation,
+dockerComposeFileLocation,
 denoMainFile
 });
 if (baseDatabaseBranch) {
@@ -364,11 +369,14 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
 pythonVariable,
 denoOptions,
 baseImage,
+gitCommitHash,
 baseBuildImage,
 deploymentType,
 dockerComposeFile,
 dockerComposeFileLocation,
 dockerComposeConfiguration,
+simpleDockerfile,
+dockerRegistryImageName,
 ...defaultConfiguration,
 connectedDatabase: { update: { hostedDatabaseDBName: baseDatabaseBranch } }
 }
@@ -382,6 +390,7 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
 exposePort,
 pythonWSGI,
 pythonModule,
+gitCommitHash,
 pythonVariable,
 denoOptions,
 baseImage,
@@ -390,6 +399,8 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
 dockerComposeFile,
 dockerComposeFileLocation,
 dockerComposeConfiguration,
+simpleDockerfile,
+dockerRegistryImageName,
 ...defaultConfiguration
 }
 });
@@ -438,16 +449,17 @@ export async function stopPreviewApplication(request: FastifyRequest<StopPreview
 }
 }

-export async function restartApplication(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
+export async function restartApplication(request: FastifyRequest<RestartApplication>, reply: FastifyReply) {
 try {
 const { id } = request.params
+const { imageId = null } = request.body
 const { teamId } = request.user
 let application: any = await getApplicationFromDB(id, teamId);
 if (application?.destinationDockerId) {
 const buildId = cuid();
 const { id: dockerId, network } = application.destinationDocker;
-const { secrets, pullmergeRequestId, port, repository, persistentStorage, id: applicationId, buildPack, exposePort } = application;
+const { dockerRegistry, secrets, pullmergeRequestId, port, repository, persistentStorage, id: applicationId, buildPack, exposePort } = application;
+let location = null;
 const envs = [
 `PORT=${port}`
 ];
@@ -470,28 +482,48 @@ export async function restartApplication(request: FastifyRequest<OnlyId>, reply:
 const { workdir } = await createDirectories({ repository, buildId });
 const labels = []
 let image = null
-const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}' --format '{{json .}}'` })
-const containersArray = container.trim().split('\n');
-for (const container of containersArray) {
-const containerObj = formatLabelsOnDocker(container);
-image = containerObj[0].Image
-Object.keys(containerObj[0].Labels).forEach(function (key) {
-if (key.startsWith('coolify')) {
-labels.push(`${key}=${containerObj[0].Labels[key]}`)
-}
-})
+if (imageId) {
+image = imageId
+} else {
+const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}' --format '{{json .}}'` })
+const containersArray = container.trim().split('\n');
+for (const container of containersArray) {
+const containerObj = formatLabelsOnDocker(container);
+image = containerObj[0].Image
+Object.keys(containerObj[0].Labels).forEach(function (key) {
+if (key.startsWith('coolify')) {
+labels.push(`${key}=${containerObj[0].Labels[key]}`)
+}
+})
+}
 }
-let imageFound = false;
+if (dockerRegistry) {
+const { url, username, password } = dockerRegistry
+location = await saveDockerRegistryCredentials({ url, username, password, workdir })
+}
+
+let imageFoundLocally = false;
 try {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command: `docker image inspect ${image}`
 })
-imageFound = true;
+imageFoundLocally = true;
 } catch (error) {
 //
 }
-if (!imageFound) {
+let imageFoundRemotely = false;
+try {
+await executeCommand({
+dockerId,
+command: `docker ${location ? `--config ${location}` : ''} pull ${image}`
+})
+imageFoundRemotely = true;
+} catch (error) {
+//
+}
+
+if (!imageFoundLocally && !imageFoundRemotely) {
 throw { status: 500, message: 'Image not found, cannot restart application.' }
 }
 await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
@@ -537,9 +569,14 @@ export async function restartApplication(request: FastifyRequest<OnlyId>, reply:
 volumes: Object.assign({}, ...composeVolumes)
 };
 await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
-await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}` })
-await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
-await executeDockerCmd({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
+try {
+await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` })
+await executeCommand({ dockerId, command: `docker rm ${id}` })
+} catch (error) {
+//
+}
+
+await executeCommand({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
 return reply.code(201).send();
 }
 throw { status: 500, message: 'Application cannot be restarted.' }
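restartApplication now resolves the image in two steps: check the local engine with `docker image inspect`, then fall back to `docker --config <dir> pull` using credentials written by saveDockerRegistryCredentials. A rough sketch of that resolution order; the runner type and helper names are stand-ins, not the project's actual functions:

// Stand-in runner type; the real calls go through executeCommand with a dockerId.
type Run = (command: string) => Promise<{ stdout: string; stderr: string }>;

async function resolveImage(run: Run, image: string, dockerConfigDir?: string): Promise<boolean> {
	// 1. Is the image already present on the engine?
	try {
		await run(`docker image inspect ${image}`);
		return true;
	} catch {
		// not local, fall through to a pull
	}
	// 2. Try to pull it; --config points the docker CLI at a directory holding a
	// config.json with registry credentials (what saveDockerRegistryCredentials
	// appears to write in the hunk above).
	try {
		await run(`docker ${dockerConfigDir ? `--config ${dockerConfigDir}` : ''} pull ${image}`);
		return true;
	} catch {
		return false;
	}
}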
@@ -555,7 +592,7 @@ export async function stopApplication(request: FastifyRequest<OnlyId>, reply: Fa
 if (application?.destinationDockerId) {
 const { id: dockerId } = application.destinationDocker;
 if (application.buildPack === 'compose') {
-const { stdout: containers } = await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: application.destinationDocker.id,
 command:
 `docker ps -a --filter "label=coolify.applicationId=${id}" --format '{{json .}}'`
@@ -590,7 +627,7 @@ export async function deleteApplication(request: FastifyRequest<DeleteApplicatio
 include: { destinationDocker: true }
 });
 if (!force && application?.destinationDockerId && application.destinationDocker?.network) {
-const { stdout: containers } = await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: application.destinationDocker.id,
 command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${id} --format '{{json .}}'`
 })
@@ -676,6 +713,47 @@ export async function getUsage(request) {
 return errorHandler({ status, message })
 }
 }
+export async function getDockerImages(request) {
+try {
+const { id } = request.params
+const teamId = request.user?.teamId;
+const application: any = await getApplicationFromDB(id, teamId);
+let imagesAvailables = [];
+try {
+const { stdout } = await executeCommand({ dockerId: application.destinationDocker.id, command: `docker images --format '{{.Repository}}#{{.Tag}}#{{.CreatedAt}}' | grep -i ${id} | grep -v cache`, shell: true });
+const { stdout: runningImage } = await executeCommand({ dockerId: application.destinationDocker.id, command: `docker ps -a --filter 'label=com.docker.compose.service=${id}' --format {{.Image}}` });
+const images = stdout.trim().split('\n');
+
+for (const image of images) {
+const [repository, tag, createdAt] = image.split('#');
+if (tag.includes('-')) {
+continue;
+}
+const [year, time] = createdAt.split(' ');
+imagesAvailables.push({
+repository,
+tag,
+createdAt: day(year + time).unix()
+})
+}
+
+imagesAvailables = imagesAvailables.sort((a, b) => b.tag - a.tag);
+
+return {
+imagesAvailables,
+runningImage
+}
+} catch (error) {
+return {
+imagesAvailables,
+}
+}
+
+} catch ({ status, message }) {
+
+return errorHandler({ status, message })
+}
+}
+
 export async function getUsageByContainer(request) {
 try {
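getDockerImages above lists images as `{{.Repository}}#{{.Tag}}#{{.CreatedAt}}` and then splits each row on `#`. A small self-contained sketch of that parsing step; it keeps the raw timestamp string instead of the project's day() conversion:

// Parses rows shaped like "repo#tag#2022-11-01 10:00:00 +0000 UTC"
// into structured records, mirroring the split('#') logic above.
interface AvailableImage {
	repository: string;
	tag: string;
	createdAt: string; // raw docker timestamp; the handler above converts it with day().unix()
}

function parseDockerImages(stdout: string): AvailableImage[] {
	const images: AvailableImage[] = [];
	for (const row of stdout.trim().split('\n').filter(Boolean)) {
		const [repository, tag, createdAt] = row.split('#');
		if (!repository || !tag || tag.includes('-')) continue; // skip dashed tags, as the handler above does
		images.push({ repository, tag, createdAt });
	}
	return images;
}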
@@ -718,22 +796,37 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
 await prisma.application.update({ where: { id }, data: { configHash } });
 }
 await prisma.application.update({ where: { id }, data: { updatedAt: new Date() } });
-await prisma.build.create({
-data: {
-id: buildId,
-applicationId: id,
-sourceBranch: branch,
-branch: application.branch,
-pullmergeRequestId: pullmergeRequestId?.toString(),
-forceRebuild,
-destinationDockerId: application.destinationDocker?.id,
-gitSourceId: application.gitSource?.id,
-githubAppId: application.gitSource?.githubApp?.id,
-gitlabAppId: application.gitSource?.gitlabApp?.id,
-status: 'queued',
-type: pullmergeRequestId ? application.gitSource?.githubApp?.id ? 'manual_pr' : 'manual_mr' : 'manual'
-}
-});
+if (application.gitSourceId) {
+await prisma.build.create({
+data: {
+id: buildId,
+applicationId: id,
+sourceBranch: branch,
+branch: application.branch,
+pullmergeRequestId: pullmergeRequestId?.toString(),
+forceRebuild,
+destinationDockerId: application.destinationDocker?.id,
+gitSourceId: application.gitSource?.id,
+githubAppId: application.gitSource?.githubApp?.id,
+gitlabAppId: application.gitSource?.gitlabApp?.id,
+status: 'queued',
+type: pullmergeRequestId ? application.gitSource?.githubApp?.id ? 'manual_pr' : 'manual_mr' : 'manual'
+}
+});
+} else {
+await prisma.build.create({
+data: {
+id: buildId,
+applicationId: id,
+branch: 'latest',
+forceRebuild,
+destinationDockerId: application.destinationDocker?.id,
+status: 'queued',
+type: 'manual'
+}
+});
+}

 return {
 buildId
 };
@@ -748,20 +841,28 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
 export async function saveApplicationSource(request: FastifyRequest<SaveApplicationSource>, reply: FastifyReply) {
 try {
 const { id } = request.params
-const { gitSourceId, forPublic, type } = request.body
+const { gitSourceId, forPublic, type, simpleDockerfile } = request.body
 if (forPublic) {
 const publicGit = await prisma.gitSource.findFirst({ where: { type, forPublic } });
 await prisma.application.update({
 where: { id },
 data: { gitSource: { connect: { id: publicGit.id } } }
 });
-} else {
+}
+if (simpleDockerfile) {
+await prisma.application.update({
+where: { id },
+data: { simpleDockerfile, settings: { update: { autodeploy: false } } }
+});
+}
+if (gitSourceId) {
 await prisma.application.update({
 where: { id },
 data: { gitSource: { connect: { id: gitSourceId } } }
 });
 }


 return reply.code(201).send()
 } catch ({ status, message }) {
 return errorHandler({ status, message })
@@ -819,7 +920,7 @@ export async function saveRepository(request, reply) {
 let { repository, branch, projectId, autodeploy, webhookToken, isPublicRepository = false } = request.body

 repository = repository.toLowerCase();

 projectId = Number(projectId);
 if (webhookToken) {
 await prisma.application.update({
@@ -864,11 +965,11 @@ export async function getBuildPack(request) {
 const teamId = request.user?.teamId;
 const application: any = await getApplicationFromDB(id, teamId);
 return {
-type: application.gitSource.type,
+type: application.gitSource?.type || 'dockerRegistry',
 projectId: application.projectId,
 repository: application.repository,
 branch: application.branch,
-apiUrl: application.gitSource.apiUrl,
+apiUrl: application.gitSource?.apiUrl || null,
 isPublicRepository: application.settings.isPublicRepository
 }
 } catch ({ status, message }) {
@@ -876,6 +977,16 @@ export async function getBuildPack(request) {
 }
 }

+export async function saveRegistry(request, reply) {
+try {
+const { id } = request.params
+const { registryId } = request.body
+await prisma.application.update({ where: { id }, data: { dockerRegistry: { connect: { id: registryId } } } });
+return reply.code(201).send()
+} catch ({ status, message }) {
+return errorHandler({ status, message })
+}
+}
 export async function saveBuildPack(request, reply) {
 try {
 const { id } = request.params
@@ -1072,7 +1183,7 @@ export async function restartPreview(request: FastifyRequest<RestartPreviewAppli
 const { workdir } = await createDirectories({ repository, buildId });
 const labels = []
 let image = null
-const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}-${pullmergeRequestId}' --format '{{json .}}'` })
+const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}-${pullmergeRequestId}' --format '{{json .}}'` })
 const containersArray = container.trim().split('\n');
 for (const container of containersArray) {
 const containerObj = formatLabelsOnDocker(container);
@@ -1085,7 +1196,7 @@ export async function restartPreview(request: FastifyRequest<RestartPreviewAppli
 }
 let imageFound = false;
 try {
-await executeDockerCmd({
+await executeCommand({
 dockerId,
 command: `docker image inspect ${image}`
 })
@@ -1139,9 +1250,9 @@ export async function restartPreview(request: FastifyRequest<RestartPreviewAppli
 volumes: Object.assign({}, ...composeVolumes)
 };
 await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
-await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}-${pullmergeRequestId}` })
-await executeDockerCmd({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` })
-await executeDockerCmd({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
+await executeCommand({ dockerId, command: `docker stop -t 0 ${id}-${pullmergeRequestId}` })
+await executeCommand({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` })
+await executeCommand({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
 return reply.code(201).send();
 }
 throw { status: 500, message: 'Application cannot be restarted.' }
@@ -1182,7 +1293,7 @@ export async function loadPreviews(request: FastifyRequest<OnlyId>) {
 try {
 const { id } = request.params
 const application = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } });
-const { stdout } = await executeDockerCmd({ dockerId: application.destinationDocker.id, command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"` })
+const { stdout } = await executeCommand({ dockerId: application.destinationDocker.id, command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"` })
 if (stdout === '') {
 throw { status: 500, message: 'No previews found.' }
 }
@@ -1257,7 +1368,7 @@ export async function getApplicationLogs(request: FastifyRequest<GetApplicationL
 if (destinationDockerId) {
 try {
 const { default: ansi } = await import('strip-ansi')
-const { stdout, stderr } = await executeDockerCmd({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` })
+const { stdout, stderr } = await executeCommand({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` })
 const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
 const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
 const logs = stripLogsStderr.concat(stripLogsStdout)
@@ -1448,19 +1559,19 @@ export async function createdBranchDatabase(database: any, baseDatabaseBranch: s
 if (destinationDockerId) {
 if (type === 'postgresql') {
 const decryptedRootUserPassword = decrypt(rootUserPassword);
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDockerId,
 command: `docker exec ${id} pg_dump -d "postgresql://postgres:${decryptedRootUserPassword}@${id}:5432/${baseDatabaseBranch}" --encoding=UTF8 --schema-only -f /tmp/${baseDatabaseBranch}.dump`
 })
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDockerId,
 command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "CREATE DATABASE branch_${pullmergeRequestId}"`
 })
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDockerId,
 command: `docker exec ${id} psql -d "postgresql://postgres:${decryptedRootUserPassword}@${id}:5432/branch_${pullmergeRequestId}" -f /tmp/${baseDatabaseBranch}.dump`
 })
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDockerId,
 command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "ALTER DATABASE branch_${pullmergeRequestId} OWNER TO ${dbUser}"`
 })
@@ -1479,12 +1590,12 @@ export async function removeBranchDatabase(database: any, pullmergeRequestId: st
 if (type === 'postgresql') {
 const decryptedRootUserPassword = decrypt(rootUserPassword);
 // Terminate all connections to the database
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDockerId,
 command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'branch_${pullmergeRequestId}' AND pid <> pg_backend_pid();"`
 })

-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDockerId,
 command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "DROP DATABASE branch_${pullmergeRequestId}"`
 })
@@ -1,8 +1,8 @@
 import { FastifyPluginAsync } from 'fastify';
 import { OnlyId } from '../../../../types';
-import { cancelDeployment, checkDNS, checkDomain, checkRepository, cleanupUnconfiguredApplications, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, getUsageByContainer, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers';
+import { cancelDeployment, checkDNS, checkDomain, checkRepository, cleanupUnconfiguredApplications, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getDockerImages, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, getUsageByContainer, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRegistry, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers';

-import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
+import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartApplication, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';

 const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 fastify.addHook('onRequest', async (request) => {
@@ -21,7 +21,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {

 fastify.get<OnlyId>('/:id/status', async (request) => await getApplicationStatus(request));

-fastify.post<OnlyId>('/:id/restart', async (request, reply) => await restartApplication(request, reply));
+fastify.post<RestartApplication>('/:id/restart', async (request, reply) => await restartApplication(request, reply));
 fastify.post<OnlyId>('/:id/stop', async (request, reply) => await stopApplication(request, reply));
 fastify.post<StopPreviewApplication>('/:id/stop/preview', async (request, reply) => await stopPreviewApplication(request, reply));

@@ -45,7 +45,6 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 fastify.get<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/status', async (request) => await getPreviewStatus(request));
 fastify.post<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/restart', async (request, reply) => await restartPreview(request, reply));

-// fastify.get<GetApplicationLogs>('/:id/logs', async (request) => await getApplicationLogs(request));
 fastify.get<GetApplicationLogs>('/:id/logs/:containerId', async (request) => await getApplicationLogs(request));
 fastify.get<GetBuilds>('/:id/logs/build', async (request) => await getBuilds(request));
 fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));
@@ -53,6 +52,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 fastify.get('/:id/usage', async (request) => await getUsage(request))
 fastify.get('/:id/usage/:containerId', async (request) => await getUsageByContainer(request))

+fastify.get('/:id/images', async (request) => await getDockerImages(request))
+
 fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request))
 fastify.post<CancelDeployment>('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply));

@@ -64,6 +65,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 fastify.get('/:id/configuration/buildpack', async (request) => await getBuildPack(request));
 fastify.post('/:id/configuration/buildpack', async (request, reply) => await saveBuildPack(request, reply));

+fastify.post('/:id/configuration/registry', async (request, reply) => await saveRegistry(request, reply));
+
 fastify.post('/:id/configuration/database', async (request, reply) => await saveConnectedDatabase(request, reply));

 fastify.get<OnlyId>('/:id/configuration/sshkey', async (request) => await getGitLabSSHKey(request));
@@ -19,12 +19,15 @@ export interface SaveApplication extends OnlyId {
 denoMainFile: string,
 denoOptions: string,
 baseImage: string,
+gitCommitHash: string,
 baseBuildImage: string,
 deploymentType: string,
 baseDatabaseBranch: string,
 dockerComposeFile: string,
 dockerComposeFileLocation: string,
-dockerComposeConfiguration: string
+dockerComposeConfiguration: string,
+simpleDockerfile: string,
+dockerRegistryImageName: string
 }
 }
 export interface SaveApplicationSettings extends OnlyId {
@@ -55,7 +58,7 @@ export interface GetImages {
 Body: { buildPack: string, deploymentType: string }
 }
 export interface SaveApplicationSource extends OnlyId {
-Body: { gitSourceId?: string | null, forPublic?: boolean, type?: string }
+Body: { gitSourceId?: string | null, forPublic?: boolean, type?: string, simpleDockerfile?: string }
 }
 export interface CheckRepository extends OnlyId {
 Querystring: { repository: string, branch: string }
@@ -140,4 +143,12 @@ export interface RestartPreviewApplication {
 id: string,
 pullmergeRequestId: string | null,
 }
+}
+export interface RestartApplication {
+Params: {
+id: string,
+},
+Body: {
+imageId: string | null,
+}
 }
@@ -2,13 +2,20 @@ import { FastifyPluginAsync } from 'fastify';
 import { errorHandler, listSettings, version } from '../../../../lib/common';

 const root: FastifyPluginAsync = async (fastify): Promise<void> => {
+fastify.addHook('onRequest', async (request) => {
+try {
+await request.jwtVerify()
+} catch(error) {
+return
+}
+});
 fastify.get('/', async (request) => {
 const teamId = request.user?.teamId;
 const settings = await listSettings()
 try {
 return {
-ipv4: teamId ? settings.ipv4 : 'nope',
-ipv6: teamId ? settings.ipv6 : 'nope',
+ipv4: teamId ? settings.ipv4 : null,
+ipv6: teamId ? settings.ipv6 : null,
 version,
 whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true',
 whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON,
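The base route above now verifies a JWT in an onRequest hook but deliberately swallows the error, so unauthenticated callers still reach the handler and simply receive the anonymized fields (null ipv4/ipv6). A minimal sketch of that optional-auth pattern with @fastify/jwt; the secret name and plugin registration here are illustrative assumptions, not taken from this diff:

import Fastify from 'fastify';
import fastifyJwt from '@fastify/jwt';

const app = Fastify();
app.register(fastifyJwt, { secret: process.env.JWT_SECRET ?? 'dev-only-secret' });

// Optional authentication: populate request.user when a valid token is
// present, but never reject the request. Handlers can then branch on
// request.user?.teamId, as the route above does.
app.addHook('onRequest', async (request) => {
	try {
		await request.jwtVerify();
	} catch {
		// No or invalid token: continue as an anonymous request.
	}
});

app.get('/', async (request) => {
	const teamId = (request.user as { teamId?: string } | undefined)?.teamId;
	return { authenticated: Boolean(teamId) };
});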
@@ -3,7 +3,7 @@ import type { FastifyRequest } from 'fastify';
 import { FastifyReply } from 'fastify';
 import yaml from 'js-yaml';
 import fs from 'fs/promises';
-import { ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeDockerCmd, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePublicPort, listSettings, makeLabelForStandaloneDatabase, prisma, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
+import { ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeCommand, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePublicPort, listSettings, makeLabelForStandaloneDatabase, prisma, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
 import { day } from '../../../../lib/dayjs';
 
 import type { OnlyId } from '../../../../types';
@@ -89,7 +89,7 @@ export async function getDatabaseStatus(request: FastifyRequest<OnlyId>) {
         const { destinationDockerId, destinationDocker } = database;
         if (destinationDockerId) {
             try {
-                const { stdout } = await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker inspect --format '{{json .State}}' ${id}` })
+                const { stdout } = await executeCommand({ dockerId: destinationDocker.id, command: `docker inspect --format '{{json .State}}' ${id}` })
 
                 if (JSON.parse(stdout).Running) {
                     isRunning = true;
@@ -208,7 +208,7 @@ export async function saveDatabaseDestination(request: FastifyRequest<SaveDataba
         if (destinationDockerId) {
             if (type && version) {
                 const baseImage = getDatabaseImage(type, arch);
-                executeDockerCmd({ dockerId, command: `docker pull ${baseImage}:${version}` })
+                executeCommand({ dockerId, command: `docker pull ${baseImage}:${version}` })
             }
         }
         return reply.code(201).send({})
@@ -298,7 +298,7 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
         };
         const composeFileDestination = `${workdir}/docker-compose.yaml`;
         await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
-        await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose -f ${composeFileDestination} up -d` })
+        await executeCommand({ dockerId: destinationDocker.id, command: `docker compose -f ${composeFileDestination} up -d` })
         if (isPublic) await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
         return {};
 
@@ -347,7 +347,7 @@ export async function getDatabaseLogs(request: FastifyRequest<GetDatabaseLogs>)
         // const found = await checkContainer({ dockerId, container: id })
         // if (found) {
         const { default: ansi } = await import('strip-ansi')
-        const { stdout, stderr } = await executeDockerCmd({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${id}` })
+        const { stdout, stderr } = await executeCommand({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${id}` })
         const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
         const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
         const logs = stripLogsStderr.concat(stripLogsStdout)
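Note on the refactor running through the hunks above and below: the old asyncExecShell / executeDockerCmd / executeSSHCmd helpers are folded into a single executeCommand helper in lib/common. Its implementation is not part of this diff; the sketch below only mirrors the option names visible at the call sites (command, dockerId, shell, sshCommand) and is an assumption, not Coolify's actual code.

import { exec } from 'node:child_process';
import { promisify } from 'node:util';

const execAsync = promisify(exec);

// Option names are taken from the call sites in this diff; the semantics here are guessed.
interface ExecuteCommandOptions {
    command: string;
    dockerId?: string;    // which destination/engine the command targets
    shell?: boolean;      // the command needs shell features (pipes, redirects, globbing)
    sshCommand?: boolean; // run the command on the remote host over SSH instead of the docker CLI
}

export async function executeCommand({ command, dockerId, sshCommand }: ExecuteCommandOptions): Promise<{ stdout: string; stderr: string }> {
    // The real helper resolves dockerId to a local socket, a remote DOCKER_HOST or an SSH alias;
    // this sketch only demonstrates the { stdout, stderr } shape the call sites rely on.
    const finalCommand = sshCommand && dockerId ? `ssh ${dockerId}-remote ${JSON.stringify(command)}` : command;
    const { stdout, stderr } = await execAsync(finalCommand);
    return { stdout, stderr };
}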
@@ -4,7 +4,7 @@ import sshConfig from 'ssh-config'
 import fs from 'fs/promises'
 import os from 'os';
 
-import { asyncExecShell, createRemoteEngineConfiguration, decrypt, errorHandler, executeDockerCmd, executeSSHCmd, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
+import { createRemoteEngineConfiguration, decrypt, errorHandler, executeCommand, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
 import { checkContainer } from '../../../../lib/docker';
 
 import type { OnlyId } from '../../../../types';
@@ -79,9 +79,9 @@ export async function newDestination(request: FastifyRequest<NewDestination>, re
     let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } = request.body
     if (id === 'new') {
         if (engine) {
-            const { stdout } = await asyncExecShell(`DOCKER_HOST=unix:///var/run/docker.sock docker network ls --filter 'name=^${network}$' --format '{{json .}}'`);
+            const { stdout } = await await executeCommand({ command: `docker network ls --filter 'name=^${network}$' --format '{{json .}}'` });
             if (stdout === '') {
-                await asyncExecShell(`DOCKER_HOST=unix:///var/run/docker.sock docker network create --attachable ${network}`);
+                await await executeCommand({ command: `docker network create --attachable ${network}` });
             }
             await prisma.destinationDocker.create({
                 data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
@@ -103,7 +103,7 @@ export async function newDestination(request: FastifyRequest<NewDestination>, re
             return reply.code(201).send({ id: destination.id });
         } else {
             const destination = await prisma.destinationDocker.create({
-                data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed, remoteEngine: true, remoteIpAddress, remoteUser, remotePort }
+                data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed, remoteEngine: true, remoteIpAddress, remoteUser, remotePort: Number(remotePort) }
             });
             return reply.code(201).send({ id: destination.id })
         }
@@ -122,13 +122,13 @@ export async function deleteDestination(request: FastifyRequest<OnlyId>) {
     const { network, remoteVerified, engine, isCoolifyProxyUsed } = await prisma.destinationDocker.findUnique({ where: { id } });
     if (isCoolifyProxyUsed) {
         if (engine || remoteVerified) {
-            const { stdout: found } = await executeDockerCmd({
+            const { stdout: found } = await executeCommand({
                 dockerId: id,
                 command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
             })
             if (found) {
-                await executeDockerCmd({ dockerId: id, command: `docker network disconnect ${network} coolify-proxy` })
-                await executeDockerCmd({ dockerId: id, command: `docker network rm ${network}` })
+                await executeCommand({ dockerId: id, command: `docker network disconnect ${network} coolify-proxy` })
+                await executeCommand({ dockerId: id, command: `docker network rm ${network}` })
             }
         }
     }
@@ -203,22 +203,31 @@ export async function assignSSHKey(request: FastifyRequest) {
     }
 }
 export async function verifyRemoteDockerEngineFn(id: string) {
-    await createRemoteEngineConfiguration(id);
     const { remoteIpAddress, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst({ where: { id } })
-    const host = `ssh://${remoteIpAddress}-remote`
+    const daemonJson = `daemon-${id}.json`
-    const { stdout } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=${network}' --no-trunc --format "{{json .}}"`);
-    if (!stdout) {
-        await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable ${network}`);
-    }
-    const { stdout: coolifyNetwork } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`);
-    if (!coolifyNetwork) {
-        await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable coolify-infra`);
-    }
-    if (isCoolifyProxyUsed) await startTraefikProxy(id);
     try {
-        const { stdout: daemonJson } = await executeSSHCmd({ dockerId: id, command: `cat /etc/docker/daemon.json` });
-        let daemonJsonParsed = JSON.parse(daemonJson);
-        let isUpdated = false;
+        await executeCommand({ sshCommand: true, command: `docker network inspect ${network}`, dockerId: id });
+    } catch (error) {
+        await executeCommand({ command: `docker network create --attachable ${network}`, dockerId: id });
+    }
+
+    try {
+        await executeCommand({ sshCommand: true, command: `docker network inspect coolify-infra`, dockerId: id });
+    } catch (error) {
+        await executeCommand({ command: `docker network create --attachable coolify-infra`, dockerId: id });
+    }
+
+    if (isCoolifyProxyUsed) await startTraefikProxy(id);
+    let isUpdated = false;
+    let daemonJsonParsed = {
+        "live-restore": true,
+        "features": {
+            "buildkit": true
+        }
+    };
+    try {
+        const { stdout: daemonJson } = await executeCommand({ sshCommand: true, dockerId: id, command: `cat /etc/docker/daemon.json` });
+        daemonJsonParsed = JSON.parse(daemonJson);
         if (!daemonJsonParsed['live-restore'] || daemonJsonParsed['live-restore'] !== true) {
             isUpdated = true;
             daemonJsonParsed['live-restore'] = true
@@ -230,21 +239,19 @@ export async function verifyRemoteDockerEngineFn(id: string) {
                 buildkit: true
             }
         }
-        if (isUpdated) {
-            await executeSSHCmd({ dockerId: id, command: `echo '${JSON.stringify(daemonJsonParsed)}' > /etc/docker/daemon.json` });
-            await executeSSHCmd({ dockerId: id, command: `systemctl restart docker` });
-        }
     } catch (error) {
-        const daemonJsonParsed = {
-            "live-restore": true,
-            "features": {
-                "buildkit": true
-            }
+        isUpdated = true;
+    }
+    try {
+        if (isUpdated) {
+            await executeCommand({ shell: true, command: `echo '${JSON.stringify(daemonJsonParsed, null, 2)}' > /tmp/${daemonJson}` })
+            await executeCommand({ dockerId: id, command: `scp /tmp/${daemonJson} ${remoteIpAddress}-remote:/etc/docker/daemon.json` });
+            await executeCommand({ command: `rm /tmp/${daemonJson}` })
+            await executeCommand({ sshCommand: true, dockerId: id, command: `systemctl restart docker` });
         }
-        await executeSSHCmd({ dockerId: id, command: `echo '${JSON.stringify(daemonJsonParsed)}' > /etc/docker/daemon.json` });
-        await executeSSHCmd({ dockerId: id, command: `systemctl restart docker` });
-    } finally {
         await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } })
+    } catch (error) {
+        throw new Error('Error while verifying remote docker engine')
     }
 }
 export async function verifyRemoteDockerEngine(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
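The rewritten verification drops the `docker network ls` plus empty-stdout check in favour of a try/catch around `docker network inspect`, which exits non-zero when the network does not exist. A standalone sketch of that pattern, using plain child_process instead of Coolify's executeCommand:

import { execFile } from 'node:child_process';
import { promisify } from 'node:util';

const run = promisify(execFile);

// Create a Docker network only if it does not already exist.
// `docker network inspect` fails for unknown networks, so the catch branch is the "missing" path.
async function ensureAttachableNetwork(name: string): Promise<void> {
    try {
        await run('docker', ['network', 'inspect', name]);
    } catch {
        await run('docker', ['network', 'create', '--attachable', name]);
    }
}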
@@ -4,7 +4,6 @@ import bcrypt from "bcryptjs";
 import fs from 'fs/promises';
 import yaml from 'js-yaml';
 import {
-    asyncExecShell,
     asyncSleep,
     cleanupDockerStorage,
     errorHandler,
@@ -13,6 +12,8 @@ import {
     prisma,
     uniqueName,
     version,
+    sentryDSN,
+    executeCommand,
 } from "../../../lib/common";
 import { scheduler } from "../../../lib/scheduler";
 import type { FastifyReply, FastifyRequest } from "fastify";
@@ -24,6 +25,35 @@ export async function hashPassword(password: string): Promise<string> {
     return bcrypt.hash(password, saltRounds);
 }
 
+export async function backup(request: FastifyRequest) {
+    try {
+        const { backupData } = request.params;
+        let std = null;
+        const [id, backupType, type, zipped, storage] = backupData.split(':')
+        console.log(id, backupType, type, zipped, storage)
+        const database = await prisma.database.findUnique({ where: { id } })
+        if (database) {
+            // await executeDockerCmd({
+            //     dockerId: database.destinationDockerId,
+            //     command: `docker pull coollabsio/backup:latest`,
+            // })
+            std = await executeCommand({
+                dockerId: database.destinationDockerId,
+                command: `docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v coolify-local-backup:/app/backups -e CONTAINERS_TO_BACKUP="${backupData}" coollabsio/backup`
+            })
+
+        }
+        if (std.stdout) {
+            return std.stdout;
+        }
+        if (std.stderr) {
+            return std.stderr;
+        }
+        return 'nope';
+    } catch ({ status, message }) {
+        return errorHandler({ status, message });
+    }
+}
 export async function cleanupManually(request: FastifyRequest) {
     try {
         const { serverId } = request.body;
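The new backup handler packs all of its parameters into a single colon-separated path segment and hands the same string to the backup container via CONTAINERS_TO_BACKUP. A small sketch of that encoding, with the field names taken from the destructuring above (the example value is made up):

// "<databaseId>:<backupType>:<databaseType>:<zipped>:<storage>", e.g. "clxyz:manual:postgresql:true:local"
interface BackupRequestData {
    id: string;
    backupType: string;
    type: string;
    zipped: boolean;
    storage: string;
}

function parseBackupData(backupData: string): BackupRequestData {
    const [id, backupType, type, zipped, storage] = backupData.split(':');
    return { id, backupType, type, zipped: zipped === 'true', storage };
}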
@@ -110,14 +140,10 @@ export async function update(request: FastifyRequest<Update>) {
     try {
         if (!isDev) {
             const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
-            await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
-            await asyncExecShell(`env | grep COOLIFY > .env`);
-            await asyncExecShell(
-                `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
-            );
-            await asyncExecShell(
-                `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
-            );
+            await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` });
+            await executeCommand({ shell: true, command: `env | grep COOLIFY > .env` });
+            await executeCommand({ command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` });
+            await executeCommand({ shell: true, command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"` });
             return {};
         } else {
             await asyncSleep(2000);
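The auto-update sequence itself is unchanged by this hunk, only routed through executeCommand: pull the target image, snapshot the COOLIFY_* environment into .env, patch the auto-update flag, then let a detached helper container restart the stack so the API process does not kill its own updater. A compressed sketch of that order of operations; runShell is a stand-in callback, not Coolify's API:

async function selfUpdate(latestVersion: string, isAutoUpdateEnabled: boolean, runShell: (cmd: string) => Promise<unknown>) {
    await runShell(`docker pull coollabsio/coolify:${latestVersion}`);
    await runShell(`env | grep COOLIFY > .env`);
    await runShell(`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`);
    // The restart runs detached (-tid) from a throwaway container with the docker socket mounted.
    await runShell(
        `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock ` +
        `coollabsio/coolify:${latestVersion} /bin/sh -c "docker compose pull && docker compose up -d --force-recreate"`
    );
}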
@@ -146,7 +172,7 @@ export async function restartCoolify(request: FastifyRequest<any>) {
     const teamId = request.user.teamId;
     if (teamId === "0") {
         if (!isDev) {
-            asyncExecShell(`docker restart coolify`);
+            await executeCommand({ command: `docker restart coolify` });
             return {};
         } else {
             return {};
@@ -189,7 +215,7 @@ export async function showDashboard(request: FastifyRequest) {
 
         let foundUnconfiguredApplication = false;
         for (const application of applications) {
-            if (!application.buildPack || !application.destinationDockerId || !application.branch || (!application.settings?.isBot && !application?.fqdn) && application.buildPack !== "compose") {
+            if (((!application.buildPack || !application.branch) && !application.simpleDockerfile) || !application.destinationDockerId || (!application.settings?.isBot && !application?.fqdn) && application.buildPack !== "compose") {
                 foundUnconfiguredApplication = true
             }
         }
@@ -398,7 +424,8 @@ export async function getCurrentUser(
         }
         const pendingInvitations = await prisma.teamInvitation.findMany({ where: { uid: request.user.userId } })
         return {
-            settings: await prisma.setting.findFirst(),
+            settings: await prisma.setting.findUnique({ where: { id: "0" } }),
+            sentryDSN,
             pendingInvitations,
             token,
             ...request.user,
@@ -1,5 +1,5 @@
 import { FastifyPluginAsync } from 'fastify';
-import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify } from './handlers';
+import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify, backup } from './handlers';
 import { GetCurrentUser } from './types';
 
 export interface Update {
@@ -52,6 +52,10 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
     fastify.post('/internal/cleanup', {
         onRequest: [fastify.authenticate]
     }, async (request) => await cleanupManually(request));
+
+    // fastify.get('/internal/backup/:backupData', {
+    //     onRequest: [fastify.authenticate]
+    // }, async (request) => await backup(request));
 };
 
 export default root;
@@ -1,5 +1,5 @@
 import type { FastifyRequest } from 'fastify';
-import { errorHandler, executeDockerCmd, prisma, createRemoteEngineConfiguration, executeSSHCmd } from '../../../../lib/common';
+import { errorHandler, prisma, executeCommand } from '../../../../lib/common';
 import os from 'node:os';
 import osu from 'node-os-utils';
 
@@ -71,10 +71,10 @@ export async function showUsage(request: FastifyRequest) {
     let { remoteEngine } = request.query
     remoteEngine = remoteEngine === 'true' ? true : false
     if (remoteEngine) {
-        const { stdout: stats } = await executeSSHCmd({ dockerId: id, command: `vmstat -s` })
-        const { stdout: disks } = await executeSSHCmd({ dockerId: id, command: `df -m / --output=size,used,pcent|grep -v 'Used'| xargs` })
-        const { stdout: cpus } = await executeSSHCmd({ dockerId: id, command: `nproc --all` })
-        const { stdout: cpuUsage } = await executeSSHCmd({ dockerId: id, command: `echo $[100-$(vmstat 1 2|tail -1|awk '{print $15}')]` })
+        const { stdout: stats } = await executeCommand({ sshCommand: true, dockerId: id, command: `vmstat -s` })
+        const { stdout: disks } = await executeCommand({ sshCommand: true, shell: true, dockerId: id, command: `df -m / --output=size,used,pcent|grep -v 'Used'| xargs` })
+        const { stdout: cpus } = await executeCommand({ sshCommand: true, dockerId: id, command: `nproc --all` })
+        const { stdout: cpuUsage } = await executeCommand({ sshCommand: true, shell: true, dockerId: id, command: `echo $[100-$(vmstat 1 2|tail -1|awk '{print $15}')]` })
         const parsed: any = parseFromText(stats)
         return {
             usage: {
@@ -4,7 +4,7 @@ import yaml from 'js-yaml';
 import bcrypt from 'bcryptjs';
 import cuid from 'cuid';
 
-import { prisma, uniqueName, asyncExecShell, getServiceFromDB, getContainerUsage, isDomainConfigured, fixType, decrypt, encrypt, ComposeFile, getFreePublicPort, getDomain, errorHandler, generatePassword, isDev, stopTcpHttpProxy, executeDockerCmd, checkDomainsIsValidInDNS, checkExposedPort, listSettings, generateToken } from '../../../../lib/common';
+import { prisma, uniqueName, getServiceFromDB, getContainerUsage, isDomainConfigured, fixType, decrypt, encrypt, ComposeFile, getFreePublicPort, getDomain, errorHandler, generatePassword, isDev, stopTcpHttpProxy, checkDomainsIsValidInDNS, checkExposedPort, listSettings, generateToken, executeCommand } from '../../../../lib/common';
 import { day } from '../../../../lib/dayjs';
 import { checkContainer, } from '../../../../lib/docker';
 import { removeService } from '../../../../lib/services/common';
@@ -48,14 +48,19 @@ export async function cleanupUnconfiguredServices(request: FastifyRequest) {
         for (const service of services) {
             if (!service.fqdn) {
                 if (service.destinationDockerId) {
-                    await executeDockerCmd({
+                    const { stdout: containers } = await executeCommand({
                         dockerId: service.destinationDockerId,
-                        command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
-                    })
-                    await executeDockerCmd({
-                        dockerId: service.destinationDockerId,
-                        command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
+                        command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}`
                     })
+                    if (containers) {
+                        const containerArray = containers.split('\n');
+                        if (containerArray.length > 0) {
+                            for (const container of containerArray) {
+                                await executeCommand({ dockerId: service.destinationDockerId, command: `docker stop -t 0 ${container}` })
+                                await executeCommand({ dockerId: service.destinationDockerId, command: `docker rm --force ${container}` })
+                            }
+                        }
+                    }
                 }
                 await removeService({ id: service.id });
             }
@@ -73,58 +78,61 @@ export async function getServiceStatus(request: FastifyRequest<OnlyId>) {
         const { destinationDockerId, settings } = service;
         let payload = {}
         if (destinationDockerId) {
-            const { stdout: containers } = await executeDockerCmd({
+            const { stdout: containers } = await executeCommand({
                 dockerId: service.destinationDocker.id,
                 command:
                     `docker ps -a --filter "label=com.docker.compose.project=${id}" --format '{{json .}}'`
             });
-            const containersArray = containers.trim().split('\n');
-            if (containersArray.length > 0 && containersArray[0] !== '') {
-                const templates = await getTemplates();
-                let template = templates.find(t => t.type === service.type);
-                const templateStr = JSON.stringify(template)
-                if (templateStr) {
-                    template = JSON.parse(templateStr.replaceAll('$$id', service.id));
-                }
-                for (const container of containersArray) {
-                    let isRunning = false;
-                    let isExited = false;
-                    let isRestarting = false;
-                    let isExcluded = false;
-                    const containerObj = JSON.parse(container);
-                    const exclude = template?.services[containerObj.Names]?.exclude;
-                    if (exclude) {
+            if (containers) {
+                const containersArray = containers.trim().split('\n');
+                if (containersArray.length > 0 && containersArray[0] !== '') {
+                    const templates = await getTemplates();
+                    let template = templates.find(t => t.type === service.type);
+                    const templateStr = JSON.stringify(template)
+                    if (templateStr) {
+                        template = JSON.parse(templateStr.replaceAll('$$id', service.id));
+                    }
+                    for (const container of containersArray) {
+                        let isRunning = false;
+                        let isExited = false;
+                        let isRestarting = false;
+                        let isExcluded = false;
+                        const containerObj = JSON.parse(container);
+                        const exclude = template?.services[containerObj.Names]?.exclude;
+                        if (exclude) {
+                            payload[containerObj.Names] = {
+                                status: {
+                                    isExcluded: true,
+                                    isRunning: false,
+                                    isExited: false,
+                                    isRestarting: false,
+                                }
+                            }
+                            continue;
+                        }
+
+                        const status = containerObj.State
+                        if (status === 'running') {
+                            isRunning = true;
+                        }
+                        if (status === 'exited') {
+                            isExited = true;
+                        }
+                        if (status === 'restarting') {
+                            isRestarting = true;
+                        }
                         payload[containerObj.Names] = {
                             status: {
-                                isExcluded: true,
-                                isRunning: false,
-                                isExited: false,
-                                isRestarting: false,
+                                isExcluded,
+                                isRunning,
+                                isExited,
+                                isRestarting
                             }
                         }
-                        continue;
-                    }
-
-                    const status = containerObj.State
-                    if (status === 'running') {
-                        isRunning = true;
-                    }
-                    if (status === 'exited') {
-                        isExited = true;
-                    }
-                    if (status === 'restarting') {
-                        isRestarting = true;
-                    }
-                    payload[containerObj.Names] = {
-                        status: {
-                            isExcluded,
-                            isRunning,
-                            isExited,
-                            isRestarting
-                        }
                     }
                 }
             }
+
         }
         return payload
     } catch ({ status, message }) {
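For context on the hunk above: `docker ps -a --format '{{json .}}'` prints one JSON object per line, and the refactor only adds a guard for the empty-output case before splitting. A self-contained sketch of the per-line parsing and the status flags derived from State:

interface ContainerStatus {
    isRunning: boolean;
    isExited: boolean;
    isRestarting: boolean;
    isExcluded: boolean;
}

// stdout is the raw output of: docker ps -a --format '{{json .}}'
function parseContainerStatuses(stdout: string): Record<string, ContainerStatus> {
    const payload: Record<string, ContainerStatus> = {};
    if (!stdout) return payload; // the guard the refactor introduces
    for (const line of stdout.trim().split('\n').filter(Boolean)) {
        const container = JSON.parse(line) as { Names: string; State: string };
        payload[container.Names] = {
            isRunning: container.State === 'running',
            isExited: container.State === 'exited',
            isRestarting: container.State === 'restarting',
            isExcluded: false
        };
    }
    return payload;
}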
@@ -239,13 +247,13 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
             if (value === '$$generate_fqdn') {
                 strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + '"' || '' + '"')
             } else if (value === '$$generate_fqdn_slash') {
                 strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + '/' + '"')
             } else if (value === '$$generate_domain') {
                 strParsedTemplate = strParsedTemplate.replaceAll(regex, getDomain(service.fqdn) + '"')
             } else if (service.destinationDocker?.network && value === '$$generate_network') {
                 strParsedTemplate = strParsedTemplate.replaceAll(regex, service.destinationDocker.network + '"')
             } else {
                 strParsedTemplate = strParsedTemplate.replaceAll(regex, value + '"')
             }
         }
     }
@@ -443,7 +451,7 @@ export async function getServiceLogs(request: FastifyRequest<GetServiceLogs>) {
     if (destinationDockerId) {
         try {
             const { default: ansi } = await import('strip-ansi')
-            const { stdout, stderr } = await executeDockerCmd({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` })
+            const { stdout, stderr } = await executeCommand({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` })
             const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
             const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
             const logs = stripLogsStderr.concat(stripLogsStdout)
@@ -749,7 +757,7 @@ export async function activatePlausibleUsers(request: FastifyRequest<OnlyId>, re
         if (destinationDockerId) {
             const databaseUrl = serviceSecret.find((secret) => secret.name === 'DATABASE_URL');
             if (databaseUrl) {
-                await executeDockerCmd({
+                await executeCommand({
                     dockerId: destinationDocker.id,
                     command: `docker exec ${id}-postgresql psql -H ${databaseUrl.value} -c "UPDATE users SET email_verified = true;"`
                 })
@@ -770,9 +778,10 @@ export async function cleanupPlausibleLogs(request: FastifyRequest<OnlyId>, repl
             destinationDocker,
         } = await getServiceFromDB({ id, teamId });
         if (destinationDockerId) {
-            await executeDockerCmd({
+            await executeCommand({
                 dockerId: destinationDocker.id,
-                command: `docker exec ${id}-clickhouse /usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\"`
+                command: `docker exec ${id}-clickhouse /usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\"`,
+                shell: true
             })
             return await reply.code(201).send()
         }
@@ -812,36 +821,42 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
         if (user) ftpUser = user;
         if (savedPassword) ftpPassword = decrypt(savedPassword);
 
-        const { stdout: password } = await asyncExecShell(
-            `echo ${ftpPassword} | openssl passwd -1 -stdin`
+        // TODO: rewrite these to usable without shell
+        const { stdout: password } = await executeCommand({
+            command:
+                `echo ${ftpPassword} | openssl passwd -1 -stdin`,
+            shell: true
+        }
         );
         if (destinationDockerId) {
             try {
                 await fs.stat(hostkeyDir);
             } catch (error) {
-                await asyncExecShell(`mkdir -p ${hostkeyDir}`);
+                await executeCommand({ command: `mkdir -p ${hostkeyDir}` });
             }
             if (!ftpHostKey) {
-                await asyncExecShell(
-                    `ssh-keygen -t ed25519 -f ssh_host_ed25519_key -N "" -q -f ${hostkeyDir}/${id}.ed25519`
+                await executeCommand({
+                    command:
+                        `ssh-keygen -t ed25519 -f ssh_host_ed25519_key -N "" -q -f ${hostkeyDir}/${id}.ed25519`
+                }
                 );
-                const { stdout: ftpHostKey } = await asyncExecShell(`cat ${hostkeyDir}/${id}.ed25519`);
+                const { stdout: ftpHostKey } = await executeCommand({ command: `cat ${hostkeyDir}/${id}.ed25519` });
                 await prisma.wordpress.update({
                     where: { serviceId: id },
                     data: { ftpHostKey: encrypt(ftpHostKey) }
                 });
             } else {
-                await asyncExecShell(`echo "${decrypt(ftpHostKey)}" > ${hostkeyDir}/${id}.ed25519`);
+                await executeCommand({ command: `echo "${decrypt(ftpHostKey)}" > ${hostkeyDir}/${id}.ed25519`, shell: true });
             }
             if (!ftpHostKeyPrivate) {
-                await asyncExecShell(`ssh-keygen -t rsa -b 4096 -N "" -f ${hostkeyDir}/${id}.rsa`);
-                const { stdout: ftpHostKeyPrivate } = await asyncExecShell(`cat ${hostkeyDir}/${id}.rsa`);
+                await executeCommand({ command: `ssh-keygen -t rsa -b 4096 -N "" -f ${hostkeyDir}/${id}.rsa` });
+                const { stdout: ftpHostKeyPrivate } = await executeCommand({ command: `cat ${hostkeyDir}/${id}.rsa` });
                 await prisma.wordpress.update({
                     where: { serviceId: id },
                     data: { ftpHostKeyPrivate: encrypt(ftpHostKeyPrivate) }
                 });
             } else {
-                await asyncExecShell(`echo "${decrypt(ftpHostKeyPrivate)}" > ${hostkeyDir}/${id}.rsa`);
+                await executeCommand({ command: `echo "${decrypt(ftpHostKeyPrivate)}" > ${hostkeyDir}/${id}.rsa`, shell: true });
             }
 
             await prisma.wordpress.update({
@@ -856,9 +871,10 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
             try {
                 const { found: isRunning } = await checkContainer({ dockerId: destinationDocker.id, container: `${id}-ftp` });
                 if (isRunning) {
-                    await executeDockerCmd({
+                    await executeCommand({
                         dockerId: destinationDocker.id,
-                        command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`
+                        command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`,
+                        shell: true
                     })
                 }
             } catch (error) { }
@@ -902,9 +918,9 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
                 `${hostkeyDir}/${id}.sh`,
                 `#!/bin/bash\nchmod 600 /etc/ssh/ssh_host_ed25519_key /etc/ssh/ssh_host_rsa_key\nuserdel -f xfs\nchown -R 33:33 /home/${ftpUser}/wordpress/`
             );
-            await asyncExecShell(`chmod +x ${hostkeyDir}/${id}.sh`);
+            await executeCommand({ command: `chmod +x ${hostkeyDir}/${id}.sh` });
             await fs.writeFile(`${hostkeyDir}/${id}-docker-compose.yml`, yaml.dump(compose));
-            await executeDockerCmd({
+            await executeCommand({
                 dockerId: destinationDocker.id,
                 command: `docker compose -f ${hostkeyDir}/${id}-docker-compose.yml up -d`
             })
@@ -921,9 +937,10 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
                 data: { ftpPublicPort: null }
             });
             try {
-                await executeDockerCmd({
+                await executeCommand({
                     dockerId: destinationDocker.id,
-                    command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`
+                    command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`,
+                    shell: true
                 })
 
             } catch (error) {
@@ -937,8 +954,10 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
         return errorHandler({ status, message })
     } finally {
         try {
-            await asyncExecShell(
-                `rm -fr ${hostkeyDir}/${id}-docker-compose.yml ${hostkeyDir}/${id}.ed25519 ${hostkeyDir}/${id}.ed25519.pub ${hostkeyDir}/${id}.rsa ${hostkeyDir}/${id}.rsa.pub ${hostkeyDir}/${id}.sh`
+            await executeCommand({
+                command:
+                    `rm -fr ${hostkeyDir}/${id}-docker-compose.yml ${hostkeyDir}/${id}.ed25519 ${hostkeyDir}/${id}.ed25519.pub ${hostkeyDir}/${id}.rsa ${hostkeyDir}/${id}.rsa.pub ${hostkeyDir}/${id}.sh`
+            }
             );
         } catch (error) { }
 
|
@ -1,9 +1,9 @@
|
|||||||
import { promises as dns } from 'dns';
|
import { promises as dns } from 'dns';
|
||||||
import { X509Certificate } from 'node:crypto';
|
import { X509Certificate } from 'node:crypto';
|
||||||
|
import * as Sentry from '@sentry/node';
|
||||||
import type { FastifyReply, FastifyRequest } from 'fastify';
|
import type { FastifyReply, FastifyRequest } from 'fastify';
|
||||||
import { asyncExecShell, checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, isDev, isDNSValid, isDomainConfigured, listSettings, prisma } from '../../../../lib/common';
|
import { checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, executeCommand, getDomain, isDev, isDNSValid, isDomainConfigured, listSettings, prisma, sentryDSN, version } from '../../../../lib/common';
|
||||||
import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types';
|
import { AddDefaultRegistry, CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey, SetDefaultRegistry } from './types';
|
||||||
|
|
||||||
|
|
||||||
export async function listAllSettings(request: FastifyRequest) {
|
export async function listAllSettings(request: FastifyRequest) {
|
||||||
@ -11,6 +11,13 @@ export async function listAllSettings(request: FastifyRequest) {
|
|||||||
const teamId = request.user.teamId;
|
const teamId = request.user.teamId;
|
||||||
const settings = await listSettings();
|
const settings = await listSettings();
|
||||||
const sshKeys = await prisma.sshKey.findMany({ where: { team: { id: teamId } } })
|
const sshKeys = await prisma.sshKey.findMany({ where: { team: { id: teamId } } })
|
||||||
|
let registries = await prisma.dockerRegistry.findMany({ where: { team: { id: teamId } } })
|
||||||
|
registries = registries.map((registry) => {
|
||||||
|
if (registry.password) {
|
||||||
|
registry.password = decrypt(registry.password)
|
||||||
|
}
|
||||||
|
return registry
|
||||||
|
})
|
||||||
const unencryptedKeys = []
|
const unencryptedKeys = []
|
||||||
if (sshKeys.length > 0) {
|
if (sshKeys.length > 0) {
|
||||||
for (const key of sshKeys) {
|
for (const key of sshKeys) {
|
||||||
@ -27,7 +34,8 @@ export async function listAllSettings(request: FastifyRequest) {
|
|||||||
return {
|
return {
|
||||||
settings,
|
settings,
|
||||||
certificates: cns,
|
certificates: cns,
|
||||||
sshKeys: unencryptedKeys
|
sshKeys: unencryptedKeys,
|
||||||
|
registries
|
||||||
}
|
}
|
||||||
} catch ({ status, message }) {
|
} catch ({ status, message }) {
|
||||||
return errorHandler({ status, message })
|
return errorHandler({ status, message })
|
||||||
@ -35,7 +43,10 @@ export async function listAllSettings(request: FastifyRequest) {
|
|||||||
}
|
}
|
||||||
export async function saveSettings(request: FastifyRequest<SaveSettings>, reply: FastifyReply) {
|
export async function saveSettings(request: FastifyRequest<SaveSettings>, reply: FastifyReply) {
|
||||||
try {
|
try {
|
||||||
const {
|
let {
|
||||||
|
previewSeparator,
|
||||||
|
numberOfDockerImagesKeptLocally,
|
||||||
|
doNotTrack,
|
||||||
fqdn,
|
fqdn,
|
||||||
isAPIDebuggingEnabled,
|
isAPIDebuggingEnabled,
|
||||||
isRegistrationEnabled,
|
isRegistrationEnabled,
|
||||||
@@ -47,10 +58,29 @@ export async function saveSettings(request: FastifyRequest<SaveSettings>, reply:
             DNSServers,
             proxyDefaultRedirect
         } = request.body
-        const { id } = await listSettings();
+        const { id, previewSeparator: SetPreviewSeparator } = await listSettings();
+        if (numberOfDockerImagesKeptLocally) {
+            numberOfDockerImagesKeptLocally = Number(numberOfDockerImagesKeptLocally)
+        }
+        if (previewSeparator == '') {
+            previewSeparator = '.'
+        }
+        if (SetPreviewSeparator != previewSeparator) {
+            const applications = await prisma.application.findMany({ where: { previewApplication: { some: { id: { not: undefined } } } }, include: { previewApplication: true } })
+            for (const application of applications) {
+                for (const preview of application.previewApplication) {
+                    const { protocol } = new URL(preview.customDomain)
+                    const { pullmergeRequestId } = preview
+                    const { fqdn } = application
+                    const newPreviewDomain = `${protocol}//${pullmergeRequestId}${previewSeparator}${getDomain(fqdn)}`
+                    await prisma.previewApplication.update({ where: { id: preview.id }, data: { customDomain: newPreviewDomain } })
+                }
+            }
+        }
+
         await prisma.setting.update({
             where: { id },
-            data: { isRegistrationEnabled, dualCerts, isAutoUpdateEnabled, isDNSCheckEnabled, DNSServers, isAPIDebuggingEnabled, }
+            data: { previewSeparator, numberOfDockerImagesKeptLocally, doNotTrack, isRegistrationEnabled, dualCerts, isAutoUpdateEnabled, isDNSCheckEnabled, DNSServers, isAPIDebuggingEnabled }
         });
         if (fqdn) {
             await prisma.setting.update({ where: { id }, data: { fqdn } });
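When the preview separator changes, every stored preview domain is rewritten from `<protocol>//<PR id><old separator><app domain>` to use the new separator, as the loop above does with Prisma. The URL arithmetic on its own looks roughly like this (example values are invented):

// Rebuild a preview FQDN, e.g. buildPreviewDomain('https://app.example.com', 123, '.') === 'https://123.app.example.com'
function buildPreviewDomain(applicationFqdn: string, pullmergeRequestId: number | string, separator: string): string {
    const { protocol, host } = new URL(applicationFqdn);
    return `${protocol}//${pullmergeRequestId}${separator}${host}`;
}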
@@ -59,6 +89,14 @@ export async function saveSettings(request: FastifyRequest<SaveSettings>, reply:
         if (minPort && maxPort) {
             await prisma.setting.update({ where: { id }, data: { minPort, maxPort } });
         }
+        if (doNotTrack === false) {
+            // Sentry.init({
+            //     dsn: sentryDSN,
+            //     environment: isDev ? 'development' : 'production',
+            //     release: version
+            // });
+            // console.log('Sentry initialized')
+        }
         return reply.code(201).send()
     } catch ({ status, message }) {
         return errorHandler({ status, message })
@@ -91,7 +129,7 @@ export async function checkDomain(request: FastifyRequest<CheckDomain>) {
         if (fqdn) fqdn = fqdn.toLowerCase();
         const found = await isDomainConfigured({ id, fqdn });
         if (found) {
-            throw "Domain already configured";
+            throw { message: "Domain already configured" };
         }
         if (isDNSCheckEnabled && !forceSave && !isDev) {
             const hostname = request.hostname.split(':')[0]
@@ -131,8 +169,9 @@ export async function saveSSHKey(request: FastifyRequest<SaveSSHKey>, reply: Fas
 }
 export async function deleteSSHKey(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
     try {
+        const teamId = request.user.teamId;
         const { id } = request.body;
-        await prisma.sshKey.delete({ where: { id } })
+        await prisma.sshKey.deleteMany({ where: { id, teamId } })
         return reply.code(201).send()
     } catch ({ status, message }) {
         return errorHandler({ status, message })
@@ -141,9 +180,54 @@ export async function deleteSSHKey(request: FastifyRequest<OnlyIdInBody>, reply:
 
 export async function deleteCertificates(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
     try {
+        const teamId = request.user.teamId;
         const { id } = request.body;
-        await asyncExecShell(`docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`)
-        await prisma.certificate.delete({ where: { id } })
+        await executeCommand({ command: `docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`, shell: true })
+        await prisma.certificate.deleteMany({ where: { id, teamId } })
+        return reply.code(201).send()
+    } catch ({ status, message }) {
+        return errorHandler({ status, message })
+    }
+}
+
+export async function setDockerRegistry(request: FastifyRequest<SetDefaultRegistry>, reply: FastifyReply) {
+    try {
+        const teamId = request.user.teamId;
+        const { id, username, password } = request.body;
+
+        let encryptedPassword = ''
+        if (password) encryptedPassword = encrypt(password)
+
+        if (teamId === '0') {
+            await prisma.dockerRegistry.update({ where: { id }, data: { username, password: encryptedPassword } })
+        } else {
+            await prisma.dockerRegistry.updateMany({ where: { id, teamId }, data: { username, password: encryptedPassword } })
+        }
+        return reply.code(201).send()
+    } catch ({ status, message }) {
+        return errorHandler({ status, message })
+    }
+}
+export async function addDockerRegistry(request: FastifyRequest<AddDefaultRegistry>, reply: FastifyReply) {
+    try {
+        const teamId = request.user.teamId;
+        const { name, url, username, password } = request.body;
+
+        let encryptedPassword = ''
+        if (password) encryptedPassword = encrypt(password)
+        await prisma.dockerRegistry.create({ data: { name, url, username, password: encryptedPassword, team: { connect: { id: teamId } } } })
+
+        return reply.code(201).send()
+    } catch ({ status, message }) {
+        return errorHandler({ status, message })
+    }
+}
+export async function deleteDockerRegistry(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
+    try {
+        const teamId = request.user.teamId;
+        const { id } = request.body;
+        await prisma.application.updateMany({ where: { dockerRegistryId: id }, data: { dockerRegistryId: null } })
+        await prisma.dockerRegistry.deleteMany({ where: { id, teamId } })
         return reply.code(201).send()
     } catch ({ status, message }) {
         return errorHandler({ status, message })
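The new registry handlers follow the same hardening pattern as the SSH-key and certificate deletes above: secrets are encrypted before they are stored, and mutations are scoped by both id and teamId (updateMany/deleteMany) so one team cannot modify another team's records. Reduced to its core, the scoping idea is just a compound match; the in-memory stand-in below is illustrative only:

interface DockerRegistryRecord {
    id: string;
    teamId: string;
    name: string;
    url: string;
    username?: string;
    password?: string; // stored encrypted by the real handlers
}

// Mirrors prisma.dockerRegistry.deleteMany({ where: { id, teamId } }):
// nothing is removed unless both the record id and the caller's team match.
function deleteRegistryScoped(registries: DockerRegistryRecord[], id: string, teamId: string): DockerRegistryRecord[] {
    return registries.filter((registry) => !(registry.id === id && registry.teamId === teamId));
}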
@@ -2,8 +2,8 @@ import { FastifyPluginAsync } from 'fastify';
 import { X509Certificate } from 'node:crypto';
 
 import { encrypt, errorHandler, prisma } from '../../../../lib/common';
-import { checkDNS, checkDomain, deleteCertificates, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey } from './handlers';
-import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types';
+import { addDockerRegistry, checkDNS, checkDomain, deleteCertificates, deleteDockerRegistry, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey, setDockerRegistry } from './handlers';
+import { AddDefaultRegistry, CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey, SetDefaultRegistry } from './types';
 
 
 const root: FastifyPluginAsync = async (fastify): Promise<void> => {
@@ -20,6 +20,10 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
     fastify.post<SaveSSHKey>('/sshKey', async (request, reply) => await saveSSHKey(request, reply));
     fastify.delete<OnlyIdInBody>('/sshKey', async (request, reply) => await deleteSSHKey(request, reply));
 
+    fastify.post<SetDefaultRegistry>('/registry', async (request, reply) => await setDockerRegistry(request, reply));
+    fastify.post<AddDefaultRegistry>('/registry/new', async (request, reply) => await addDockerRegistry(request, reply));
+    fastify.delete<OnlyIdInBody>('/registry', async (request, reply) => await deleteDockerRegistry(request, reply));
+
     fastify.post('/upload', async (request) => {
         try {
             const teamId = request.user.teamId;
@@ -53,7 +57,6 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 
     });
     fastify.delete<OnlyIdInBody>('/certificate', async (request, reply) => await deleteCertificates(request, reply))
-    // fastify.get('/certificates', async (request) => await getCertificates(request))
 };
 
 export default root;
|
@ -2,6 +2,9 @@ import { OnlyId } from "../../../../types"
|
|||||||
|
|
||||||
export interface SaveSettings {
|
export interface SaveSettings {
|
||||||
Body: {
|
Body: {
|
||||||
|
previewSeparator: string,
|
||||||
|
numberOfDockerImagesKeptLocally: number,
|
||||||
|
doNotTrack: boolean,
|
||||||
fqdn: string,
|
fqdn: string,
|
||||||
isAPIDebuggingEnabled: boolean,
|
isAPIDebuggingEnabled: boolean,
|
||||||
isRegistrationEnabled: boolean,
|
isRegistrationEnabled: boolean,
|
||||||
@ -21,30 +24,46 @@ export interface DeleteDomain {
|
|||||||
}
|
}
|
||||||
export interface CheckDomain extends OnlyId {
|
export interface CheckDomain extends OnlyId {
|
||||||
Body: {
|
Body: {
|
||||||
fqdn: string,
|
fqdn: string,
|
||||||
forceSave: boolean,
|
forceSave: boolean,
|
||||||
dualCerts: boolean,
|
dualCerts: boolean,
|
||||||
isDNSCheckEnabled: boolean,
|
isDNSCheckEnabled: boolean,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
export interface CheckDNS {
|
export interface CheckDNS {
|
||||||
Params: {
|
Params: {
|
||||||
domain: string,
|
domain: string,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
export interface SaveSSHKey {
|
export interface SaveSSHKey {
|
||||||
Body: {
|
Body: {
|
||||||
privateKey: string,
|
privateKey: string,
|
||||||
name: string
|
name: string
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
export interface DeleteSSHKey {
|
export interface DeleteSSHKey {
|
||||||
Body: {
|
Body: {
|
||||||
id: string
|
id: string
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
export interface OnlyIdInBody {
|
export interface OnlyIdInBody {
|
||||||
Body: {
|
Body: {
|
||||||
id: string
|
id: string
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SetDefaultRegistry {
|
||||||
|
Body: {
|
||||||
|
id: string
|
||||||
|
username: string
|
||||||
|
password: string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export interface AddDefaultRegistry {
|
||||||
|
Body: {
|
||||||
|
url: string
|
||||||
|
name: string
|
||||||
|
username: string
|
||||||
|
password: string
|
||||||
|
}
|
||||||
}
|
}
|
@@ -37,9 +37,7 @@ export async function getSource(request: FastifyRequest<OnlyId>) {
     try {
         const { id } = request.params
         const { teamId } = request.user
 
         const settings = await prisma.setting.findFirst({});
-        if (settings.proxyPassword) settings.proxyPassword = decrypt(settings.proxyPassword);
-
         if (id === 'new') {
             return {
@@ -71,7 +71,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
     const githubEvent = request.headers['x-github-event']?.toString().toLowerCase();
     const githubSignature = request.headers['x-hub-signature-256']?.toString().toLowerCase();
     if (!allowedGithubEvents.includes(githubEvent)) {
-        throw { status: 500, message: 'Event not allowed.' }
+        throw { status: 500, message: 'Event not allowed.', type: 'webhook' }
     }
     if (githubEvent === 'ping') {
         return { pong: 'cool' }
@@ -89,9 +89,10 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
         branch = body.pull_request.base.ref
     }
     if (!projectId || !branch) {
-        throw { status: 500, message: 'Cannot parse projectId or branch from the webhook?!' }
+        throw { status: 500, message: 'Cannot parse projectId or branch from the webhook?!', type: 'webhook' }
     }
     const applicationsFound = await getApplicationFromDBWebhook(projectId, branch);
+    const settings = await prisma.setting.findUnique({ where: { id: '0' } });
     if (applicationsFound && applicationsFound.length > 0) {
         for (const application of applicationsFound) {
             const buildId = cuid();
@@ -106,7 +107,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
                 const checksum = Buffer.from(githubSignature, 'utf8');
                 //@ts-ignore
                 if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
-                    throw { status: 500, message: 'SHA256 checksum failed. Are you doing something fishy?' }
+                    throw { status: 500, message: 'SHA256 checksum failed. Are you doing something fishy?', type: 'webhook' }
                 };
             }
 
@@ -156,7 +157,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
                 const sourceBranch = body.pull_request.head.ref
                 const sourceRepository = body.pull_request.head.repo.full_name
                 if (!allowedActions.includes(pullmergeRequestAction)) {
-                    throw { status: 500, message: 'Action not allowed.' }
+                    throw { status: 500, message: 'Action not allowed.', type: 'webhook' }
                 }
 
                 if (application.settings.previews) {
@@ -168,7 +169,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
                         }
                     );
                     if (!isRunning) {
-                        throw { status: 500, message: 'Application not running.' }
+                        throw { status: 500, message: 'Application not running.', type: 'webhook' }
                     }
                 }
                 if (
@@ -192,7 +193,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
                         data: {
                             pullmergeRequestId,
                             sourceBranch,
-                            customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
+                            customDomain: `${protocol}${pullmergeRequestId}${settings.previewSeparator}${getDomain(application.fqdn)}`,
                            application: { connect: { id: application.id } }
                         }
                     })
@@ -257,8 +258,8 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
            }
         }
     }
-    } catch ({ status, message }) {
-        return errorHandler({ status, message })
+    } catch ({ status, message, type }) {
+        return errorHandler({ status, message, type })
     }
 
 }
@ -44,8 +44,9 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
    const allowedActions = ['opened', 'reopen', 'close', 'open', 'update'];
    const webhookToken = request.headers['x-gitlab-token'];
    if (!webhookToken && !isDev) {
-       throw { status: 500, message: 'Invalid webhookToken.' }
+       throw { status: 500, message: 'Invalid webhookToken.', type: 'webhook' }
    }
+   const settings = await prisma.setting.findUnique({ where: { id: '0' } });
    if (objectKind === 'push') {
        const projectId = Number(project_id);
        const branch = ref.split('/')[2];
@ -95,10 +96,10 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
        const pullmergeRequestId = request.body.object_attributes.iid.toString();
        const projectId = Number(id);
        if (!allowedActions.includes(action)) {
-           throw { status: 500, message: 'Action not allowed.' }
+           throw { status: 500, message: 'Action not allowed.', type: 'webhook' }
        }
        if (isDraft) {
-           throw { status: 500, message: 'Draft MR, do nothing.' }
+           throw { status: 500, message: 'Draft MR, do nothing.', type: 'webhook' }
        }
        const applicationsFound = await getApplicationFromDBWebhook(projectId, targetBranch);
        if (applicationsFound && applicationsFound.length > 0) {
@ -113,11 +114,11 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
                    }
                );
                if (!isRunning) {
-                   throw { status: 500, message: 'Application not running.' }
+                   throw { status: 500, message: 'Application not running.', type: 'webhook' }
                }
            }
            if (!isDev && application.gitSource.gitlabApp.webhookToken !== webhookToken) {
-               throw { status: 500, message: 'Invalid webhookToken. Are you doing something nasty?!' }
+               throw { status: 500, message: 'Invalid webhookToken. Are you doing something nasty?!', type: 'webhook' }
            }
            if (
                action === 'opened' ||
@ -140,7 +141,7 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
                    data: {
                        pullmergeRequestId,
                        sourceBranch,
-                       customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
+                       customDomain: `${protocol}${pullmergeRequestId}${settings.previewSeparator}${getDomain(application.fqdn)}`,
                        application: { connect: { id: application.id } }
                    }
                })
@ -188,7 +189,7 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
            }
        }
    }
-   } catch ({ status, message }) {
-       return errorHandler({ status, message })
+   } catch ({ status, message, type }) {
+       return errorHandler({ status, message, type })
    }
}
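Both webhook handlers above now load the Setting row (id '0') and use its previewSeparator when building the preview FQDN, instead of a hard-coded dot. A minimal sketch of the resulting string, with assumed example values and a local stand-in for getDomain (the real helper comes from lib/common):

// Assumed example values; previewSeparator comes from prisma.setting in the handlers above.
const protocol = 'https://';
const pullmergeRequestId = '42';
const previewSeparator = '.'; // could be configured as '-' or any other separator
const getDomain = (fqdn: string) => fqdn.replace(/^https?:\/\//, ''); // stand-in for the lib/common helper

const customDomain = `${protocol}${pullmergeRequestId}${previewSeparator}${getDomain('https://app.example.com')}`;
// => 'https://42.app.example.com' with '.', or 'https://42-app.example.com' with '-'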
@ -1,5 +1,5 @@
import { FastifyRequest } from "fastify";
-import { errorHandler, getDomain, isDev, prisma, executeDockerCmd, fixType } from "../../../lib/common";
+import { errorHandler, getDomain, isDev, prisma, executeCommand } from "../../../lib/common";
import { getTemplates } from "../../../lib/services";
import { OnlyId } from "../../../types";

@ -171,8 +171,8 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
    };
    try {
        const { id = null } = request.params;
-       const settings = await prisma.setting.findFirst();
-       if (settings.isTraefikUsed && settings.proxyDefaultRedirect) {
+       const coolifySettings = await prisma.setting.findFirst();
+       if (coolifySettings.isTraefikUsed && coolifySettings.proxyDefaultRedirect) {
            traefik.http.routers['catchall-http'] = {
                entrypoints: ["web"],
                rule: "HostRegexp(`{catchall:.*}`)",
@ -190,7 +190,7 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
            traefik.http.middlewares['redirect-regexp'] = {
                redirectregex: {
                    regex: '(.*)',
-                   replacement: settings.proxyDefaultRedirect,
+                   replacement: coolifySettings.proxyDefaultRedirect,
                    permanent: false
                }
            }
@ -263,10 +263,12 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
        const runningContainers = {}
        applications.forEach((app) => dockerIds.add(app.destinationDocker.id));
        for (const dockerId of dockerIds) {
-           const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
-           const containersArray = container.trim().split('\n');
-           if (containersArray.length > 0) {
-               runningContainers[dockerId] = containersArray
+           const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
+           if (container) {
+               const containersArray = container.trim().split('\n');
+               if (containersArray.length > 0) {
+                   runningContainers[dockerId] = containersArray
+               }
            }
        }
        for (const application of applications) {
@ -332,20 +334,22 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
            traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
            traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, id, port) }
            if (previews) {
-               const { stdout } = await executeDockerCmd({ dockerId, command: `docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"` })
-               const containers = stdout
-                   .trim()
-                   .split('\n')
-                   .filter((a) => a)
-                   .map((c) => c.replace(/"/g, ''));
-               if (containers.length > 0) {
-                   for (const container of containers) {
-                       const previewDomain = `${container.split('-')[1]}.${domain}`;
-                       const nakedDomain = previewDomain.replace(/^www\./, '');
-                       const pathPrefix = '/'
-                       const serviceId = `${container}-${port || 'default'}`
-                       traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, previewDomain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
-                       traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, container, port) }
+               const { stdout } = await executeCommand({ dockerId, command: `docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"` })
+               if (stdout) {
+                   const containers = stdout
+                       .trim()
+                       .split('\n')
+                       .filter((a) => a)
+                       .map((c) => c.replace(/"/g, ''));
+                   if (containers.length > 0) {
+                       for (const container of containers) {
+                           const previewDomain = `${container.split('-')[1]}${coolifySettings.previewSeparator}${domain}`;
+                           const nakedDomain = previewDomain.replace(/^www\./, '');
+                           const pathPrefix = '/'
+                           const serviceId = `${container}-${port || 'default'}`
+                           traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, previewDomain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
+                           traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, container, port) }
+                       }
                    }
                }
            }
@ -359,10 +363,12 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
        const runningContainers = {}
        services.forEach((app) => dockerIds.add(app.destinationDocker.id));
        for (const dockerId of dockerIds) {
-           const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
-           const containersArray = container.trim().split('\n');
-           if (containersArray.length > 0) {
-               runningContainers[dockerId] = containersArray
+           const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
+           if (container) {
+               const containersArray = container.trim().split('\n');
+               if (containersArray.length > 0) {
+                   runningContainers[dockerId] = containersArray
+               }
            }
        }
        for (const service of services) {
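The new "if (container)" / "if (stdout)" guards in the loops above matter because splitting an empty string in JavaScript still yields a one-element array, so the old length check passed even when no managed containers were running. A tiny illustration:

// Why the guard is needed: '' still splits into [''].
const stdout = '';
const containersArray = stdout.trim().split('\n'); // ['']
console.log(containersArray.length > 0);           // true, although nothing is running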
File diff suppressed because one or more lines are too long
2
apps/backup/.dockerignore
Normal file
@ -0,0 +1,2 @@
node_modules
backup/*
27
apps/backup/Dockerfile
Normal file
@ -0,0 +1,27 @@
ARG PNPM_VERSION=7.17.1

FROM node:18-slim as build
WORKDIR /app
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}

COPY ./package*.json .
RUN pnpm install -p
COPY . .

# Production build
FROM node:18-slim
ARG DOCKER_VERSION=20.10.18
ARG TARGETPLATFORM
ENV NODE_ENV production

WORKDIR /app

RUN apt update && apt -y install curl
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-$DOCKER_VERSION -o /usr/bin/docker
RUN chmod +x /usr/bin/docker
COPY --from=minio/mc:latest /usr/bin/mc /usr/bin/mc
COPY --from=build /app/ .

ENV CHECKPOINT_DISABLE=1
CMD node /app/src/index.mjs
0
apps/backup/backups/.gitkeep
Normal file
24
apps/backup/package.json
Normal file
@ -0,0 +1,24 @@
{
    "name": "backup",
    "version": "0.0.1",
    "description": "",
    "author": "Andras Bacsai",
    "license": "Apache-2.0",
    "main": "index.mjs",
    "type": "module",
    "scripts": {
        "start": "NODE_ENV=production node src/index.mjs",
        "dev": "pnpm cleanup && NODE_ENV=development node src/index.mjs",
        "build": "docker build -t backup .",
        "test": "pnpm build && docker run -ti --rm -v /var/run/docker.sock:/var/run/docker.sock -v /root/devel/coolify/apps/backup/backups:/app/backups -e CONTAINERS_TO_BACKUP='clatmhc6000008lvb5a5tnvsk:database:mysql:local' backup",
        "cleanup": "rm -rf backups/*"
    },
    "keywords": [],
    "dependencies": {
        "@aws-sdk/client-s3": "^3.222.0",
        "@aws-sdk/lib-storage": "^3.222.0",
        "cuid": "2.1.8",
        "dotenv": "16.0.3",
        "zx": "7.1.1"
    }
}
126
apps/backup/src/index.mjs
Normal file
@ -0,0 +1,126 @@
import * as dotenv from 'dotenv';
dotenv.config()

import 'zx/globals';
import cuid from 'cuid';
import { S3, PutObjectCommand } from "@aws-sdk/client-s3";
import fs from 'fs';

const isDev = process.env.NODE_ENV === 'development'
$.verbose = !!isDev

if (!process.env.CONTAINERS_TO_BACKUP && !isDev) {
    console.log(chalk.red(`No containers to backup!`))
    process.exit(1)
}
const mysqlGzipLocal = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:local';
const mysqlRawLocal = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:raw:local';
const postgresqlGzipLocal = 'clb6c15yi00008lpuezop7cy0:database:postgresql:gzip:local';
const postgresqlRawLocal = 'clb6c15yi00008lpuezop7cy0:database:postgresql:raw:local';

const minio = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:minio|http|min.arm.coolify.io|backups|<access_key>|<secret_key>';
const digitalOcean = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:do|https|fra1.digitaloceanspaces.com|backups|<access_key>|<secret_key>';

const devContainers = [mysqlGzipLocal, mysqlRawLocal, postgresqlGzipLocal, postgresqlRawLocal]

const containers = isDev
    ? devContainers
    : process.env.CONTAINERS_TO_BACKUP.split(',')

const backup = async (container) => {
    const id = cuid()
    const [name, backupType, type, zipped, storage] = container.split(':')
    const directory = `backups`;
    const filename = zipped === 'raw'
        ? `${name}-${type}-${backupType}-${new Date().getTime()}.sql`
        : `${name}-${type}-${backupType}-${new Date().getTime()}.tgz`
    const backup = `${directory}/${filename}`;

    try {
        await $`docker inspect ${name.split(' ')[0]}`.quiet()
        if (backupType === 'database') {
            if (type === 'mysql') {
                console.log(chalk.blue(`Backing up ${name}:${type}...`))
                const { stdout: rootPassword } = await $`docker exec ${name} printenv MYSQL_ROOT_PASSWORD`.quiet()
                if (zipped === 'raw') {
                    await $`docker exec ${name} sh -c "exec mysqldump --all-databases -uroot -p${rootPassword.trim()}" > ${backup}`
                } else if (zipped === 'gzip') {
                    await $`docker exec ${name} sh -c "exec mysqldump --all-databases -uroot -p${rootPassword.trim()}" | gzip > ${backup}`
                }
            }
            if (type === 'postgresql') {
                console.log(chalk.blue(`Backing up ${name}:${type}...`))
                const { stdout: userPassword } = await $`docker exec ${name} printenv POSTGRES_PASSWORD`
                const { stdout: user } = await $`docker exec ${name} printenv POSTGRES_USER`
                if (zipped === 'raw') {
                    await $`docker exec ${name} sh -c "exec pg_dumpall -c -U${user.trim()}" -W${userPassword.trim()}> ${backup}`
                } else if (zipped === 'gzip') {
                    await $`docker exec ${name} sh -c "exec pg_dumpall -c -U${user.trim()}" -W${userPassword.trim()} | gzip > ${backup}`
                }
            }
            const [storageType, ...storageArgs] = storage.split('|')
            if (storageType !== 'local') {
                let s3Protocol, s3Url, s3Bucket, s3Key, s3Secret = null
                if (storageArgs.length > 0) {
                    [s3Protocol, s3Url, s3Bucket, s3Key, s3Secret] = storageArgs
                }
                if (storageType === 'minio') {
                    if (!s3Protocol || !s3Url || !s3Bucket || !s3Key || !s3Secret) {
                        console.log(chalk.red(`Invalid storage arguments for ${name}:${type}!`))
                        return
                    }
                    await $`mc alias set ${id} ${s3Protocol}://${s3Url} ${s3Key} ${s3Secret}`
                    await $`mc stat ${id}`
                    await $`mc cp ${backup} ${id}/${s3Bucket}`
                    await $`rm ${backup}`
                    await $`mc alias rm ${id}`
                } else if (storageType === 'do') {
                    if (!s3Protocol || !s3Url || !s3Bucket || !s3Key || !s3Secret) {
                        console.log(chalk.red(`Invalid storage arguments for ${name}:${type}!`))
                        return
                    }
                    console.log({ s3Protocol, s3Url, s3Bucket, s3Key, s3Secret })
                    console.log(chalk.blue(`Uploading ${name}:${type} to DigitalOcean Spaces...`))
                    const readstream = fs.createReadStream(backup)
                    const bucketParams = {
                        Bucket: s3Bucket,
                        Key: filename,
                        Body: readstream
                    };
                    const s3Client = new S3({
                        forcePathStyle: false,
                        endpoint: `${s3Protocol}://${s3Url}`,
                        region: "us-east-1",
                        credentials: {
                            accessKeyId: s3Key,
                            secretAccessKey: s3Secret
                        },
                    });
                    try {
                        const data = await s3Client.send(new PutObjectCommand(bucketParams));
                        console.log(chalk.green("Successfully uploaded backup: " +
                            bucketParams.Bucket +
                            "/" +
                            bucketParams.Key
                        )
                        );
                        return data;
                    } catch (err) {
                        console.log("Error", err);
                    }
                }
            }
        }

        console.log(chalk.green(`Backup of ${name}:${type} complete!`))
    } catch (error) {
        console.log(chalk.red(`Backup of ${name}:${type} failed!`))
        console.log(chalk.red(error))
    }
}
const promises = []
for (const container of containers) {
    // await backup(container);
    promises.push(backup(container))
}
await Promise.all(promises)
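Each CONTAINERS_TO_BACKUP entry is colon-separated as name:backupType:type:zipped:storage, with the storage part optionally carrying pipe-separated S3 details, as the dev constants above show. A small TypeScript sketch of the same split logic (the type name is mine, not part of the script):

type BackupSpec = {
    name: string;       // container name (resource id)
    backupType: string; // 'database' in the current script
    type: string;       // 'mysql' | 'postgresql'
    zipped: string;     // 'raw' | 'gzip'
    storage: string;    // 'local', or 'minio|proto|host|bucket|key|secret', or 'do|...'
};

function parseSpec(entry: string): BackupSpec {
    const [name, backupType, type, zipped, storage] = entry.split(':');
    return { name, backupType, type, zipped, storage };
}

parseSpec('clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:local');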
@ -42,6 +42,8 @@
    },
    "type": "module",
    "dependencies": {
+       "@sentry/svelte": "7.21.1",
+       "@sentry/tracing": "7.21.1",
        "@sveltejs/adapter-static": "1.0.0-next.48",
        "@tailwindcss/typography": "0.5.8",
        "cuid": "2.1.8",
@ -1,4 +1,13 @@
+import * as Sentry from '@sentry/svelte';
export async function handle({ event, resolve }) {
    const response = await resolve(event, { ssr: false });
    return response;
}
+export const handleError = ({ error, event }) => {
+   Sentry.captureException(error, { event });
+
+   return {
+       message: 'Whoops!',
+       code: error?.code ?? 'UNKNOWN'
+   };
+};
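The new handleError hook only reports something if the Sentry SDK has been initialised at startup; that call is not part of this hunk. A minimal, typical @sentry/svelte setup for reference (the DSN is a placeholder, not a real project key):

import * as Sentry from '@sentry/svelte';

// Placeholder DSN; tracesSampleRate pairs with the @sentry/tracing dependency added above.
Sentry.init({
    dsn: 'https://examplePublicKey@o0.ingest.sentry.io/0',
    tracesSampleRate: 0.1
});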
@ -3,6 +3,8 @@ import { addToast } from '$lib/store';
export const asyncSleep = (delay: number) =>
    new Promise((resolve) => setTimeout(resolve, delay));

+export let initials = (str:string) => (str||'').split(' ').map( (wrd) => wrd[0]).join('')
+
export function errorNotification(error: any | { message: string }): void {
    if (error.message) {
        if (error.message === 'Cannot read properties of undefined (reading \'postMessage\')') {
@ -87,4 +89,4 @@ export function handlerNotFoundLoad(error: any, url: URL)

export function getRndInteger(min: number, max: number) {
    return Math.floor(Math.random() * (max - min + 1)) + min;
}
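The new initials helper keeps the first character of each space-separated word, for example:

initials('Local Docker'); // 'LD'
initials('coolify');      // 'c'
initials('');             // ''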
4
apps/ui/src/lib/components/ContextMenu.svelte
Normal file
@ -0,0 +1,4 @@
<nav class="header justify-between px-2 mb-5 lg:px-10">
    <slot />
    <slot name="actions" />
</nav>
@ -15,7 +15,7 @@
    export let placeholder = '';
    export let inputStyle = '';

-   let disabledClass = 'bg-coolback disabled:bg-coolblack w-full';
+   let disabledClass = 'input input-primary bg-coolback disabled:bg-coolblack w-full';
    let isHttps = browser && window.location.protocol === 'https:';

    function copyToClipboard() {
11
apps/ui/src/lib/components/LocalePicker.svelte
Normal file
@ -0,0 +1,11 @@
<script>
    import { locale, locales } from '$lib/translations';
</script>

<div>
    <select bind:value={$locale} class="w-14">
        {#each $locales as l}
            <option value={l}>{l}</option>
        {/each}
    </select>
</div>
14
apps/ui/src/lib/components/badges/DestinationBadge.svelte
Normal file
@ -0,0 +1,14 @@
<script>
    import Tooltip from '../Tooltip.svelte';
    import { initials } from '$lib/common';
    export let name;
    export let thingId;
    let id = 'destination' + thingId;
</script>

{#if (name || '').length > 0}
    <span class="badge rounded uppercase text-xs " {id}>
        {initials(name)}
    </span>
    <Tooltip triggeredBy="#{id}" placement="right">{name}</Tooltip>
{/if}
19
apps/ui/src/lib/components/badges/PublicBadge.svelte
Normal file
@ -0,0 +1,19 @@
<div title="Public">
    <svg
        xmlns="http://www.w3.org/2000/svg"
        class="h-6 w-6 "
        viewBox="0 0 24 24"
        stroke-width="1.5"
        stroke="currentColor"
        fill="none"
        stroke-linecap="round"
        stroke-linejoin="round"
    >
        <path stroke="none" d="M0 0h24v24H0z" fill="none" />
        <circle cx="12" cy="12" r="9" />
        <line x1="3.6" y1="9" x2="20.4" y2="9" />
        <line x1="3.6" y1="15" x2="20.4" y2="15" />
        <path d="M11.5 3a17 17 0 0 0 0 18" />
        <path d="M12.5 3a17 17 0 0 1 0 18" />
    </svg>
</div>
26
apps/ui/src/lib/components/badges/StatusBadge.svelte
Normal file
@ -0,0 +1,26 @@
<script lang="ts">
    import { getStatus } from '$lib/container/status';

    import { onDestroy, onMount } from 'svelte';
    export let thing: any;
    let getting = getStatus(thing);
    let refreshing: any;
    let status: any;
    // AutoUpdates Status every 5 seconds
    onMount(() => {
        refreshing = setInterval(() => {
            getStatus(thing).then((r) => (status = r));
        }, 5000);
    });
    onDestroy(() => {
        clearInterval(refreshing);
    });
</script>

{#await getting}
    <span class="badge badge-lg rounded uppercase">...</span>
{:then status}
    <span class="badge badge-lg rounded uppercase badge-status-{status}">
        {status}
    </span>
{/await}
16
apps/ui/src/lib/components/badges/TeamsBadge.svelte
Normal file
@ -0,0 +1,16 @@
<script lang="ts">
    import Tooltip from '../Tooltip.svelte';
    import { initials } from '$lib/common';
    export let teams: any;
    export let thing: any;
    let id = 'teams' + thing.id;
</script>

<span>
    {#each teams as team}
        <a href={`/iam/teams/${team.id}`} {id} class="no-underline">
            Team: {initials(team.name)}
        </a>
        <Tooltip triggeredBy="#{id}" placement="right" color="bg-destinations">{team.name}</Tooltip>
    {/each}
</span>
5
apps/ui/src/lib/components/grids/Grid3.svelte
Normal file
@ -0,0 +1,5 @@
<div
    class="grid grid-col gap-8 auto-cols-max grid-cols-1 md:grid-cols-2 lg:md:grid-cols-3 xl:grid-cols-4 p-4 lg:px-10"
>
    <slot />
</div>
@ -42,4 +42,6 @@
    <Icons.Heroku {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'compose'}
    <Icons.Compose {isAbsolute} />
+{:else if application.simpleDockerfile}
+   <Icons.Docker {isAbsolute} />
{/if}
@ -0,0 +1,26 @@
<script>
    export let isAbsolute=false;
</script>
<svg
    xmlns="http://www.w3.org/2000/svg"
    class={isAbsolute ? 'absolute top-0 left-0 -m-2 h-12 w-12 text-sky-500' : 'mx-auto w-8 h-8 text-sky-500'}
    viewBox="0 0 24 24"
    stroke-width="1.5"
    stroke="currentColor"
    fill="none"
    stroke-linecap="round"
    stroke-linejoin="round"
>
    <path stroke="none" d="M0 0h24v24H0z" fill="none" />
    <path
        d="M22 12.54c-1.804 -.345 -2.701 -1.08 -3.523 -2.94c-.487 .696 -1.102 1.568 -.92 2.4c.028 .238 -.32 1.002 -.557 1h-14c0 5.208 3.164 7 6.196 7c4.124 .022 7.828 -1.376 9.854 -5c1.146 -.101 2.296 -1.505 2.95 -2.46z"
    />
    <path d="M5 10h3v3h-3z" />
    <path d="M8 10h3v3h-3z" />
    <path d="M11 10h3v3h-3z" />
    <path d="M8 7h3v3h-3z" />
    <path d="M11 7h3v3h-3z" />
    <path d="M11 4h3v3h-3z" />
    <path d="M4.571 18c1.5 0 2.047 -.074 2.958 -.78" />
    <line x1="10" y1="16" x2="10" y2="16.01" />
</svg>
@ -0,0 +1,16 @@
<svg
    xmlns="http://www.w3.org/2000/svg"
    class="absolute top-0 left-9 -m-2 h-6 w-6 text-sky-500 rotate-45"
    viewBox="0 0 24 24"
    stroke-width="3"
    stroke="currentColor"
    fill="none"
    stroke-linecap="round"
    stroke-linejoin="round"
>
    <path stroke="none" d="M0 0h24v24H0z" fill="none" />
    <line x1="12" y1="18" x2="12.01" y2="18" />
    <path d="M9.172 15.172a4 4 0 0 1 5.656 0" />
    <path d="M6.343 12.343a8 8 0 0 1 11.314 0" />
    <path d="M3.515 9.515c4.686 -4.687 12.284 -4.687 17 0" />
</svg>
After Width: | Height: | Size: 505 B |
@ -5,6 +5,7 @@
    const handleError = (ev: { target: { src: string } }) => (ev.target.src = fallback);
    let extension = 'png';
    let svgs = [
+       'pocketbase',
        'gitea',
        'languagetool',
        'meilisearch',
11
apps/ui/src/lib/components/svg/sources/GithubIcon.svelte
Normal file
@ -0,0 +1,11 @@
<svg viewBox="0 0 128 128" class="h-10 w-10">
    <g fill="#ffffff"
        ><path
            fill-rule="evenodd"
            clip-rule="evenodd"
            d="M64 5.103c-33.347 0-60.388 27.035-60.388 60.388 0 26.682 17.303 49.317 41.297 57.303 3.017.56 4.125-1.31 4.125-2.905 0-1.44-.056-6.197-.082-11.243-16.8 3.653-20.345-7.125-20.345-7.125-2.747-6.98-6.705-8.836-6.705-8.836-5.48-3.748.413-3.67.413-3.67 6.063.425 9.257 6.223 9.257 6.223 5.386 9.23 14.127 6.562 17.573 5.02.542-3.903 2.107-6.568 3.834-8.076-13.413-1.525-27.514-6.704-27.514-29.843 0-6.593 2.36-11.98 6.223-16.21-.628-1.52-2.695-7.662.584-15.98 0 0 5.07-1.623 16.61 6.19C53.7 35 58.867 34.327 64 34.304c5.13.023 10.3.694 15.127 2.033 11.526-7.813 16.59-6.19 16.59-6.19 3.287 8.317 1.22 14.46.593 15.98 3.872 4.23 6.215 9.617 6.215 16.21 0 23.194-14.127 28.3-27.574 29.796 2.167 1.874 4.097 5.55 4.097 11.183 0 8.08-.07 14.583-.07 16.572 0 1.607 1.088 3.49 4.148 2.897 23.98-7.994 41.263-30.622 41.263-57.294C124.388 32.14 97.35 5.104 64 5.104z"
        /><path
            d="M26.484 91.806c-.133.3-.605.39-1.035.185-.44-.196-.685-.605-.543-.906.13-.31.603-.395 1.04-.188.44.197.69.61.537.91zm2.446 2.729c-.287.267-.85.143-1.232-.28-.396-.42-.47-.983-.177-1.254.298-.266.844-.14 1.24.28.394.426.472.984.17 1.255zM31.312 98.012c-.37.258-.976.017-1.35-.52-.37-.538-.37-1.183.01-1.44.373-.258.97-.025 1.35.507.368.545.368 1.19-.01 1.452zm3.261 3.361c-.33.365-1.036.267-1.552-.23-.527-.487-.674-1.18-.343-1.544.336-.366 1.045-.264 1.564.23.527.486.686 1.18.333 1.543zm4.5 1.951c-.147.473-.825.688-1.51.486-.683-.207-1.13-.76-.99-1.238.14-.477.823-.7 1.512-.485.683.206 1.13.756.988 1.237zm4.943.361c.017.498-.563.91-1.28.92-.723.017-1.308-.387-1.315-.877 0-.503.568-.91 1.29-.924.717-.013 1.306.387 1.306.88zm4.598-.782c.086.485-.413.984-1.126 1.117-.7.13-1.35-.172-1.44-.653-.086-.498.422-.997 1.122-1.126.714-.123 1.354.17 1.444.663zm0 0"
        /></g
    >
</svg>
After Width: | Height: | Size: 1.9 KiB |
21
apps/ui/src/lib/components/svg/sources/GitlabIcon.svelte
Normal file
@ -0,0 +1,21 @@
<svg viewBox="0 0 128 128" class="h-10 w-10">
    <path
        fill="#FC6D26"
        d="M126.615 72.31l-7.034-21.647L105.64 7.76c-.716-2.206-3.84-2.206-4.556 0l-13.94 42.903H40.856L26.916 7.76c-.717-2.206-3.84-2.206-4.557 0L8.42 50.664 1.385 72.31a4.792 4.792 0 001.74 5.358L64 121.894l60.874-44.227a4.793 4.793 0 001.74-5.357"
    /><path fill="#E24329" d="M64 121.894l23.144-71.23H40.856L64 121.893z" /><path
        fill="#FC6D26"
        d="M64 121.894l-23.144-71.23H8.42L64 121.893z"
    /><path
        fill="#FCA326"
        d="M8.42 50.663L1.384 72.31a4.79 4.79 0 001.74 5.357L64 121.894 8.42 50.664z"
    /><path
        fill="#E24329"
        d="M8.42 50.663h32.436L26.916 7.76c-.717-2.206-3.84-2.206-4.557 0L8.42 50.664z"
    /><path fill="#FC6D26" d="M64 121.894l23.144-71.23h32.437L64 121.893z" /><path
        fill="#FCA326"
        d="M119.58 50.663l7.035 21.647a4.79 4.79 0 01-1.74 5.357L64 121.894l55.58-71.23z"
    /><path
        fill="#E24329"
        d="M119.58 50.663H87.145l13.94-42.902c.717-2.206 3.84-2.206 4.557 0l13.94 42.903z"
    />
</svg>
After Width: | Height: | Size: 1009 B |
73
apps/ui/src/lib/container/status.ts
Normal file
@ -0,0 +1,73 @@
//
// Maps Container ID x Operation Status
//
// Example response of $status => {'123asdf': 'degraded', '124asdf': 'running'}

import { writable, get as getStore } from 'svelte/store';
import { get } from '$lib/api';

export let containerStatus = writable({});

let PERMITED_STATUS = ['loading', 'running', 'healthy', 'building', 'degraded', 'stopped', 'error'];

// refreshStatus([{id}])
export async function refreshStatus(list: Array<any>) {
    for (const item of list) {
        setStatus(item.id, 'loading');
        getStatus(item, true);
    }
}

export async function getStatus(resource: any, force: boolean = false) {
    const { id, buildPack, dualCerts, engine, simpleDockerfile } = resource;
    let newStatus = 'stopped';

    // Already set and we're not forcing
    if (getStore(containerStatus)[id] && !force) return getStore(containerStatus)[id];

    try {
        if (buildPack || simpleDockerfile) { // Application
            const response = await get(`/applications/${id}/status`);
            newStatus = parseApplicationsResponse(response);
        } else if (typeof dualCerts !== 'undefined') { // Service
            const response = await get(`/services/${id}/status`);
            newStatus = parseServiceResponse(response);
        } else if (typeof engine !== 'undefined') { // Destination/Server
            const response = await get(`/destinations/${id}/status`);
            newStatus = response.isRunning ? 'running' : 'stopped';
        } else { // Database
            const response = await get(`/databases/${id}/status`);
            newStatus = response.isRunning ? 'running' : 'stopped';
        }
    } catch (error) {
        newStatus = 'error';
    }

    setStatus(id, newStatus);
    // console.log("GOT:", id, newStatus)
    return newStatus
}

const setStatus = (thingId, newStatus) => {
    if (!PERMITED_STATUS.includes(newStatus))
        throw (`Change to ${newStatus} is not permitted. Try: ${PERMITED_STATUS.join(', ')}`);
    containerStatus.update(n => Object.assign(n, { thingId: newStatus }));
};

// -- Response Parsing

function parseApplicationsResponse(list: Array<any>) {
    if (list.length === 0) return 'stopped';
    if (list.length === 1) return list[0].status.isRunning ? 'running' : 'stopped';
    return allWorking(list.map((el: any) => el.status.isRunning))
}

function parseServiceResponse(response: any) {
    if (Object.keys(response).length === 0) return 'stopped';
    let list = Object.keys(response).map((el) => el.status.isRunning)
    return allWorking(list) ? 'running' : 'degraded'
}

function allWorking(list: Array<any>) {
    return list.reduce((acum: boolean, res: boolean) => acum && res) ? 'running' : 'degraded';
}
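A sketch of how the status store above is meant to be consumed (the resource objects are illustrative; StatusBadge.svelte shows the polling variant). Note that setStatus as written assigns to the literal key thingId rather than the computed id, so the cached map is not actually keyed by resource id until that is corrected; the example shows the intended shape.

import { containerStatus, refreshStatus, getStatus } from '$lib/container/status';

// Illustrative resources: one application (has buildPack), one destination (has engine).
const resources = [
    { id: '123asdf', buildPack: 'node' },
    { id: '124asdf', engine: '/var/run/docker.sock' }
];

refreshStatus(resources);                             // marks both 'loading', then resolves each in the background
const status = await getStatus(resources[0]);         // e.g. 'running'
containerStatus.subscribe((map) => console.log(map)); // intended shape: { '123asdf': 'running', ... }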
@ -1,4 +1,7 @@
{
    "fr": "Français",
+   "pt": "Português",
+   "es": "Espanhol",
+   "ko": "Korean",
    "en": "English"
}
341
apps/ui/src/lib/locales/es.json
Normal file
@ -0,0 +1,341 @@
{
  "layout":{
    "update_done":"Actualización completada.",
    "wait_new_version_startup":"Esperando que comience la nueva versión.",
    "new_version":"Nueva versión accesible. Recargando.",
    "switch_to_a_different_team":"Cambia a otro equipo.",
    "update_available":"Actualización disponible"
  },
  "error":{
    "you_can_find_your_way_back":"Puedes encontrar tu camino de vuelta",
    "here":"Aquí.",
    "you_are_lost":"¡Estás perdido! ¡Pero no tengas miedo!"
  },
  "index":{
    "dashboard":"Dashboard",
    "applications":"Aplicaciones",
    "destinations":"Destinos",
    "git_sources":"Fuentes Git",
    "databases":"Bases de datos",
    "services":"Servicios",
    "teams":"Equipos",
    "not_implemented_yet":"Aún no se ha aplicado",
    "database":"Base de datos",
    "settings":"Ajustes",
    "global_settings":"Ajustes mundiales",
    "secret":"Secret",
    "team":"Equipo",
    "logout":"Cerrar sesión"
  },
  "login":{
    "already_logged_in":"Ya se ha registrado.",
    "authenticating":"Autenticando.",
    "login":"Iniciar sesión"
  },
  "forms":{
    "password":"Contraseña",
    "email":"Dirección de correo electrónico",
    "passwords_not_match":"Las contraseñas no coinciden.",
    "password_again":"Contraseña de nuevo",
    "save":"Guardar",
    "saving":"Salvando.",
    "name":"Nombre",
    "value":"Valor",
    "action":"Acciones",
    "is_required":"es necesario.",
    "add":"Añadir",
    "set":"Set",
    "remove":"Retirar",
    "path":"Camino",
    "confirm_continue":"¿Estás seguro de continuar?",
    "must_be_stopped_to_modify":"Debe ser detenido para modificar.",
    "port":"Puerto",
    "default":"Por defecto",
    "base_directory":"Base Directory",
    "publish_directory":"Publish Directory",
    "generated_automatically_after_start":"Generado automáticamente después del inicio",
    "roots_password":"La contraseña de Root",
    "root_user":"Usuario raíz",
    "eg":"eg",
    "user":"Usuario",
    "loading":"Carga.",
    "version":"Versión",
    "host":"Host",
    "already_used_for":"##########################################################################################################################################################################################################################################################",
    "configuration":"Configuración",
    "engine":"Motor",
    "network":"Red",
    "ip_address":"Dirección IP",
    "ssh_private_key":"SSH Clave privada",
    "type":"Tipo",
    "html_url":"URL",
    "api_url":"API",
    "organization":"Organización",
    "new_password":"Nueva contraseña",
    "super_secure_new_password":"Super seguro nueva contraseña",
    "submit":"Submit",
    "default_email_address":"Dirección de correo electrónico predeterminada",
    "default_password":"Contraseña predeterminada",
    "username":"Nombre de usuario",
    "root_db_user":"Root DB Usuario",
    "root_db_password":"Root DB Contraseña",
    "api_port":"API Port",
    "verifying":"Verificación",
    "verify_emails_without_smtp":"Verificar correos electrónicos sin SMTP",
    "extra_config":"Extra Config",
    "select_a_service":"Seleccione un Servicio",
    "select_a_service_version":"Seleccione una versión de servicio",
    "removing":"Retirándose.",
    "remove_domain":"Eliminar el dominio",
    "public_port_range":"Public Port Range",
    "public_port_range_explainer":"Puertos utilizados para exponer bases de datos/servicios/servicios internos. Añádalos a su cortafuegos (si es aplicable).Seguido se indicará una gama de puertos, por ejemplo: tachuelas clase='text-settings '9000-9100 seg/span",
    "no_actions_available":"No se dispone de medidas",
    "admin_api_key":"Clave de API de Admin"
  },
  "register":{
    "register":"Registro",
    "registering":"Registro.",
    "first_user":"Está registrando al primer usuario. Será el administrador de tu instancia de Coolify."
  },
  "reset":{
    "reset_password":"Reset",
    "invalid_secret_key":"Una llave secreta inválida.",
    "secret_key":"Secret Key",
    "find_path_secret_key":"Puedes encontrarlo en ~coolify/.env (COOLIFY_SECRET_KEY)"
  },
  "application":{
    "configuration":{
      "buildpack":{
        "choose_this_one":"Elige esta."
      },
      "branch_already_in_use":"Esta rama ya es utilizada por otra aplicación. Webhooks no funcionará en este caso para ambas aplicaciones. ¿Seguro que quieres usarlo?",
      "no_repositories_configured":"No hay repositorios configurados para su aplicación Git.",
      "configure_it_now":"Configure ahora",
      "loading_repositories":"Carga de repositorios ...",
      "select_a_repository":"Seleccione un repositorio",
      "loading_branches":"Cargando ramas ...",
      "select_a_repository_first":"Por favor seleccione un repositorio primero",
      "select_a_branch":"Por favor seleccione una rama",
      "loading_groups":"Grupos de carga.",
      "select_a_group":"Seleccione un grupo",
      "loading_projects":"Cargando proyectos.",
      "select_a_project":"Seleccione un proyecto",
      "no_projects_found":"No se han encontrado proyectos",
      "no_branches_found":"No hay ramas encontradas",
      "configure_build_pack":"Configure Build Pack",
      "scanning_repository_suggest_build_pack":"Repositorio de exploración para sugerir un paquete de construcción para usted.",
      "found_lock_file":"encontrado archivo de bloqueo para {{packageManager}}.Seguido de comandos predefinidos.",
      "configure_destination":"Configurar Destino",
      "no_configurable_destination":"No hay destino configurable encontrado",
      "select_a_repository_project":"Seleccione un Repositorio / Proyecto",
      "select_a_git_source":"Seleccione una fuente de Git",
      "no_configurable_git":"No se encontró una fuente de Git configurable",
      "configuration_missing":"Falta de configuración"
    },
    "build":{
      "queued_waiting_exec":"Queued and waiting for execution.",
      "build_logs_of":"Construir registros de",
      "running":"Corriendo",
      "queued":"Queued",
      "finished_in":"Terminado en",
      "load_more":"Carga más",
      "no_logs":"No hay registros encontrados",
      "waiting_logs":"Esperando los registros."
    },
    "preview":{
      "need_during_buildtime":"¿Necesitas durante el tiempo de construcción?",
      "setup_secret_app_first":"Puede añadir secretos a las implementaciones PR/MR. Por favor, agregue secretos a la aplicación primero. √≠br]Useful for creating יspan class='text-settings 'staging won/span environments.",
      "values_overwriting_app_secrets":"Estos valores sobrescriben los secretos de aplicación en las implementaciones PR/MR. Útil para la creación de clase 0'text-settings 'estaging significan ambientes / paño.",
      "redeploy":"Redistribución",
      "no_previews_available":"No hay vistas previas disponibles"
    },
    "secrets":{
      "secret_saved":"Secreto salvado.",
      "use_isbuildsecret":"Use isBuildSecret",
      "secrets_for":"Secretos para"
    },
    "storage":{
      "path_is_required":"Se requiere camino.",
      "storage_saved":"Almacenamiento guardado.",
      "storage_updated":"Almacenamiento actualizado.",
      "storage_deleted":"Almacenamiento eliminado.",
      "persistent_storage_explainer":"Puede especificar cualquier carpeta que desee ser persistente a través de las implementaciones.Seguido de la clase='text-settings 'ejemplo observado/span significa que preservará <span class='text-settings 'app/example observado/span en el contenedor como ierespan class='text-settings 'appcanta/span es Гspan clase='text-setting directory Esto es útil para almacenar datos tales como una clase de =span='text-settings √≥'database (SQLite) obtenidos/spanilo o a יspan class='text-settings 'cache made/span."
    },
    "deployment_queued":"Despliegue apagado.",
    "confirm_to_delete":"¿Estás seguro de que te gustaría borrar?",
    "stop_application":"Stop Application",
    "permission_denied_stop_application":"Usted no tiene permiso para detener la aplicación.",
    "rebuild_application":"Rebuild Application",
    "permission_denied_rebuild_application":"No tienes permiso para reconstruir la aplicación.",
    "build_and_start_application":"Despliegue",
    "permission_denied_build_and_start_application":"Usted no tiene permiso para implementar la aplicación.",
    "configurations":"Configuraciones",
    "secret":"Secretos",
    "persistent_storage":"Almacenamiento persistente",
    "previews":"Avances",
    "logs":"Registros de aplicaciones",
    "build_logs":"Logs de construcción",
    "delete_application":"Suprimir",
    "permission_denied_delete_application":"Usted no tiene permiso para borrar esta aplicación",
    "domain_already_in_use":"El dominio ya se utiliza.",
    "dns_not_set_error":"DNS no se establece correctamente ni se propogó para {{domain}}.Seguido manualmente indicando su configuración DNS.",
    "domain_required":"El dominio es necesario.",
    "settings_saved":"Configuración guardada.",
    "dns_not_set_partial_error":"DNS not set",
    "domain_not_valid":"No puede resolver el dominio o no apunta a la dirección IP del servidor. Por favor, compruebe su configuración de DNS e inténtelo de nuevo.",
    "git_source":"Fuente de Git",
    "git_repository":"Repositorio Git",
    "build_pack":"Paquete de construcción",
    "base_image":"Imagen de despliegue",
    "base_image_explainer":"Imagen que se utilizará para el despliegue.",
    "base_build_image":"Construir imagen",
    "base_build_image_explainer":"Imagen que se utilizará durante el proceso de construcción.",
    "destination":"Destino",
    "application":"Aplicación",
    "url_fqdn":"URL (FQDN)",
    "domain_fqdn":"Dominio (FQDN)",
    "https_explainer":"Si especificas יspan class='text-settings 'https seleccionado/span, la aplicación será accesible sólo en https. Certificado SSL se generará para usted.Seguidobr títuloSi especificas יspan class='text-settings 'www dañado/span, la aplicación será redireccionada (302) de non-www y viceversa.Seguradobr acordadobr título Para modificar el dominio, primero debe detener la aplicación. Debe establecer su DNS para apuntar al servidor IP con antelación.",
    "ssl_www_and_non_www":"Generar SSL para www y non-www?",
    "ssl_explainer":"Generará certificados tanto para www como para no www. Necesitas tener las entradas de DNS de la clase 0'' text-settings' inteligenteboth DNS seleccionadas/span título con antelación.Seguridad si esperas tener visitantes en ambos.",
    "install_command":"Instalar el Comando",
    "build_command":"Mando de construcción",
    "start_command":"Comando de Inicio",
    "directory_to_use_explainer":"Directorio para usar como base para todos los comandos. Podría ser útil con la clase 0'text-settings 'monorepos obtenidos / span.",
    "publish_directory_explainer":"Directorio que contiene todos los activos para el despliegue. Por ejemplo: \"Clasificación de texto\": \"Clasificación de texto\": \"Clasificación de texto\": \"Clasificación de texto\": \"Clasificación de texto\" (p. ej., p. ej.:",
    "features":"Características",
    "enable_automatic_deployment":"Permitir el despliegue automático",
    "enable_auto_deploy_webhooks":"Permitir el despliegue automático a través de los dispositivos web.",
    "enable_mr_pr_previews":"Habilitar las previsiones MR/PR",
    "expose_a_port":"Exponga un puerto",
    "enable_preview_deploy_mr_pr_requests":"Permitir despliegues de previsualización de las solicitudes de tira o fusión.",
    "debug_logs":"Debug Logs",
    "enable_debug_log_during_build":"Activar registros de depuración durante fase de construcción.Seguidobr contactos clase='text-settings .'Informaciones positivas realizadas/span título podría ser visible y guardado en registros.",
    "cant_activate_auto_deploy_without_repo":"No puede activar implementaciones automáticas hasta que sólo una aplicación se defina para este repositorio / rama.",
    "no_applications_found":"No se han encontrado aplicaciones",
    "secret__batch_dot_env":"Paste .env file",
    "batch_secrets":"Batch añade secretos"
  },
  "general":"General",
  "database":{
    "default_database":"Base de datos predeterminada",
    "generated_automatically_after_set_to_public":"Generado automáticamente después de establecerse en público",
    "connection_string":"Conexión",
    "set_public":"Ponlo en público",
    "warning_database_public":"Su base de datos será accesible en Internet. ¡Seguridad en este caso!",
    "change_append_only_mode":"Cambiar el apéndice sólo modo",
    "warning_append_only":"Útil si desea restaurar los datos redis de una copia de seguridad. Se requiere el reinicio de la base de datos.",
    "select_database_type":"Seleccione un tipo de base",
    "select_database_version":"Seleccione una versión de base de datos",
    "confirm_stop":"¿Seguro que te gustaría parar?",
    "stop_database":"Para.",
    "permission_denied_stop_database":"No tienes permiso para detener la base de datos.",
    "start_database":"Comienzo",
    "permission_denied_start_database":"No tienes permiso para iniciar la base de datos.",
    "delete_database":"Suprimir",
    "permission_denied_delete_database":"Usted no tiene permiso para eliminar una base de datos",
    "no_databases_found":"No se encontraron bases de datos",
    "logs":"Logs"
  },
  "destination":{
    "delete_destination":"Suprimir",
    "permission_denied_delete_destination":"Usted no tiene permiso para eliminar este destino",
    "add_to_coolify":"Añadir a Coolify",
    "coolify_proxy_stopped":"Coolify Proxy paró!",
    "coolify_proxy_started":"Coolify Proxy comenzó!",
    "confirm_restart_proxy":"¿Seguro que quieres reiniciar el proxy? Todo será reconfigurado en ~10 segundos.",
    "coolify_proxy_restarting":"Enfríe el reinicio de Proxy.",
    "restarting_please_wait":"Reiniciando. Por favor, espere.",
    "force_restart_proxy":"Fuerza restart proxy",
    "use_coolify_proxy":"¿Uso Coolify Proxy?",
    "no_destination_found":"No hay destino encontrado",
    "new_error_network_already_exists":"Red {{network}} ya configurado para otro equipo!",
    "new":{
      "saving_and_configuring_proxy":"Salvando.",
      "install_proxy":"Esto instalará un proxy en el destino para permitirle acceder a sus aplicaciones y servicios sin ninguna configuración manual (recomendada para Docker).",
      "add_new_destination":"Añadir nuevo destino",
      "predefined_destinations":"Destinos predefinidos"
    }
  },
  "sources":{
    "local_docker":"Local Docker",
    "remote_docker":"Remoto Docker",
    "organization_explainer":"Rellene si desea utilizar una organización como su Fuente Git. De lo contrario su usuario será utilizado."
  },
  "source":{
    "new":{
      "git_source":"Agregar nueva fuente de Git",
      "official_providers":"Proveedores oficiales"
    },
    "no_git_sources_found":"No hay fuentes de git",
    "delete_git_source":"Suprimir",
    "permission_denied":"Usted no tiene permiso para eliminar una Fuente Git",
    "create_new_app":"Crear nuevo {{name}} App",
    "change_app_settings":"Cambio {{name}} Configuración de la aplicación",
    "install_repositories":"Instalar los depósitos",
    "application_id":"ID de aplicación",
    "group_name":"Nombre del grupo",
    "oauth_id":"OAuth ID",
    "oauth_id_explainer":"El ID OAuth es el identificador único de la aplicación GitLab. Puedes encontrarlo itspan class=' text-settings' √in la URL seleccionada/span título de tu aplicación GitLab OAuth.",
    "register_oauth_gitlab":"Registrar nueva aplicación OAuth en GitLab",
    "gitlab":{
      "self_hosted":"Aplicación a nivel de instalación (auto hospedada)",
      "user_owned":"Aplicación de propiedad de usuario",
      "group_owned":"Solicitud de propiedad de un grupo",
      "gitlab_application_type":"GitLab Application Tipo",
      "already_configured":"GitLab La aplicación ya está configurada."
    },
    "github":{
      "redirecting":"Redirección a Github."
    }
  },
  "services":{
    "all_email_verified":"Todos los correos electrónicos son verificados. Puedes entrar ahora.",
    "generate_www_non_www_ssl":"Generará certificados tanto para www como para no www. Necesitas tener las entradas de DNS de la clase='text-settings' inteligenteboth DNS seleccionadas/span título con antelación."
  },
  "service":{
    "stop_service":"Para.",
    "permission_denied_stop_service":"No tienes permiso para detener el servicio.",
    "start_service":"Comienzo",
    "permission_denied_start_service":"No tienes permiso para empezar el servicio.",
    "delete_service":"Suprimir",
    "permission_denied_delete_service":"Usted no tiene permiso para borrar un servicio.",
    "no_service":"No se han encontrado servicios",
    "logs":"Logs"
  },
  "setting":{
    "change_language":"Cambiar idioma",
    "permission_denied":"No tienes permiso para hacer esto. \\nAsk un administrador para modificar sus permisos.",
    "domain_removed":"El dominio eliminado",
    "ssl_explainer":"Si especificas יspan class='text-settings' confianzahttps realizadas/span, Coolify será accesible sólo en https. Certificado SSL se generará para usted.Se indicará que si especificas <span class='text-settings 'www identificado/span, Coolify será redireccionado (302) de no-www y viceversa. Si cambia un dominio ya establecido, romperá webhooks y otras integraciones! Necesita actualizarlos manualmente.",
    "must_remove_domain_before_changing":"Debe eliminar el dominio antes de que pueda cambiar esta configuración.",
    "registration_allowed":"¿Se permite la inscripción?",
    "registration_allowed_explainer":"Permitir nuevos registros a la solicitud. Se ha apagado después del primer registro.",
    "coolify_proxy_settings":"Enfriar los ajustes Proxy",
    "credential_stat_explainer":"Credenciales para \"href=\"{{link}} target=\"_blank\"(s) asignados/página.",
    "auto_update_enabled":"¿Actualización automática activada?",
    "auto_update_enabled_explainer":"Activar actualizaciones automáticas para enfriar. Se hará automáticamente detrás de las escenas, si no hay proceso de construcción funcionando.",
    "generate_www_non_www_ssl":"Generará certificados tanto para www como para no www. Necesitas tener las entradas de la clase='' texto-settings' inteligenteboth DNS seleccionadas/span titulada con antelación.",
    "is_dns_check_enabled":"¿El cheque DNS está habilitado?",
    "is_dns_check_enabled_explainer":"Puede deshabilitar el cheque DNS antes de crear certificados SSL.Seguido se indica que el ajuste es útil cuando Coolify está detrás de un proxy o túnel inverso."
  },
  "team":{
    "pending_invitations":"Invitaciones pendientes",
    "accept":"Aceptar",
    "delete":"Suprimir",
    "member":"miembros(s)",
    "root":"(root)",
    "invited_with_permissions":"Invitado a las \"clase de texto\"(s)(s)(s)(s)(s)(s)(s)(s)(s)(s)(s))(s))(s))(sp))(sp.",
    "members":"Miembros",
    "root_team_explainer":"Este es el equipo de la clase de la clase 0'text-red-500 'raíz seleccionada/span. Esto significa que los miembros de este grupo pueden gestionar la configuración de instancia amplia y tener todos los priviliges en Coolify (imagina como usuario raíz en Linux.)",
    "permission":"Permiso",
    "you":"Tú",
|
||||||
|
"promote_to":"Promover a {{grade}}",
|
||||||
|
"revoke_invitation":"Revocar la invitación",
|
||||||
|
"pending_invitation":"Invitación pendiente",
|
||||||
|
"invite_new_member":"Invitar nuevo miembro",
|
||||||
|
"send_invitation":"Enviar invitación",
|
||||||
|
"invite_only_register_explainer":"Sólo puedes invitar a usuarios registrados.",
|
||||||
|
"admin":"Admin",
|
||||||
|
"read":"Leer"
|
||||||
|
}
|
||||||
|
}
|
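For orientation, here is a minimal, hypothetical usage sketch of how keys from these locale files are consumed in the UI through the sveltekit-i18n `t` store (the keys are taken from this diff; the component and the import path are illustrative assumptions, not code from this commit):

<script lang="ts">
	// Hypothetical component: `t` is the store exported by the i18n config added later in this commit.
	import { t } from '$lib/locale'; // path assumed; adjust to wherever the config lives
</script>

<!-- Plain keys resolve against the active locale file -->
<button>{$t('database.stop_database')}</button>

<!-- {{placeholder}} values are passed as a params object -->
<p>{$t('application.confirm_to_delete', { name: 'my-app' })}</p>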
341
apps/ui/src/lib/locales/ko.json
Normal file
341
apps/ui/src/lib/locales/ko.json
Normal file
@ -0,0 +1,341 @@
|
|||||||
|
{
|
||||||
|
"layout":{
|
||||||
|
"update_done":"업데이트가 완료되었습니다.",
|
||||||
|
"wait_new_version_startup":"새 버전이 시작되기를 기다리는 중...",
|
||||||
|
"new_version":"새 버전을 사용할 수 있습니다. 새로고침 중...",
|
||||||
|
"switch_to_a_different_team":"다른팀으로 갈아타세요...",
|
||||||
|
"update_available":"업데이트 가능"
|
||||||
|
},
|
||||||
|
"error":{
|
||||||
|
"you_can_find_your_way_back":"돌아갈 길을 찾을 수 있다",
|
||||||
|
"here":"여기",
|
||||||
|
"you_are_lost":"앗 길을 잃으셨군요! 그러나 두려워하지 마십시오!"
|
||||||
|
},
|
||||||
|
"index":{
|
||||||
|
"dashboard":"계기반",
|
||||||
|
"applications":"애플리케이션",
|
||||||
|
"destinations":"목적지",
|
||||||
|
"git_sources":"힘내 소스",
|
||||||
|
"databases":"데이터베이스",
|
||||||
|
"services":"서비스",
|
||||||
|
"teams":"팀",
|
||||||
|
"not_implemented_yet":"아직 구현되지 않음",
|
||||||
|
"database":"데이터 베이스",
|
||||||
|
"settings":"설정",
|
||||||
|
"global_settings":"전역 설정",
|
||||||
|
"secret":"비밀",
|
||||||
|
"team":"팀",
|
||||||
|
"logout":"로그 아웃"
|
||||||
|
},
|
||||||
|
"login":{
|
||||||
|
"already_logged_in":"이미 로그인...",
|
||||||
|
"authenticating":"인증 중...",
|
||||||
|
"login":"로그인"
|
||||||
|
},
|
||||||
|
"forms":{
|
||||||
|
"password":"비밀번호",
|
||||||
|
"email":"이메일 주소",
|
||||||
|
"passwords_not_match":"비밀번호가 일치하지 않습니다.",
|
||||||
|
"password_again":"비밀번호를 다시",
|
||||||
|
"save":"구하다",
|
||||||
|
"saving":"절약...",
|
||||||
|
"name":"이름",
|
||||||
|
"value":"값",
|
||||||
|
"action":"행위",
|
||||||
|
"is_required":"필요합니다.",
|
||||||
|
"add":"추가하다",
|
||||||
|
"set":"세트",
|
||||||
|
"remove":"제거하다",
|
||||||
|
"path":"길",
|
||||||
|
"confirm_continue":"계속하시겠습니까?",
|
||||||
|
"must_be_stopped_to_modify":"수정하려면 중지해야 합니다.",
|
||||||
|
"port":"포트",
|
||||||
|
"default":"기본",
|
||||||
|
"base_directory":"기본 디렉토리",
|
||||||
|
"publish_directory":"디렉토리 게시",
|
||||||
|
"generated_automatically_after_start":"시작 후 자동으로 생성됨",
|
||||||
|
"roots_password":"루트의 비밀번호",
|
||||||
|
"root_user":"루트 사용자",
|
||||||
|
"eg":"예",
|
||||||
|
"user":"사용자",
|
||||||
|
"loading":"로드 중...",
|
||||||
|
"version":"버전",
|
||||||
|
"host":"주최자",
|
||||||
|
"already_used_for":"<span class=\"text-red-500\">{{type}}</span>이(가) 이미 사용됨",
|
||||||
|
"configuration":"구성",
|
||||||
|
"engine":"엔진",
|
||||||
|
"network":"회로망",
|
||||||
|
"ip_address":"IP 주소",
|
||||||
|
"ssh_private_key":"SSH 개인 키",
|
||||||
|
"type":"유형",
|
||||||
|
"html_url":"HTML URL",
|
||||||
|
"api_url":"API URL",
|
||||||
|
"organization":"조직",
|
||||||
|
"new_password":"새 비밀번호",
|
||||||
|
"super_secure_new_password":"매우 안전한 새 비밀번호",
|
||||||
|
"submit":"제출하다",
|
||||||
|
"default_email_address":"기본 이메일 주소",
|
||||||
|
"default_password":"기본 비밀번호",
|
||||||
|
"username":"사용자 이름",
|
||||||
|
"root_db_user":"루트 DB 사용자",
|
||||||
|
"root_db_password":"루트 DB 비밀번호",
|
||||||
|
"api_port":"API 포트",
|
||||||
|
"verifying":"확인 중",
|
||||||
|
"verify_emails_without_smtp":"SMTP 없이 이메일 확인",
|
||||||
|
"extra_config":"추가 구성",
|
||||||
|
"select_a_service":"서비스 선택",
|
||||||
|
"select_a_service_version":"서비스 버전 선택",
|
||||||
|
"removing":"풀이...",
|
||||||
|
"remove_domain":"도메인 제거",
|
||||||
|
"public_port_range":"공용 포트 범위",
|
||||||
|
"public_port_range_explainer":"데이터베이스/서비스/내부 서비스를 노출하는 데 사용되는 포트입니다.<br> 방화벽에 추가합니다(해당되는 경우).<br><br>포트 범위를 지정할 수 있습니다(예: <span class='text-settings '>). 9000-9100</span>",
|
||||||
|
"no_actions_available":"사용 가능한 작업이 없습니다.",
|
||||||
|
"admin_api_key":"관리 API 키"
|
||||||
|
},
|
||||||
|
"register":{
|
||||||
|
"register":"등록하다",
|
||||||
|
"registering":"등록 중...",
|
||||||
|
"first_user":"첫 번째 사용자를 등록하고 있습니다. Coolify 인스턴스의 관리자가 됩니다."
|
||||||
|
},
|
||||||
|
"reset":{
|
||||||
|
"reset_password":"초기화",
|
||||||
|
"invalid_secret_key":"잘못된 비밀 키입니다.",
|
||||||
|
"secret_key":"비밀 키",
|
||||||
|
"find_path_secret_key":"~/coolify/.env(COOLIFY_SECRET_KEY)에서 찾을 수 있습니다."
|
||||||
|
},
|
||||||
|
"application":{
|
||||||
|
"configuration":{
|
||||||
|
"buildpack":{
|
||||||
|
"choose_this_one":"이걸 선택..."
|
||||||
|
},
|
||||||
|
"branch_already_in_use":"이 분기는 이미 다른 응용 프로그램에서 사용하고 있습니다. 이 경우 두 애플리케이션 모두에 대해 Webhook이 작동하지 않습니다. 사용하시겠습니까?",
|
||||||
|
"no_repositories_configured":"Git 애플리케이션에 대해 구성된 저장소가 없습니다.",
|
||||||
|
"configure_it_now":"지금 구성",
|
||||||
|
"loading_repositories":"저장소 로드 중...",
|
||||||
|
"select_a_repository":"저장소를 선택하십시오",
|
||||||
|
"loading_branches":"브랜치 로드 중...",
|
||||||
|
"select_a_repository_first":"먼저 저장소를 선택하십시오",
|
||||||
|
"select_a_branch":"지점을 선택해 주세요",
|
||||||
|
"loading_groups":"그룹 로드 중...",
|
||||||
|
"select_a_group":"그룹을 선택하세요.",
|
||||||
|
"loading_projects":"프로젝트 로드 중...",
|
||||||
|
"select_a_project":"프로젝트를 선택하세요.",
|
||||||
|
"no_projects_found":"프로젝트를 찾을 수 없습니다.",
|
||||||
|
"no_branches_found":"지점을 찾을 수 없습니다",
|
||||||
|
"configure_build_pack":"빌드 팩 구성",
|
||||||
|
"scanning_repository_suggest_build_pack":"빌드 팩을 제안하기 위해 저장소를 검색하는 중...",
|
||||||
|
"found_lock_file":"{{packageManager}}에 대한 잠금 파일을 찾았습니다.<br>사전 정의된 명령 명령에 사용합니다.",
|
||||||
|
"configure_destination":"대상 구성",
|
||||||
|
"no_configurable_destination":"구성 가능한 대상을 찾을 수 없습니다.",
|
||||||
|
"select_a_repository_project":"리포지토리/프로젝트 선택",
|
||||||
|
"select_a_git_source":"Git 소스 선택",
|
||||||
|
"no_configurable_git":"구성 가능한 Git 소스를 찾을 수 없습니다.",
|
||||||
|
"configuration_missing":"구성 누락"
|
||||||
|
},
|
||||||
|
"build":{
|
||||||
|
"queued_waiting_exec":"큐에 넣고 실행을 기다리고 있습니다.",
|
||||||
|
"build_logs_of":"빌드 로그",
|
||||||
|
"running":"달리기",
|
||||||
|
"queued":"대기 중",
|
||||||
|
"finished_in":"완료",
|
||||||
|
"load_more":"더 찾아보기",
|
||||||
|
"no_logs":"로그를 찾을 수 없습니다.",
|
||||||
|
"waiting_logs":"로그를 기다리는 중..."
|
||||||
|
},
|
||||||
|
"preview":{
|
||||||
|
"need_during_buildtime":"빌드 시간에 필요하십니까?",
|
||||||
|
"setup_secret_app_first":"PR/MR 배포에 비밀을 추가할 수 있습니다. 먼저 응용 프로그램에 비밀을 추가하십시오. <br><span class='text-settings '>스테이징</span> 환경을 만드는 데 유용합니다.",
|
||||||
|
"values_overwriting_app_secrets":"이러한 값은 PR/MR 배포에서 애플리케이션 비밀을 덮어씁니다. <span class='text-settings '>스테이징</span> 환경을 만드는 데 유용합니다.",
|
||||||
|
"redeploy":"재배포",
|
||||||
|
"no_previews_available":"사용 가능한 미리보기가 없습니다."
|
||||||
|
},
|
||||||
|
"secrets":{
|
||||||
|
"secret_saved":"비밀이 저장되었습니다.",
|
||||||
|
"use_isbuildsecret":"isBuildSecret 사용",
|
||||||
|
"secrets_for":"비밀"
|
||||||
|
},
|
||||||
|
"storage":{
|
||||||
|
"path_is_required":"경로는 필수 항목입니다.",
|
||||||
|
"storage_saved":"저장용량이 저장되었습니다.",
|
||||||
|
"storage_updated":"스토리지가 업데이트되었습니다.",
|
||||||
|
"storage_deleted":"스토리지가 삭제되었습니다.",
|
||||||
|
"persistent_storage_explainer":"배포 간에 유지하려는 모든 폴더를 지정할 수 있습니다.<br><span class='text-settings '>/example</span>은 <span class='text-settings '>/app/를 보존함을 의미합니다. <span class='text-settings '>/app</span>과 같은 컨테이너의 example</span>은 애플리케이션의 <span class='text-settings '>루트 디렉토리</span>입니다.<br> <br><span class='text-settings '>데이터베이스(SQLite)</span> 또는 <span class='text-settings '>캐시</span>와 같은 데이터를 저장하는 데 유용합니다."
|
||||||
|
},
|
||||||
|
"deployment_queued":"배포가 대기 중입니다.",
|
||||||
|
"confirm_to_delete":"'{{name}}'을(를) 삭제하시겠습니까?",
|
||||||
|
"stop_application":"애플리케이션 중지",
|
||||||
|
"permission_denied_stop_application":"애플리케이션을 중지할 권한이 없습니다.",
|
||||||
|
"rebuild_application":"애플리케이션 재구축",
|
||||||
|
"permission_denied_rebuild_application":"애플리케이션을 다시 빌드할 권한이 없습니다.",
|
||||||
|
"build_and_start_application":"배포",
|
||||||
|
"permission_denied_build_and_start_application":"애플리케이션을 배포할 권한이 없습니다.",
|
||||||
|
"configurations":"구성",
|
||||||
|
"secret":"비밀",
|
||||||
|
"persistent_storage":"영구 스토리지",
|
||||||
|
"previews":"미리보기",
|
||||||
|
"logs":"애플리케이션 로그",
|
||||||
|
"build_logs":"빌드 로그",
|
||||||
|
"delete_application":"삭제",
|
||||||
|
"permission_denied_delete_application":"이 애플리케이션을 삭제할 권한이 없습니다.",
|
||||||
|
"domain_already_in_use":"도메인 {{domain}}은(는) 이미 사용 중입니다.",
|
||||||
|
"dns_not_set_error":"DNS가 올바르게 설정되지 않았거나 {{domain}}에 대해 전파되었습니다.<br><br>DNS 설정을 확인하십시오.",
|
||||||
|
"domain_required":"도메인은 필수 항목입니다.",
|
||||||
|
"settings_saved":"구성이 저장되었습니다.",
|
||||||
|
"dns_not_set_partial_error":"DNS가 설정되지 않았습니다.",
|
||||||
|
"domain_not_valid":"도메인을 확인할 수 없거나 서버 IP 주소를 가리키지 않습니다.<br><br>DNS 구성을 확인하고 다시 시도하십시오.",
|
||||||
|
"git_source":"힘내 소스",
|
||||||
|
"git_repository":"Git 저장소",
|
||||||
|
"build_pack":"빌드 팩",
|
||||||
|
"base_image":"배포 이미지",
|
||||||
|
"base_image_explainer":"배포에 사용할 이미지입니다.",
|
||||||
|
"base_build_image":"빌드 이미지",
|
||||||
|
"base_build_image_explainer":"빌드 프로세스 중에 사용될 이미지입니다.",
|
||||||
|
"destination":"목적지",
|
||||||
|
"application":"신청",
|
||||||
|
"url_fqdn":"URL(FQDN)",
|
||||||
|
"domain_fqdn":"도메인(FQDN)",
|
||||||
|
"https_explainer":"<span class='text-settings '>https</span>를 지정하면 https를 통해서만 애플리케이션에 액세스할 수 있습니다. SSL 인증서가 생성됩니다.<br><span class='text-settings '>www</span>를 지정하면 애플리케이션이 www가 아닌 곳에서 리디렉션(302)되거나 그 반대의 경우도 마찬가지입니다.<br>< br>도메인을 수정하려면 먼저 애플리케이션을 중지해야 합니다.<br><br><span class='text-white '>미리 DNS가 서버 IP를 가리키도록 설정해야 합니다.</span>",
|
||||||
|
"ssl_www_and_non_www":"www 및 www가 없는 SSL을 생성하시겠습니까?",
|
||||||
|
"ssl_explainer":"www 및 non-www 모두에 대한 인증서를 생성합니다. <br>미리 <span class=' text-settings'>두 DNS 항목</span>을 설정해야 합니다.<br><br>두 DNS 항목 모두에 방문자가 있을 것으로 예상되는 경우 유용합니다.",
|
||||||
|
"install_command":"설치 명령",
|
||||||
|
"build_command":"빌드 명령",
|
||||||
|
"start_command":"시작 명령",
|
||||||
|
"directory_to_use_explainer":"모든 명령의 기반으로 사용할 디렉토리입니다.<br><span class='text-settings '>monorepos</span>와 함께 유용할 수 있습니다.",
|
||||||
|
"publish_directory_explainer":"배포를 위한 모든 자산이 포함된 디렉터리입니다. <br> 예: <span class='text-settings '>dist</span>,<span class='text-settings '>_site</span> 또는 <span class='text-settings '>public< /스팬>.",
|
||||||
|
"features":"특징",
|
||||||
|
"enable_automatic_deployment":"자동 배포 활성화",
|
||||||
|
"enable_auto_deploy_webhooks":"웹훅을 통한 자동 배포를 활성화합니다.",
|
||||||
|
"enable_mr_pr_previews":"MR/PR 미리보기 활성화",
|
||||||
|
"expose_a_port":"포트 노출",
|
||||||
|
"enable_preview_deploy_mr_pr_requests":"끌어오기 또는 병합 요청에서 미리보기 배포를 활성화합니다.",
|
||||||
|
"debug_logs":"디버그 로그",
|
||||||
|
"enable_debug_log_during_build":"빌드 단계에서 디버그 로그를 활성화합니다.<br><span class='text-settings '>민감한 정보</span>가 표시되고 로그에 저장될 수 있습니다.",
|
||||||
|
"cant_activate_auto_deploy_without_repo":"이 리포지토리/분기에 대해 하나의 애플리케이션만 정의될 때까지 자동 배포를 활성화할 수 없습니다.",
|
||||||
|
"no_applications_found":"애플리케이션을 찾을 수 없습니다.",
|
||||||
|
"secret__batch_dot_env":".env 파일 붙여넣기",
|
||||||
|
"batch_secrets":"일괄 추가 비밀"
|
||||||
|
},
|
||||||
|
"general":"일반적인",
|
||||||
|
"database":{
|
||||||
|
"default_database":"기본 데이터베이스",
|
||||||
|
"generated_automatically_after_set_to_public":"public으로 설정 후 자동 생성",
|
||||||
|
"connection_string":"연결 문자열",
|
||||||
|
"set_public":"공개 설정",
|
||||||
|
"warning_database_public":"인터넷을 통해 데이터베이스에 연결할 수 있습니다. <br>이 경우 보안을 심각하게 생각하십시오!",
|
||||||
|
"change_append_only_mode":"추가 전용 모드 변경",
|
||||||
|
"warning_append_only":"백업에서 redis 데이터를 복원하려는 경우에 유용합니다.<br><span class=' text-white'>데이터베이스를 다시 시작해야 합니다.</span>",
|
||||||
|
"select_database_type":"데이터베이스 유형 선택",
|
||||||
|
"select_database_version":"데이터베이스 버전 선택",
|
||||||
|
"confirm_stop":"{{name}}을(를) 중지하시겠습니까?",
|
||||||
|
"stop_database":"중지",
|
||||||
|
"permission_denied_stop_database":"데이터베이스를 중지할 권한이 없습니다.",
|
||||||
|
"start_database":"시작",
|
||||||
|
"permission_denied_start_database":"데이터베이스를 시작할 권한이 없습니다.",
|
||||||
|
"delete_database":"삭제",
|
||||||
|
"permission_denied_delete_database":"데이터베이스를 삭제할 권한이 없습니다.",
|
||||||
|
"no_databases_found":"데이터베이스를 찾을 수 없습니다.",
|
||||||
|
"logs":"로그"
|
||||||
|
},
|
||||||
|
"destination":{
|
||||||
|
"delete_destination":"삭제",
|
||||||
|
"permission_denied_delete_destination":"이 목적지를 삭제할 권한이 없습니다.",
|
||||||
|
"add_to_coolify":"Coolify에 추가",
|
||||||
|
"coolify_proxy_stopped":"Coolify 프록시가 중지되었습니다!",
|
||||||
|
"coolify_proxy_started":"Coolify 프록시가 시작되었습니다!",
|
||||||
|
"confirm_restart_proxy":"프록시를 다시 시작하시겠습니까? 모든 것이 ~10초 안에 재구성됩니다.",
|
||||||
|
"coolify_proxy_restarting":"Coolify 프록시 다시 시작 중...",
|
||||||
|
"restarting_please_wait":"다시 시작 중입니다... 잠시만 기다려 주십시오...",
|
||||||
|
"force_restart_proxy":"강제 재시작 프록시",
|
||||||
|
"use_coolify_proxy":"Coolify 프록시를 사용하시겠습니까?",
|
||||||
|
"no_destination_found":"목적지를 찾을 수 없습니다",
|
||||||
|
"new_error_network_already_exists":"다른 팀에 대해 네트워크 {{network}}이(가) 이미 구성되었습니다!",
|
||||||
|
"new":{
|
||||||
|
"saving_and_configuring_proxy":"절약...",
|
||||||
|
"install_proxy":"그러면 수동 구성 없이 애플리케이션과 서비스에 액세스할 수 있도록 대상에 프록시가 설치됩니다(Docker에 권장됨).<br><br>데이터베이스에는 자체 프록시가 있습니다.",
|
||||||
|
"add_new_destination":"새 목적지 추가",
|
||||||
|
"predefined_destinations":"사전 정의된 목적지"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"sources":{
|
||||||
|
"local_docker":"로컬 도커",
|
||||||
|
"remote_docker":"원격 도커",
|
||||||
|
"organization_explainer":"조직을 Git 소스로 사용하려면 입력하십시오. 그렇지 않으면 사용자가 사용됩니다."
|
||||||
|
},
|
||||||
|
"source":{
|
||||||
|
"new":{
|
||||||
|
"git_source":"새 Git 소스 추가",
|
||||||
|
"official_providers":"공식 제공업체"
|
||||||
|
},
|
||||||
|
"no_git_sources_found":"git 소스를 찾을 수 없습니다.",
|
||||||
|
"delete_git_source":"삭제",
|
||||||
|
"permission_denied":"Git 소스를 삭제할 권한이 없습니다.",
|
||||||
|
"create_new_app":"새 {{name}} 앱 만들기",
|
||||||
|
"change_app_settings":"{{name}} 앱 설정 변경",
|
||||||
|
"install_repositories":"저장소 설치",
|
||||||
|
"application_id":"애플리케이션 ID",
|
||||||
|
"group_name":"그룹 이름",
|
||||||
|
"oauth_id":"인증 ID",
|
||||||
|
"oauth_id_explainer":"OAuth ID는 GitLab 애플리케이션의 고유 식별자입니다. <br>GitLab OAuth 애플리케이션의 <span class=' text-settings' >URL</span>에서 찾을 수 있습니다.",
|
||||||
|
"register_oauth_gitlab":"GitLab에 새 OAuth 애플리케이션 등록",
|
||||||
|
"gitlab":{
|
||||||
|
"self_hosted":"인스턴스 전체 애플리케이션(자체 호스팅)",
|
||||||
|
"user_owned":"사용자 소유 애플리케이션",
|
||||||
|
"group_owned":"그룹 소유 애플리케이션",
|
||||||
|
"gitlab_application_type":"GitLab 애플리케이션 유형",
|
||||||
|
"already_configured":"GitLab 앱이 이미 구성되어 있습니다."
|
||||||
|
},
|
||||||
|
"github":{
|
||||||
|
"redirecting":"Github으로 리디렉션 중..."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"services":{
|
||||||
|
"all_email_verified":"모든 이메일이 확인되었습니다. 지금 로그인할 수 있습니다.",
|
||||||
|
"generate_www_non_www_ssl":"www 및 non-www 모두에 대한 인증서를 생성합니다. <br>미리 <span class='text-settings'>두 DNS 항목</span>을 설정해야 합니다.<br><br>서비스를 다시 시작해야 합니다."
|
||||||
|
},
|
||||||
|
"service":{
|
||||||
|
"stop_service":"중지",
|
||||||
|
"permission_denied_stop_service":"서비스를 중지할 권한이 없습니다.",
|
||||||
|
"start_service":"시작",
|
||||||
|
"permission_denied_start_service":"서비스를 시작할 권한이 없습니다.",
|
||||||
|
"delete_service":"삭제",
|
||||||
|
"permission_denied_delete_service":"서비스를 삭제할 권한이 없습니다.",
|
||||||
|
"no_service":"서비스를 찾을 수 없습니다.",
|
||||||
|
"logs":"로그"
|
||||||
|
},
|
||||||
|
"setting":{
|
||||||
|
"change_language":"언어 변경",
|
||||||
|
"permission_denied":"이 작업을 수행할 권한이 없습니다. \\n관리자에게 권한 수정을 요청하세요.",
|
||||||
|
"domain_removed":"도메인이 삭제됨",
|
||||||
|
"ssl_explainer":"<span class='text-settings'>https</span>를 지정하면 Coolify는 https를 통해서만 액세스할 수 있습니다. SSL 인증서가 자동으로 생성됩니다.<br><span class='text-settings '>www</span>를 지정하면 Coolify가 www가 아닌 곳에서 리디렉션(302)되거나 그 반대의 경우도 마찬가지입니다.<br><br ><span class='text-settings '>경고:</span> 이미 설정된 도메인을 변경하면 웹훅 및 기타 통합이 중단됩니다! 수동으로 업데이트해야 합니다.",
|
||||||
|
"must_remove_domain_before_changing":"이 설정을 변경하려면 먼저 도메인을 제거해야 합니다.",
|
||||||
|
"registration_allowed":"등록이 허용됩니까?",
|
||||||
|
"registration_allowed_explainer":"애플리케이션에 대한 추가 등록을 허용합니다. <br>최초 등록 후에는 꺼져 있습니다.",
|
||||||
|
"coolify_proxy_settings":"Coolify 프록시 설정",
|
||||||
|
"credential_stat_explainer":"<a class=\"text-white \" href=\"{{link}}\" target=\"_blank\">통계</a> 페이지에 대한 자격 증명입니다.",
|
||||||
|
"auto_update_enabled":"자동 업데이트가 활성화되었습니까?",
|
||||||
|
"auto_update_enabled_explainer":"Coolify에 대한 자동 업데이트를 활성화합니다. 실행 중인 빌드 프로세스가 없는 경우 배후에서 자동으로 수행됩니다.",
|
||||||
|
"generate_www_non_www_ssl":"www 및 non-www 모두에 대한 인증서를 생성합니다. <br>미리 <span class=' text-settings'>두 DNS 항목</span>을 설정해야 합니다.",
|
||||||
|
"is_dns_check_enabled":"DNS 확인이 활성화되었습니까?",
|
||||||
|
"is_dns_check_enabled_explainer":"SSL 인증서를 생성하기 전에 DNS 확인을 비활성화할 수 있습니다.<br><br>Coolify가 역방향 프록시 또는 터널 뒤에 있을 때 비활성화하는 것이 유용합니다."
|
||||||
|
},
|
||||||
|
"team":{
|
||||||
|
"pending_invitations":"대기 중인 초대",
|
||||||
|
"accept":"수용하다",
|
||||||
|
"delete":"삭제",
|
||||||
|
"member":"회원",
|
||||||
|
"root":"(뿌리)",
|
||||||
|
"invited_with_permissions":"<span class=\" text-rose-600\">{{permission}}</span> 권한으로 <span class=\" text-settings\">{{teamName}}</span>에 초대되었습니다.",
|
||||||
|
"members":"회원",
|
||||||
|
"root_team_explainer":"<span class='text-red-500 '>루트</span> 팀입니다. 즉, 이 그룹의 구성원은 인스턴스 전체 설정을 관리하고 Coolify의 모든 권한을 가질 수 있습니다(Linux의 루트 사용자와 같은 경우).",
|
||||||
|
"permission":"허가",
|
||||||
|
"you":"너",
|
||||||
|
"promote_to":"{{grade}}(으)로 승격",
|
||||||
|
"revoke_invitation":"초대 취소",
|
||||||
|
"pending_invitation":"대기 중인 초대",
|
||||||
|
"invite_new_member":"새 회원 초대",
|
||||||
|
"send_invitation":"초대장을 보내다",
|
||||||
|
"invite_only_register_explainer":"등록된 사용자만 초대할 수 있습니다.",
|
||||||
|
"admin":"관리자",
|
||||||
|
"read":"읽다"
|
||||||
|
}
|
||||||
|
}
|
341
apps/ui/src/lib/locales/pt.json
Normal file
341
apps/ui/src/lib/locales/pt.json
Normal file
@ -0,0 +1,341 @@
|
|||||||
|
{
|
||||||
|
"layout":{
|
||||||
|
"update_done":"Atualização completa.",
|
||||||
|
"wait_new_version_startup":"Aguardando a nova versão iniciar...",
|
||||||
|
"new_version":"Nova versão acessível. Recarregando...",
|
||||||
|
"switch_to_a_different_team":"Mudar para uma equipa diferente...",
|
||||||
|
"update_available":"Atualização disponível"
|
||||||
|
},
|
||||||
|
"error":{
|
||||||
|
"you_can_find_your_way_back":"Você pode encontrar o seu caminho de volta",
|
||||||
|
"here":"aqui",
|
||||||
|
"you_are_lost":"Ooops você está perdido! Mas não tenha medo!"
|
||||||
|
},
|
||||||
|
"index":{
|
||||||
|
"dashboard":"Painel",
|
||||||
|
"applications":"Formulários",
|
||||||
|
"destinations":"Destinos",
|
||||||
|
"git_sources":"Fontes Git",
|
||||||
|
"databases":"Bancos de dados",
|
||||||
|
"services":"Serviços",
|
||||||
|
"teams":"Equipes",
|
||||||
|
"not_implemented_yet":"Ainda não implementado",
|
||||||
|
"database":"Base de dados",
|
||||||
|
"settings":"Definições",
|
||||||
|
"global_settings":"Configurações globais",
|
||||||
|
"secret":"Segredo",
|
||||||
|
"team":"Equipe",
|
||||||
|
"logout":"Sair"
|
||||||
|
},
|
||||||
|
"login":{
|
||||||
|
"already_logged_in":"Já logado...",
|
||||||
|
"authenticating":"Autenticando...",
|
||||||
|
"login":"Conecte-se"
|
||||||
|
},
|
||||||
|
"forms":{
|
||||||
|
"password":"Senha",
|
||||||
|
"email":"Endereço de email",
|
||||||
|
"passwords_not_match":"As senhas não coincidem.",
|
||||||
|
"password_again":"Senha novamente",
|
||||||
|
"save":"Salvar",
|
||||||
|
"saving":"Salvando...",
|
||||||
|
"name":"Nome",
|
||||||
|
"value":"Valor",
|
||||||
|
"action":"Ações",
|
||||||
|
"is_required":"É necessário.",
|
||||||
|
"add":"Adicionar",
|
||||||
|
"set":"Definir",
|
||||||
|
"remove":"Remover",
|
||||||
|
"path":"Caminho",
|
||||||
|
"confirm_continue":"Tem certeza de continuar?",
|
||||||
|
"must_be_stopped_to_modify":"Deve ser parado para modificar.",
|
||||||
|
"port":"Porta",
|
||||||
|
"default":"predefinição",
|
||||||
|
"base_directory":"Diretório base",
|
||||||
|
"publish_directory":"Publicar diretório",
|
||||||
|
"generated_automatically_after_start":"Gerado automaticamente após o início",
|
||||||
|
"roots_password":"Senha do Root",
|
||||||
|
"root_user":"Usuário raiz",
|
||||||
|
"eg":"por exemplo",
|
||||||
|
"user":"Do utilizador",
|
||||||
|
"loading":"Carregando...",
|
||||||
|
"version":"Versão",
|
||||||
|
"host":"Hospedeiro",
|
||||||
|
"already_used_for":"<span class=\"text-red-500\">{{type}}</span> já usado para",
|
||||||
|
"configuration":"Configuração",
|
||||||
|
"engine":"Motor",
|
||||||
|
"network":"Rede",
|
||||||
|
"ip_address":"Endereço de IP",
|
||||||
|
"ssh_private_key":"Chave privada SSH",
|
||||||
|
"type":"Modelo",
|
||||||
|
"html_url":"URL HTML",
|
||||||
|
"api_url":"URL da API",
|
||||||
|
"organization":"Organização",
|
||||||
|
"new_password":"Nova Senha",
|
||||||
|
"super_secure_new_password":"Nova senha super segura",
|
||||||
|
"submit":"Enviar",
|
||||||
|
"default_email_address":"Endereço de e-mail padrão",
|
||||||
|
"default_password":"Senha padrão",
|
||||||
|
"username":"Nome de usuário",
|
||||||
|
"root_db_user":"Usuário raiz do banco de dados",
|
||||||
|
"root_db_password":"Senha do banco de dados raiz",
|
||||||
|
"api_port":"Porta API",
|
||||||
|
"verifying":"Verificando",
|
||||||
|
"verify_emails_without_smtp":"Verifique e-mails sem SMTP",
|
||||||
|
"extra_config":"Configuração extra",
|
||||||
|
"select_a_service":"Selecione um serviço",
|
||||||
|
"select_a_service_version":"Selecione uma versão do serviço",
|
||||||
|
"removing":"Removendo...",
|
||||||
|
"remove_domain":"Remover domínio",
|
||||||
|
"public_port_range":"Intervalo de portas públicas",
|
||||||
|
"public_port_range_explainer":"Portas usadas para expor bancos de dados/serviços/serviços internos.<br> Adicione-os ao seu firewall (se aplicável).<br><br>Você pode especificar um intervalo de portas, por exemplo: <span class='text-settings '> 9000-9100</span>",
|
||||||
|
"no_actions_available":"Nenhuma ação disponível",
|
||||||
|
"admin_api_key":"Chave de API de administrador"
|
||||||
|
},
|
||||||
|
"register":{
|
||||||
|
"register":"Registro",
|
||||||
|
"registering":"Registrando...",
|
||||||
|
"first_user":"Você está registrando o primeiro usuário. Será o administrador da sua instância Coolify."
|
||||||
|
},
|
||||||
|
"reset":{
|
||||||
|
"reset_password":"Redefinir",
|
||||||
|
"invalid_secret_key":"Chave secreta inválida.",
|
||||||
|
"secret_key":"Chave secreta",
|
||||||
|
"find_path_secret_key":"Você pode encontrá-lo em ~/coolify/.env (COOLIFY_SECRET_KEY)"
|
||||||
|
},
|
||||||
|
"application":{
|
||||||
|
"configuration":{
|
||||||
|
"buildpack":{
|
||||||
|
"choose_this_one":"Escolha este..."
|
||||||
|
},
|
||||||
|
"branch_already_in_use":"Esta ramificação já é usada por outro aplicativo. Os webhooks não funcionarão neste caso para ambos os aplicativos. Tem certeza de que deseja usá-lo?",
|
||||||
|
"no_repositories_configured":"Nenhum repositório configurado para seu aplicativo Git.",
|
||||||
|
"configure_it_now":"Configure agora",
|
||||||
|
"loading_repositories":"Carregando repositórios...",
|
||||||
|
"select_a_repository":"Selecione um repositório",
|
||||||
|
"loading_branches":"Carregando ramos...",
|
||||||
|
"select_a_repository_first":"Selecione um repositório primeiro",
|
||||||
|
"select_a_branch":"Selecione uma filial",
|
||||||
|
"loading_groups":"Carregando grupos...",
|
||||||
|
"select_a_group":"Selecione um grupo",
|
||||||
|
"loading_projects":"Carregando projetos...",
|
||||||
|
"select_a_project":"Por favor selecione um projeto",
|
||||||
|
"no_projects_found":"Nenhum projeto encontrado",
|
||||||
|
"no_branches_found":"Nenhuma ramificação encontrada",
|
||||||
|
"configure_build_pack":"Configurar pacote de compilação",
|
||||||
|
"scanning_repository_suggest_build_pack":"Verificando repositório para sugerir um pacote de compilação para você...",
|
||||||
|
"found_lock_file":"Arquivo de bloqueio encontrado para {{packageManager}}.<br>Usando-o para comandos de comandos predefinidos.",
|
||||||
|
"configure_destination":"Configurar destino",
|
||||||
|
"no_configurable_destination":"Nenhum destino configurável encontrado",
|
||||||
|
"select_a_repository_project":"Selecione um Repositório/Projeto",
|
||||||
|
"select_a_git_source":"Selecione uma fonte Git",
|
||||||
|
"no_configurable_git":"Nenhuma fonte Git configurável encontrada",
|
||||||
|
"configuration_missing":"Configuração ausente"
|
||||||
|
},
|
||||||
|
"build":{
|
||||||
|
"queued_waiting_exec":"Na fila e aguardando execução.",
|
||||||
|
"build_logs_of":"Construir registros de",
|
||||||
|
"running":"Corrida",
|
||||||
|
"queued":"Enfileiradas",
|
||||||
|
"finished_in":"Terminando em",
|
||||||
|
"load_more":"Carregue mais",
|
||||||
|
"no_logs":"Nenhum registro encontrado",
|
||||||
|
"waiting_logs":"Aguardando os logs..."
|
||||||
|
},
|
||||||
|
"preview":{
|
||||||
|
"need_during_buildtime":"Precisa durante o tempo de construção?",
|
||||||
|
"setup_secret_app_first":"Você pode adicionar segredos a implantações de PR/MR. Por favor, adicione segredos ao aplicativo primeiro. <br>Útil para criar ambientes de <span class='text-settings '>preparação</span>.",
|
||||||
|
"values_overwriting_app_secrets":"Esses valores substituem os segredos do aplicativo em implantações PR/MR. Útil para criar ambientes de <span class='text-settings '>preparação</span>.",
|
||||||
|
"redeploy":"Reimplantar",
|
||||||
|
"no_previews_available":"Nenhuma visualização disponível"
|
||||||
|
},
|
||||||
|
"secrets":{
|
||||||
|
"secret_saved":"Segredo salvo.",
|
||||||
|
"use_isbuildsecret":"Use isBuildSecret",
|
||||||
|
"secrets_for":"Segredos para"
|
||||||
|
},
|
||||||
|
"storage":{
|
||||||
|
"path_is_required":"O caminho é obrigatório.",
|
||||||
|
"storage_saved":"Armazenamento salvo.",
|
||||||
|
"storage_updated":"Armazenamento atualizado.",
|
||||||
|
"storage_deleted":"Armazenamento excluído.",
|
||||||
|
"persistent_storage_explainer":"Você pode especificar qualquer pasta que deseja que seja persistente nas implantações.<br><span class='text-settings '>/example</span> significa que ela preservará <span class='text-settings '>/app/ example</span> no contêiner, pois <span class='text-settings '>/app</span> é <span class='text-settings '>o diretório raiz</span> para seu aplicativo.<br> <br>Isto é útil para armazenar dados como um <span class='text-settings '>banco de dados (SQLite)</span> ou um <span class='text-settings '>cache</span>."
|
||||||
|
},
|
||||||
|
"deployment_queued":"Implantação em fila.",
|
||||||
|
"confirm_to_delete":"Tem certeza de que deseja excluir '{{name}}'?",
|
||||||
|
"stop_application":"Parar aplicativo",
|
||||||
|
"permission_denied_stop_application":"Você não tem permissão para parar o aplicativo.",
|
||||||
|
"rebuild_application":"Reconstruir aplicativo",
|
||||||
|
"permission_denied_rebuild_application":"Você não tem permissão para reconstruir o aplicativo.",
|
||||||
|
"build_and_start_application":"Implantar",
|
||||||
|
"permission_denied_build_and_start_application":"Você não tem permissão para implantar o aplicativo.",
|
||||||
|
"configurations":"Configurações",
|
||||||
|
"secret":"Segredos",
|
||||||
|
"persistent_storage":"Armazenamento persistente",
|
||||||
|
"previews":"Visualizações",
|
||||||
|
"logs":"Registros de aplicativos",
|
||||||
|
"build_logs":"Construir registros",
|
||||||
|
"delete_application":"Excluir",
|
||||||
|
"permission_denied_delete_application":"Você não tem permissão para excluir este aplicativo",
|
||||||
|
"domain_already_in_use":"O domínio {{domain}} já está em uso.",
|
||||||
|
"dns_not_set_error":"DNS não definido corretamente ou propagado para {{domain}}.<br><br>Verifique suas configurações de DNS.",
|
||||||
|
"domain_required":"O domínio é obrigatório.",
|
||||||
|
"settings_saved":"Configuração salva.",
|
||||||
|
"dns_not_set_partial_error":"DNS não definido",
|
||||||
|
"domain_not_valid":"Não foi possível resolver o domínio ou não está apontando para o endereço IP do servidor.<br><br>Verifique sua configuração de DNS e tente novamente.",
|
||||||
|
"git_source":"Fonte do Git",
|
||||||
|
"git_repository":"Repositório Git",
|
||||||
|
"build_pack":"Pacote de compilação",
|
||||||
|
"base_image":"Imagem de implantação",
|
||||||
|
"base_image_explainer":"Imagem que será usada para a implantação.",
|
||||||
|
"base_build_image":"Construir imagem",
|
||||||
|
"base_build_image_explainer":"Imagem que será usada durante o processo de compilação.",
|
||||||
|
"destination":"Destino",
|
||||||
|
"application":"Inscrição",
|
||||||
|
"url_fqdn":"URL (FQDN)",
|
||||||
|
"domain_fqdn":"Domínio (FQDN)",
|
||||||
|
"https_explainer":"Se você especificar <span class='text-settings '>https</span>, o aplicativo será acessível apenas por https. O certificado SSL será gerado para você.<br>Se você especificar <span class='text-settings '>www</span>, o aplicativo será redirecionado (302) de não www e vice-versa.<br>< br>Para modificar o domínio, você deve primeiro parar o aplicativo.<br><br><span class='text-white '>Você deve configurar seu DNS para apontar para o IP do servidor com antecedência.</span>",
|
||||||
|
"ssl_www_and_non_www":"Gerar SSL para www e não www?",
|
||||||
|
"ssl_explainer":"Ele irá gerar certificados para www e não www. <br>Você precisa ter <span class=' text-settings'>ambas as entradas DNS</span> definidas com antecedência.<br><br>Útil se você espera receber visitantes em ambas.",
|
||||||
|
"install_command":"Comando de instalação",
|
||||||
|
"build_command":"Comando de compilação",
|
||||||
|
"start_command":"Comando Iniciar",
|
||||||
|
"directory_to_use_explainer":"Diretório a ser usado como base para todos os comandos.<br>Pode ser útil com <span class='text-settings '>monorepos</span>.",
|
||||||
|
"publish_directory_explainer":"Diretório contendo todos os ativos para implantação. <br> Por exemplo: <span class='text-settings '>dist</span>,<span class='text-settings '>_site</span> ou <span class='text-settings '>public< /span>.",
|
||||||
|
"features":"Características",
|
||||||
|
"enable_automatic_deployment":"Ativar implantação automática",
|
||||||
|
"enable_auto_deploy_webhooks":"Habilite a implantação automática por meio de webhooks.",
|
||||||
|
"enable_mr_pr_previews":"Ativar visualizações de MR/PR",
|
||||||
|
"expose_a_port":"Expor uma porta",
|
||||||
|
"enable_preview_deploy_mr_pr_requests":"Habilite implantações de visualização de solicitações de pull ou mesclagem.",
|
||||||
|
"debug_logs":"Registros de depuração",
|
||||||
|
"enable_debug_log_during_build":"Ative os registros de depuração durante a fase de compilação.<br><span class='text-settings '>Informações confidenciais</span> podem ser visíveis e salvas nos registros.",
|
||||||
|
"cant_activate_auto_deploy_without_repo":"Não é possível ativar implantações automáticas até que apenas um aplicativo seja definido para este repositório/ramificação.",
|
||||||
|
"no_applications_found":"Nenhum aplicativo encontrado",
|
||||||
|
"secret__batch_dot_env":"Colar arquivo .env",
|
||||||
|
"batch_secrets":"Adicionar segredos em lote"
|
||||||
|
},
|
||||||
|
"general":"Em geral",
|
||||||
|
"database":{
|
||||||
|
"default_database":"Banco de dados padrão",
|
||||||
|
"generated_automatically_after_set_to_public":"Gerado automaticamente após definido como público",
|
||||||
|
"connection_string":"Cadeia de conexão",
|
||||||
|
"set_public":"Defina-o como público",
|
||||||
|
"warning_database_public":"Seu banco de dados estará acessível pela internet. <br>Leve a segurança a sério neste caso!",
|
||||||
|
"change_append_only_mode":"Alterar o modo somente anexar",
|
||||||
|
"warning_append_only":"Útil se você deseja restaurar dados redis de um backup.<br><span class=' text-white'>É necessário reiniciar o banco de dados.</span>",
|
||||||
|
"select_database_type":"Selecione um tipo de banco de dados",
|
||||||
|
"select_database_version":"Selecione uma versão do banco de dados",
|
||||||
|
"confirm_stop":"Tem certeza de que deseja parar {{name}}?",
|
||||||
|
"stop_database":"Pare",
|
||||||
|
"permission_denied_stop_database":"Você não tem permissão para parar o banco de dados.",
|
||||||
|
"start_database":"Começar",
|
||||||
|
"permission_denied_start_database":"Você não tem permissão para iniciar o banco de dados.",
|
||||||
|
"delete_database":"Excluir",
|
||||||
|
"permission_denied_delete_database":"Você não tem permissão para excluir um banco de dados",
|
||||||
|
"no_databases_found":"Nenhum banco de dados encontrado",
|
||||||
|
"logs":"Histórico"
|
||||||
|
},
|
||||||
|
"destination":{
|
||||||
|
"delete_destination":"Excluir",
|
||||||
|
"permission_denied_delete_destination":"Você não tem permissão para excluir este destino",
|
||||||
|
"add_to_coolify":"Adicionar ao Coolify",
|
||||||
|
"coolify_proxy_stopped":"Coolify Proxy parou!",
|
||||||
|
"coolify_proxy_started":"Coolify Proxy iniciado!",
|
||||||
|
"confirm_restart_proxy":"Tem certeza de que deseja reiniciar o proxy? Tudo será reconfigurado em ~10 segundos.",
|
||||||
|
"coolify_proxy_restarting":"Coolify Proxy reiniciando...",
|
||||||
|
"restarting_please_wait":"Reiniciando... aguarde...",
|
||||||
|
"force_restart_proxy":"Forçar reinicialização do proxy",
|
||||||
|
"use_coolify_proxy":"Usar Coolify Proxy?",
|
||||||
|
"no_destination_found":"Nenhum destino encontrado",
|
||||||
|
"new_error_network_already_exists":"Rede {{network}} já configurada para outra equipe!",
|
||||||
|
"new":{
|
||||||
|
"saving_and_configuring_proxy":"Salvando...",
|
||||||
|
"install_proxy":"Isso instalará um proxy no destino para permitir que você acesse seus aplicativos e serviços sem qualquer configuração manual (recomendado para o Docker).<br><br>Os bancos de dados terão seu próprio proxy.",
|
||||||
|
"add_new_destination":"Adicionar novo destino",
|
||||||
|
"predefined_destinations":"Destinos predefinidos"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"sources":{
|
||||||
|
"local_docker":"Docker local",
|
||||||
|
"remote_docker":"Docker remoto",
|
||||||
|
"organization_explainer":"Preencha-o se quiser usar o de uma organização como seu Git Source. Caso contrário, seu usuário será usado."
|
||||||
|
},
|
||||||
|
"source":{
|
||||||
|
"new":{
|
||||||
|
"git_source":"Adicionar nova fonte Git",
|
||||||
|
"official_providers":"Fornecedores oficiais"
|
||||||
|
},
|
||||||
|
"no_git_sources_found":"Nenhuma fonte git encontrada",
|
||||||
|
"delete_git_source":"Excluir",
|
||||||
|
"permission_denied":"Você não tem permissão para excluir uma fonte Git",
|
||||||
|
"create_new_app":"Criar novo aplicativo {{name}}",
|
||||||
|
"change_app_settings":"Alterar {{name}} configurações do aplicativo",
|
||||||
|
"install_repositories":"Instalar repositórios",
|
||||||
|
"application_id":"ID do aplicativo",
|
||||||
|
"group_name":"Nome do grupo",
|
||||||
|
"oauth_id":"ID OAuth",
|
||||||
|
"oauth_id_explainer":"O OAuth ID é o identificador exclusivo do aplicativo GitLab. <br>Você pode encontrá-lo <span class=' text-settings' >no URL</span> do seu aplicativo GitLab OAuth.",
|
||||||
|
"register_oauth_gitlab":"Registre um novo aplicativo OAuth no GitLab",
|
||||||
|
"gitlab":{
|
||||||
|
"self_hosted":"Aplicativo em toda a instância (auto-hospedado)",
|
||||||
|
"user_owned":"Aplicativo de propriedade do usuário",
|
||||||
|
"group_owned":"Aplicativo de propriedade do grupo",
|
||||||
|
"gitlab_application_type":"Tipo de aplicativo GitLab",
|
||||||
|
"already_configured":"O aplicativo GitLab já está configurado."
|
||||||
|
},
|
||||||
|
"github":{
|
||||||
|
"redirecting":"Redirecionando para o Github..."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"services":{
|
||||||
|
"all_email_verified":"Todos os e-mails são verificados. Você pode entrar agora.",
|
||||||
|
"generate_www_non_www_ssl":"Ele irá gerar certificados para www e não www. <br>Você precisa ter <span class='text-settings'>ambas as entradas DNS</span> definidas com antecedência.<br><br>O serviço precisa ser reiniciado."
|
||||||
|
},
|
||||||
|
"service":{
|
||||||
|
"stop_service":"Pare",
|
||||||
|
"permission_denied_stop_service":"Você não tem permissão para interromper o serviço.",
|
||||||
|
"start_service":"Começar",
|
||||||
|
"permission_denied_start_service":"Você não tem permissão para iniciar o serviço.",
|
||||||
|
"delete_service":"Excluir",
|
||||||
|
"permission_denied_delete_service":"Você não tem permissão para excluir um serviço.",
|
||||||
|
"no_service":"Nenhum serviço encontrado",
|
||||||
|
"logs":"Histórico"
|
||||||
|
},
|
||||||
|
"setting":{
|
||||||
|
"change_language":"Mudar idioma",
|
||||||
|
"permission_denied":"Você não tem permissão para fazer isso. \\nPeça a um administrador para modificar suas permissões.",
|
||||||
|
"domain_removed":"Domínio removido",
|
||||||
|
"ssl_explainer":"Se você especificar <span class='text-settings'>https</span>, o Coolify será acessível apenas por https. O certificado SSL será gerado para você.<br>Se você especificar <span class='text-settings '>www</span>, Coolify será redirecionado (302) de não www e vice-versa.<br><br ><span class='text-settings '>AVISO:</span> Se você alterar um domínio já definido, isso interromperá webhooks e outras integrações! Você precisa atualizá-los manualmente.",
|
||||||
|
"must_remove_domain_before_changing":"Deve remover o domínio antes de alterar esta configuração.",
|
||||||
|
"registration_allowed":"Registro permitido?",
|
||||||
|
"registration_allowed_explainer":"Permitir mais registros no aplicativo. <br>É desligado após o primeiro registro.",
|
||||||
|
"coolify_proxy_settings":"Configurações de proxy do Coolify",
|
||||||
|
"credential_stat_explainer":"Credenciais para a página de <a class=\"text-white \" href=\"{{link}}\" target=\"_blank\">estatísticas</a>.",
|
||||||
|
"auto_update_enabled":"Atualização automática habilitada?",
|
||||||
|
"auto_update_enabled_explainer":"Habilite atualizações automáticas para Coolify. Isso será feito automaticamente nos bastidores, se não houver nenhum processo de compilação em execução.",
|
||||||
|
"generate_www_non_www_ssl":"Ele irá gerar certificados para www e não www. <br>Você precisa ter <span class=' text-settings'>ambas as entradas de DNS</span> configuradas antecipadamente.",
|
||||||
|
"is_dns_check_enabled":"Verificação de DNS habilitada?",
|
||||||
|
"is_dns_check_enabled_explainer":"Você pode desabilitar a verificação de DNS antes de criar certificados SSL.<br><br>Desligá-la é útil quando o Coolify está atrás de um proxy reverso ou túnel."
|
||||||
|
},
|
||||||
|
"team":{
|
||||||
|
"pending_invitations":"Convites pendentes",
|
||||||
|
"accept":"Aceitar",
|
||||||
|
"delete":"Excluir",
|
||||||
|
"member":"membros)",
|
||||||
|
"root":"(raiz)",
|
||||||
|
"invited_with_permissions":"Convidado para <span class=\" text-settings\">{{teamName}}</span> com permissão <span class=\" text-rose-600\">{{permission}}</span>.",
|
||||||
|
"members":"Membros",
|
||||||
|
"root_team_explainer":"Esta é a equipe <span class='text-red-500 '>raiz</span>. Isso significa que os membros deste grupo podem gerenciar as configurações de toda a instância e ter todos os privilégios no Coolify (imagine como usuário root no Linux).",
|
||||||
|
"permission":"Permissão",
|
||||||
|
"you":"Você",
|
||||||
|
"promote_to":"Promover para {{grade}}",
|
||||||
|
"revoke_invitation":"Revogar convite",
|
||||||
|
"pending_invitation":"Convite pendente",
|
||||||
|
"invite_new_member":"Convidar novo membro",
|
||||||
|
"send_invitation":"Enviar convite",
|
||||||
|
"invite_only_register_explainer":"Você só pode convidar usuários registrados.",
|
||||||
|
"admin":"Administrador",
|
||||||
|
"read":"Ler"
|
||||||
|
}
|
||||||
|
}
|
@ -57,14 +57,15 @@ export const appSession: Writable<AppSession> = writable({
|
|||||||
export const disabledButton: Writable<boolean> = writable(false);
|
export const disabledButton: Writable<boolean> = writable(false);
|
||||||
export const isDeploymentEnabled: Writable<boolean> = writable(false);
|
export const isDeploymentEnabled: Writable<boolean> = writable(false);
|
||||||
export function checkIfDeploymentEnabledApplications(isAdmin: boolean, application: any) {
|
export function checkIfDeploymentEnabledApplications(isAdmin: boolean, application: any) {
|
||||||
return (
|
return !!(
|
||||||
isAdmin &&
|
isAdmin &&
|
||||||
(application.buildPack === 'compose') ||
|
(application.buildPack === 'compose') ||
|
||||||
(application.fqdn || application.settings.isBot) &&
|
(application.fqdn || application.settings.isBot) &&
|
||||||
application.gitSource &&
|
((application.gitSource &&
|
||||||
application.repository &&
|
application.repository &&
|
||||||
application.destinationDocker &&
|
application.buildPack) || application.simpleDockerfile) &&
|
||||||
application.buildPack
|
application.destinationDocker
|
||||||
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
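For readability, the new (right-hand) side of the hunk above assembles into the predicate below; since `&&` binds tighter than `||`, `compose` build packs pass with `isAdmin` alone, while the second branch (as written) checks an FQDN or bot mode, either a full git source or a `simpleDockerfile`, and a destination. The parentheses are added here only to make that parse explicit; behavior is unchanged and the `any` typing is kept from the original.

// Assembled from the new-side lines of the diff above (re-parenthesized for clarity).
export function checkIfDeploymentEnabledApplications(isAdmin: boolean, application: any) {
	return !!(
		(isAdmin && application.buildPack === 'compose') ||
		((application.fqdn || application.settings.isBot) &&
			((application.gitSource && application.repository && application.buildPack) ||
				application.simpleDockerfile) &&
			application.destinationDocker)
	);
}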
export function checkIfDeploymentEnabledServices(isAdmin: boolean, service: any) {
|
export function checkIfDeploymentEnabledServices(isAdmin: boolean, service: any) {
|
||||||
@ -81,6 +82,7 @@ export const status: Writable<any> = writable({
|
|||||||
statuses: [],
|
statuses: [],
|
||||||
overallStatus: 'stopped',
|
overallStatus: 'stopped',
|
||||||
loading: false,
|
loading: false,
|
||||||
|
restarting: false,
|
||||||
initialLoading: true
|
initialLoading: true
|
||||||
},
|
},
|
||||||
service: {
|
service: {
|
||||||
|
@ -1,11 +1,18 @@
|
|||||||
import i18n from 'sveltekit-i18n';
|
import i18n from 'sveltekit-i18n';
|
||||||
|
import { derived, writable } from "svelte/store";
|
||||||
import lang from './lang.json';
|
import lang from './lang.json';
|
||||||
|
|
||||||
|
export let currentLocale = writable("en");
|
||||||
|
export let debugTranslation = writable(false);
|
||||||
|
|
||||||
/** @type {import('sveltekit-i18n').Config} */
|
/** @type {import('sveltekit-i18n').Config} */
|
||||||
export const config = {
|
export const config = {
|
||||||
fallbackLocale: 'en',
|
fallbackLocale: 'en',
|
||||||
translations: {
|
translations: {
|
||||||
en: { lang },
|
en: { lang },
|
||||||
|
es: { lang },
|
||||||
|
pt: { lang },
|
||||||
|
ko: { lang },
|
||||||
fr: { lang }
|
fr: { lang }
|
||||||
},
|
},
|
||||||
loaders: [
|
loaders: [
|
||||||
@ -14,12 +21,27 @@ export const config = {
|
|||||||
key: '',
|
key: '',
|
||||||
loader: async () => (await import('./locales/en.json')).default
|
loader: async () => (await import('./locales/en.json')).default
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
locale: 'es',
|
||||||
|
key: '',
|
||||||
|
loader: async () => (await import('./locales/es.json')).default
|
||||||
|
},
|
||||||
|
{
|
||||||
|
locale: 'pt',
|
||||||
|
key: '',
|
||||||
|
loader: async () => (await import('./locales/pt.json')).default
|
||||||
|
},
|
||||||
{
|
{
|
||||||
locale: 'fr',
|
locale: 'fr',
|
||||||
key: '',
|
key: '',
|
||||||
loader: async () => (await import('./locales/fr.json')).default
|
loader: async () => (await import('./locales/fr.json')).default
|
||||||
|
},
|
||||||
|
{
|
||||||
|
locale: 'ko',
|
||||||
|
key: '',
|
||||||
|
loader: async () => (await import('./locales/ko.json')).default
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
};
|
};
|
||||||
|
|
||||||
export const { t, locales, locale, loadTranslations } = new i18n(config);
|
export const { t, locales, locale, loadTranslations } = new i18n(config);
|
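A brief usage sketch for the newly registered locales: `loadTranslations(locale, route)` is the sveltekit-i18n call exported above, while the wrapper function and the import path are illustrative assumptions, not code from this commit.

// Hypothetical SvelteKit load step: pick one of the registered locales and load its strings.
import { loadTranslations } from '$lib/locale'; // path assumed

export async function initI18n(pathname: string, chosenLocale: 'en' | 'es' | 'pt' | 'ko' | 'fr' = 'en') {
	// Every loader in the config uses key '', so this pulls the whole locale file for the route.
	await loadTranslations(chosenLocale, pathname);
}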
@ -64,6 +64,8 @@
|
|||||||
</script>
|
</script>
|
||||||
|
|
||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
|
export let settings: any;
|
||||||
|
export let sentryDSN: any;
|
||||||
export let baseSettings: any;
|
export let baseSettings: any;
|
||||||
export let pendingInvitations: any = 0;
|
export let pendingInvitations: any = 0;
|
||||||
|
|
||||||
@ -95,12 +97,24 @@
|
|||||||
import Toasts from '$lib/components/Toasts.svelte';
|
import Toasts from '$lib/components/Toasts.svelte';
|
||||||
import Tooltip from '$lib/components/Tooltip.svelte';
|
import Tooltip from '$lib/components/Tooltip.svelte';
|
||||||
import { onMount } from 'svelte';
|
import { onMount } from 'svelte';
|
||||||
|
import LocalePicker from '$lib/components/LocalePicker.svelte';
|
||||||
|
import * as Sentry from '@sentry/svelte';
|
||||||
|
import { BrowserTracing } from '@sentry/tracing';
|
||||||
|
import { dev } from '$app/env';
|
||||||
|
|
||||||
if (userId) $appSession.userId = userId;
|
if (userId) $appSession.userId = userId;
|
||||||
if (teamId) $appSession.teamId = teamId;
|
if (teamId) $appSession.teamId = teamId;
|
||||||
if (permission) $appSession.permission = permission;
|
if (permission) $appSession.permission = permission;
|
||||||
if (isAdmin) $appSession.isAdmin = isAdmin;
|
if (isAdmin) $appSession.isAdmin = isAdmin;
|
||||||
|
// if (settings?.doNotTrack === false) {
|
||||||
|
// Sentry.init({
|
||||||
|
// dsn: sentryDSN,
|
||||||
|
// environment: dev ? 'development' : 'production',
|
||||||
|
// integrations: [new BrowserTracing()],
|
||||||
|
// release: $appSession.version?.toString(),
|
||||||
|
// tracesSampleRate: 0.2
|
||||||
|
// });
|
||||||
|
// }
|
||||||
async function logout() {
|
async function logout() {
|
||||||
try {
|
try {
|
||||||
Cookies.remove('token');
|
Cookies.remove('token');
|
||||||
@ -110,14 +124,16 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
onMount(async () => {
|
onMount(async () => {
|
||||||
io.connect();
|
if ($appSession.userId) {
|
||||||
io.on('start-service', (message) => {
|
io.connect();
|
||||||
const { serviceId, state } = message;
|
io.on('start-service', (message) => {
|
||||||
$status.service.startup[serviceId] = state;
|
const { serviceId, state } = message;
|
||||||
if (state === 0 || state === 1) {
|
$status.service.startup[serviceId] = state;
|
||||||
delete $status.service.startup[serviceId];
|
if (state === 0 || state === 1) {
|
||||||
}
|
delete $status.service.startup[serviceId];
|
||||||
});
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
});
|
});
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
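Assembled from the new (right-hand) column of the hunk above, the layout's mount handler now only opens the websocket for authenticated sessions; a condensed sketch of the resulting logic (the meaning of the numeric states is an assumption, not stated in the diff):

// New-side onMount, re-indented for readability.
onMount(async () => {
	if ($appSession.userId) {
		io.connect();
		io.on('start-service', (message) => {
			const { serviceId, state } = message;
			$status.service.startup[serviceId] = state;
			// Once state settles to 0 or 1, the transient startup entry is dropped.
			if (state === 0 || state === 1) {
				delete $status.service.startup[serviceId];
			}
		});
	}
});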
@ -136,10 +152,16 @@
|
|||||||
<PageLoader />
|
<PageLoader />
|
||||||
</div>
|
</div>
|
||||||
{/if}
|
{/if}
|
||||||
|
|
||||||
<div class="drawer">
|
<div class="drawer">
|
||||||
<input id="main-drawer" type="checkbox" class="drawer-toggle" />
|
<input id="main-drawer" type="checkbox" class="drawer-toggle" />
|
||||||
<div class="drawer-content">
|
<div class="drawer-content">
|
||||||
{#if $appSession.userId}
|
{#if $appSession.userId}
|
||||||
|
<Tooltip triggeredBy="#iam" placement="right" color="bg-iam">IAM</Tooltip>
|
||||||
|
<Tooltip triggeredBy="#settings" placement="right" color="bg-settings text-black"
|
||||||
|
>Settings</Tooltip
|
||||||
|
>
|
||||||
|
<Tooltip triggeredBy="#logout" placement="right" color="bg-red-600">Logout</Tooltip>
|
||||||
<nav class="nav-main hidden lg:block z-20">
|
<nav class="nav-main hidden lg:block z-20">
|
||||||
<div class="flex h-screen w-full flex-col items-center transition-all duration-100">
|
<div class="flex h-screen w-full flex-col items-center transition-all duration-100">
|
||||||
{#if !$appSession.whiteLabeled}
|
{#if !$appSession.whiteLabeled}
|
||||||
@ -246,7 +268,7 @@
|
|||||||
<a
|
<a
|
||||||
id="settings"
|
id="settings"
|
||||||
sveltekit:prefetch
|
sveltekit:prefetch
|
||||||
href={$appSession.teamId === '0' ? '/settings/coolify' : '/settings/ssh'}
|
href={$appSession.teamId === '0' ? '/settings/coolify' : '/settings/docker'}
|
||||||
class="icons hover:text-settings"
|
class="icons hover:text-settings"
|
||||||
class:text-settings={$page.url.pathname.startsWith('/settings')}
|
class:text-settings={$page.url.pathname.startsWith('/settings')}
|
||||||
class:bg-coolgray-500={$page.url.pathname.startsWith('/settings')}
|
class:bg-coolgray-500={$page.url.pathname.startsWith('/settings')}
|
||||||
@ -292,6 +314,9 @@
|
|||||||
<path d="M7 12h14l-3 -3m0 6l3 -3" />
|
<path d="M7 12h14l-3 -3m0 6l3 -3" />
|
||||||
</svg>
|
</svg>
|
||||||
</div>
|
</div>
|
||||||
|
<!-- <div class="lg:block">
|
||||||
|
<LocalePicker/>
|
||||||
|
</div> -->
|
||||||
<div
|
<div
|
||||||
class="w-full text-center font-bold text-stone-400 hover:bg-coolgray-200 hover:text-white"
|
class="w-full text-center font-bold text-stone-400 hover:bg-coolgray-200 hover:text-white"
|
||||||
>
|
>
|
||||||
@ -311,19 +336,22 @@
|
|||||||
{/if}
|
{/if}
|
||||||
{/if}
|
{/if}
|
||||||
<div
|
<div
|
||||||
class="navbar lg:hidden space-x-2 flex flex-row items-center bg-coollabs"
|
class="navbar lg:hidden space-x-2 flex flex-row justify-between bg-coollabs"
|
||||||
class:hidden={!$appSession.userId}
|
class:hidden={!$appSession.userId}
|
||||||
>
|
>
|
||||||
<label for="main-drawer" class="drawer-button btn btn-square btn-ghost flex-col">
|
<div>
|
||||||
<span class="burger bg-white" />
|
<label for="main-drawer" class="drawer-button btn btn-square btn-ghost flex-col">
|
||||||
<span class="burger bg-white" />
|
<span class="burger bg-white" />
|
||||||
<span class="burger bg-white" />
|
<span class="burger bg-white" />
|
||||||
</label>
|
<span class="burger bg-white" />
|
||||||
<div class="prose flex flex-row justify-between space-x-1 w-full items-center pr-3">
|
</label>
|
||||||
{#if !$appSession.whiteLabeled}
|
<div class="prose flex flex-row justify-between space-x-1 w-full items-center pr-3">
|
||||||
<h3 class="mb-0 text-white">Coolify</h3>
|
{#if !$appSession.whiteLabeled}
|
||||||
{/if}
|
<h3 class="mb-0 text-white">Coolify</h3>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
<!-- <LocalePicker /> -->
|
||||||
</div>
|
</div>
|
||||||
<main>
|
<main>
|
||||||
<div class={$appSession.userId ? 'lg:pl-16' : null}>
|
<div class={$appSession.userId ? 'lg:pl-16' : null}>
|
||||||
@ -478,7 +506,3 @@
|
|||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<Tooltip triggeredBy="#iam" placement="right" color="bg-iam">IAM</Tooltip>
|
|
||||||
<Tooltip triggeredBy="#settings" placement="right" color="bg-settings text-black">Settings</Tooltip>
|
|
||||||
<Tooltip triggeredBy="#logout" placement="right" color="bg-red-600">Logout</Tooltip>
|
|
||||||
|
@ -13,7 +13,7 @@
|
|||||||
<a
|
<a
|
||||||
id="git"
|
id="git"
|
||||||
href="{application.gitSource.htmlUrl}/{application.repository}/tree/{application.branch}"
|
href="{application.gitSource.htmlUrl}/{application.repository}/tree/{application.branch}"
|
||||||
target="_blank"
|
target="_blank noreferrer"
|
||||||
class="no-underline"
|
class="no-underline"
|
||||||
>
|
>
|
||||||
{#if application.gitSource?.type === 'gitlab'}
|
{#if application.gitSource?.type === 'gitlab'}
|
||||||
@ -135,26 +135,28 @@
|
|||||||
</svg>Persistent Volumes</a
|
</svg>Persistent Volumes</a
|
||||||
>
|
>
|
||||||
</li>
|
</li>
|
||||||
<li
|
{#if !application.simpleDockerfile}
|
||||||
class="rounded"
|
<li
|
||||||
class:bg-coollabs={$page.url.pathname === `/applications/${$page.params.id}/features`}
|
class="rounded"
|
||||||
>
|
class:bg-coollabs={$page.url.pathname === `/applications/${$page.params.id}/features`}
|
||||||
<a href={`/applications/${$page.params.id}/features`} class="no-underline w-full"
|
|
||||||
><svg
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
class="w-6 h-6"
|
|
||||||
viewBox="0 0 24 24"
|
|
||||||
stroke-width="1.5"
|
|
||||||
stroke="currentColor"
|
|
||||||
fill="none"
|
|
||||||
stroke-linecap="round"
|
|
||||||
stroke-linejoin="round"
|
|
||||||
>
|
|
||||||
<path stroke="none" d="M0 0h24v24H0z" fill="none" />
|
|
||||||
<polyline points="13 3 13 10 19 10 11 21 11 14 5 14 13 3" />
|
|
||||||
</svg>Features</a
|
|
||||||
>
|
>
|
||||||
</li>
|
<a href={`/applications/${$page.params.id}/features`} class="no-underline w-full"
|
||||||
|
><svg
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
class="w-6 h-6"
|
||||||
|
viewBox="0 0 24 24"
|
||||||
|
stroke-width="1.5"
|
||||||
|
stroke="currentColor"
|
||||||
|
fill="none"
|
||||||
|
stroke-linecap="round"
|
||||||
|
stroke-linejoin="round"
|
||||||
|
>
|
||||||
|
<path stroke="none" d="M0 0h24v24H0z" fill="none" />
|
||||||
|
<polyline points="13 3 13 10 19 10 11 21 11 14 5 14 13 3" />
|
||||||
|
</svg>Features</a
|
||||||
|
>
|
||||||
|
</li>
|
||||||
|
{/if}
|
||||||
|
|
||||||
<li class="menu-title">
|
<li class="menu-title">
|
||||||
<span>Logs</span>
|
<span>Logs</span>
|
||||||
@ -165,7 +167,9 @@
|
|||||||
class:bg-coollabs={$page.url.pathname === `/applications/${$page.params.id}/logs`}
|
class:bg-coollabs={$page.url.pathname === `/applications/${$page.params.id}/logs`}
|
||||||
>
|
>
|
||||||
<a
|
<a
|
||||||
href={$status.application.overallStatus !== 'stopped' ? `/applications/${$page.params.id}/logs` : ''}
|
href={$status.application.overallStatus !== 'stopped'
|
||||||
|
? `/applications/${$page.params.id}/logs`
|
||||||
|
: ''}
|
||||||
class="no-underline w-full"
|
class="no-underline w-full"
|
||||||
><svg
|
><svg
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
@ -216,12 +220,40 @@
|
|||||||
<li class="menu-title">
|
<li class="menu-title">
|
||||||
<span>Advanced</span>
|
<span>Advanced</span>
|
||||||
</li>
|
</li>
|
||||||
|
{#if application.gitSourceId}
|
||||||
|
<li
|
||||||
|
class="rounded"
|
||||||
|
class:bg-coollabs={$page.url.pathname === `/applications/${$page.params.id}/revert`}
|
||||||
|
>
|
||||||
|
<a href={`/applications/${$page.params.id}/revert`} class="no-underline w-full">
|
||||||
|
<svg
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
class="w-6 h-6"
|
||||||
|
viewBox="0 0 24 24"
|
||||||
|
stroke-width="1.5"
|
||||||
|
stroke="currentColor"
|
||||||
|
fill="none"
|
||||||
|
stroke-linecap="round"
|
||||||
|
stroke-linejoin="round"
|
||||||
|
>
|
||||||
|
<path stroke="none" d="M0 0h24v24H0z" fill="none" />
|
||||||
|
<path d="M20 5v14l-12 -7z" />
|
||||||
|
<line x1="4" y1="5" x2="4" y2="19" />
|
||||||
|
</svg>
|
||||||
|
Revert</a
|
||||||
|
>
|
||||||
|
</li>
|
||||||
|
{/if}
|
||||||
<li
|
<li
|
||||||
class="rounded"
|
class="rounded"
|
||||||
class:text-stone-600={$status.application.overallStatus !== 'healthy'}
|
class:text-stone-600={$status.application.overallStatus !== 'healthy'}
|
||||||
class:bg-coollabs={$page.url.pathname === `/applications/${$page.params.id}/usage`}
|
class:bg-coollabs={$page.url.pathname === `/applications/${$page.params.id}/usage`}
|
||||||
>
|
>
|
||||||
<a href={$status.application.overallStatus === 'healthy' ? `/applications/${$page.params.id}/usage` : ''} class="no-underline w-full"
|
<a
|
||||||
|
href={$status.application.overallStatus === 'healthy'
|
||||||
|
? `/applications/${$page.params.id}/usage`
|
||||||
|
: ''}
|
||||||
|
class="no-underline w-full"
|
||||||
><svg
|
><svg
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
class="w-6 h-6"
|
class="w-6 h-6"
|
||||||
@ -237,7 +269,7 @@
|
|||||||
</svg>Monitoring</a
|
</svg>Monitoring</a
|
||||||
>
|
>
|
||||||
</li>
|
</li>
|
||||||
{#if !application.settings.isBot}
|
{#if !application.settings.isBot && application.gitSourceId}
|
||||||
<li
|
<li
|
||||||
class="rounded"
|
class="rounded"
|
||||||
class:bg-coollabs={$page.url.pathname === `/applications/${$page.params.id}/previews`}
|
class:bg-coollabs={$page.url.pathname === `/applications/${$page.params.id}/previews`}
|
||||||
@@ -2,8 +2,14 @@
 import type { Load } from '@sveltejs/kit';
 function checkConfiguration(application: any): string | null {
 let configurationPhase = null;
-if (!application.gitSourceId) {
-configurationPhase = 'source';
+if (!application.gitSourceId && !application.simpleDockerfile) {
+return (configurationPhase = 'source');
+}
+if (application.simpleDockerfile) {
+if (!application.destinationDockerId) {
+configurationPhase = 'destination';
+}
+return configurationPhase;
 } else if (!application.repository && !application.branch) {
 configurationPhase = 'repository';
 } else if (!application.destinationDockerId) {
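Taken together, the updated checkConfiguration guard can be read as the following standalone sketch. This is a minimal sketch, not the file itself: the Application type is assumed from the fields this diff touches, and the trailing branches mirror the unchanged code below the hunk.

// Minimal sketch of the configuration-phase check after this change.
type Application = {
  gitSourceId?: string | null;
  simpleDockerfile?: string | null;
  destinationDockerId?: string | null;
  repository?: string | null;
  branch?: string | null;
};

function checkConfiguration(application: Application): string | null {
  // Neither a git source nor a raw Dockerfile: the source step is still pending.
  if (!application.gitSourceId && !application.simpleDockerfile) {
    return 'source';
  }
  // Dockerfile-only applications skip repository/branch selection entirely;
  // the only remaining prerequisite is a destination docker engine.
  if (application.simpleDockerfile) {
    return application.destinationDockerId ? null : 'destination';
  }
  if (!application.repository && !application.branch) {
    return 'repository';
  }
  if (!application.destinationDockerId) {
    return 'destination';
  }
  return null;
}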
@ -70,8 +76,8 @@
|
|||||||
selectedBuildId
|
selectedBuildId
|
||||||
} from '$lib/store';
|
} from '$lib/store';
|
||||||
import { errorNotification, handlerNotFoundLoad } from '$lib/common';
|
import { errorNotification, handlerNotFoundLoad } from '$lib/common';
|
||||||
import Tooltip from '$lib/components/Tooltip.svelte';
|
|
||||||
import Menu from './_Menu.svelte';
|
import Menu from './_Menu.svelte';
|
||||||
|
import { saveForm } from './utils';
|
||||||
|
|
||||||
let statusInterval: any;
|
let statusInterval: any;
|
||||||
let forceDelete = false;
|
let forceDelete = false;
|
||||||
@ -96,12 +102,25 @@
|
|||||||
|
|
||||||
async function handleDeploySubmit(forceRebuild = false) {
|
async function handleDeploySubmit(forceRebuild = false) {
|
||||||
if (!$isDeploymentEnabled) return;
|
if (!$isDeploymentEnabled) return;
|
||||||
|
if (application.gitCommitHash && !application.settings.isPublicRepository) {
|
||||||
|
const sure = await confirm(
|
||||||
|
`Are you sure you want to deploy a specific commit (${application.gitCommitHash})? This will disable the "Automatic Deployment" feature to prevent accidental overwrites of incoming commits.`
|
||||||
|
);
|
||||||
|
if (!sure) {
|
||||||
|
return;
|
||||||
|
} else {
|
||||||
|
await post(`/applications/${id}/settings`, {
|
||||||
|
autodeploy: false
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
if (!statusInterval) {
|
if (!statusInterval) {
|
||||||
statusInterval = setInterval(async () => {
|
statusInterval = setInterval(async () => {
|
||||||
await getStatus();
|
await getStatus();
|
||||||
}, 2000);
|
}, 2000);
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
|
await saveForm(id, application);
|
||||||
const { buildId } = await post(`/applications/${id}/deploy`, {
|
const { buildId } = await post(`/applications/${id}/deploy`, {
|
||||||
...application,
|
...application,
|
||||||
forceRebuild
|
forceRebuild
|
||||||
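The new block at the top of handleDeploySubmit guards deployments that pin a specific commit: the user is asked to confirm, and Automatic Deployment is switched off so an incoming webhook cannot overwrite the pinned build. A hedged sketch of that flow as a helper follows; the helper name is illustrative (in the diff the logic is inlined, and `confirm` is awaited, suggesting a custom dialog rather than window.confirm).

// Illustrative helper, not from the codebase: mirrors the inlined guard above.
async function confirmPinnedCommitDeploy(
  id: string,
  application: { gitCommitHash?: string; settings: { isPublicRepository: boolean } },
  post: (url: string, body: unknown) => Promise<unknown>
): Promise<boolean> {
  if (!application.gitCommitHash || application.settings.isPublicRepository) {
    return true; // nothing pinned, deploy as usual
  }
  const sure = window.confirm(
    `Are you sure you want to deploy a specific commit (${application.gitCommitHash})? ` +
      'This will disable the "Automatic Deployment" feature.'
  );
  if (!sure) return false;
  // Turning autodeploy off prevents the next push from replacing the pinned commit.
  await post(`/applications/${id}/settings`, { autodeploy: false });
  return true;
}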
@ -148,7 +167,8 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
async function getStatus() {
|
async function getStatus() {
|
||||||
if ($status.application.loading && stopping) return;
|
if (($status.application.loading && stopping) || $status.application.restarting === true)
|
||||||
|
return;
|
||||||
$status.application.loading = true;
|
$status.application.loading = true;
|
||||||
const data = await get(`/applications/${id}/status`);
|
const data = await get(`/applications/${id}/status`);
|
||||||
|
|
||||||
@ -166,24 +186,20 @@
|
|||||||
if ($status.application.statuses.length === 0) {
|
if ($status.application.statuses.length === 0) {
|
||||||
$status.application.overallStatus = 'stopped';
|
$status.application.overallStatus = 'stopped';
|
||||||
} else {
|
} else {
|
||||||
if ($status.application.statuses.length !== numberOfApplications) {
|
for (const oneStatus of $status.application.statuses) {
|
||||||
$status.application.overallStatus = 'degraded';
|
if (oneStatus.status.isExited || oneStatus.status.isRestarting) {
|
||||||
} else {
|
$status.application.overallStatus = 'degraded';
|
||||||
for (const oneStatus of $status.application.statuses) {
|
break;
|
||||||
if (oneStatus.status.isExited || oneStatus.status.isRestarting) {
|
}
|
||||||
$status.application.overallStatus = 'degraded';
|
if (oneStatus.status.isRunning) {
|
||||||
break;
|
$status.application.overallStatus = 'healthy';
|
||||||
}
|
}
|
||||||
if (oneStatus.status.isRunning) {
|
if (
|
||||||
$status.application.overallStatus = 'healthy';
|
!oneStatus.status.isExited &&
|
||||||
}
|
!oneStatus.status.isRestarting &&
|
||||||
if (
|
!oneStatus.status.isRunning
|
||||||
!oneStatus.status.isExited &&
|
) {
|
||||||
!oneStatus.status.isRestarting &&
|
$status.application.overallStatus = 'stopped';
|
||||||
!oneStatus.status.isRunning
|
|
||||||
) {
|
|
||||||
$status.application.overallStatus = 'stopped';
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
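The aggregation no longer marks an application degraded just because fewer containers report than expected; overall status is now derived purely from the per-container flags. A small standalone sketch of the resulting rule, with the ContainerStatus shape assumed from the flags referenced in the hunk:

// Sketch of the new overall-status aggregation (not the component itself).
type ContainerStatus = {
  status: { isExited: boolean; isRestarting: boolean; isRunning: boolean };
};

function overallStatus(statuses: ContainerStatus[]): 'stopped' | 'degraded' | 'healthy' {
  if (statuses.length === 0) return 'stopped';
  let overall: 'stopped' | 'degraded' | 'healthy' = 'stopped';
  for (const one of statuses) {
    // Any exited or restarting container marks the whole application degraded.
    if (one.status.isExited || one.status.isRestarting) {
      return 'degraded';
    }
    if (one.status.isRunning) {
      overall = 'healthy';
    }
    if (!one.status.isExited && !one.status.isRestarting && !one.status.isRunning) {
      overall = 'stopped';
    }
  }
  return overall;
}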
@ -244,14 +260,14 @@
|
|||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
{#if $page.url.pathname.startsWith(`/applications/${id}/configuration/`)}
|
{#if $page.url.pathname.startsWith(`/applications/${id}/configuration/`)}
|
||||||
<div class="px-2">
|
<div class="px-4">
|
||||||
{#if forceDelete}
|
{#if forceDelete}
|
||||||
<button
|
<button
|
||||||
on:click={() => deleteApplication(application.name, true)}
|
on:click={() => deleteApplication(application.name, true)}
|
||||||
disabled={!$appSession.isAdmin}
|
disabled={!$appSession.isAdmin}
|
||||||
class:bg-red-600={$appSession.isAdmin}
|
class:bg-red-600={$appSession.isAdmin}
|
||||||
class:hover:bg-red-500={$appSession.isAdmin}
|
class:hover:bg-red-500={$appSession.isAdmin}
|
||||||
class="btn btn-sm btn-error text-sm"
|
class="btn btn-sm btn-error hover:bg-red-700 text-sm w-64"
|
||||||
>
|
>
|
||||||
Force Delete Application
|
Force Delete Application
|
||||||
</button>
|
</button>
|
||||||
@ -261,7 +277,7 @@
|
|||||||
disabled={!$appSession.isAdmin}
|
disabled={!$appSession.isAdmin}
|
||||||
class:bg-red-600={$appSession.isAdmin}
|
class:bg-red-600={$appSession.isAdmin}
|
||||||
class:hover:bg-red-500={$appSession.isAdmin}
|
class:hover:bg-red-500={$appSession.isAdmin}
|
||||||
class="btn btn-sm btn-error text-sm"
|
class="btn btn-sm btn-error hover:bg-red-700 text-sm w-64"
|
||||||
>
|
>
|
||||||
Delete Application
|
Delete Application
|
||||||
</button>
|
</button>
|
||||||
@ -438,7 +454,7 @@
|
|||||||
<button
|
<button
|
||||||
class="btn btn-sm gap-2"
|
class="btn btn-sm gap-2"
|
||||||
disabled={!$isDeploymentEnabled}
|
disabled={!$isDeploymentEnabled}
|
||||||
on:click={() => handleDeploySubmit(false)}
|
on:click={() => handleDeploySubmit(true)}
|
||||||
>
|
>
|
||||||
{#if $status.application.overallStatus !== 'degraded'}
|
{#if $status.application.overallStatus !== 'degraded'}
|
||||||
<svg
|
<svg
|
||||||
@@ -9,6 +9,12 @@
 redirect: `/applications/${params.id}`
 };
 }
+if (application.simpleDockerfile) {
+return {
+status: 302,
+redirect: `/applications/${params.id}`
+};
+}
 const response = await get(`/applications/${params.id}/configuration/buildpack`);
 return {
 props: {
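Dockerfile-only applications have no buildpack to choose, so the buildpack page's load function now short-circuits with a 302 back to the application. A minimal sketch of that guard in isolation; stuff.application is assumed to carry the same fields as in the surrounding load functions.

// Sketch of the load-time guard, under the assumptions above.
import type { Load } from '@sveltejs/kit';

export const load: Load = async ({ params, stuff }) => {
  const { application } = stuff as { application?: { simpleDockerfile?: string | null } };
  if (application?.simpleDockerfile) {
    // Nothing to configure here for a raw-Dockerfile application.
    return {
      status: 302,
      redirect: `/applications/${params.id}`
    };
  }
  return {};
};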
@ -47,7 +53,7 @@
|
|||||||
|
|
||||||
const { id } = $page.params;
|
const { id } = $page.params;
|
||||||
|
|
||||||
let htmlUrl = application.gitSource.htmlUrl;
|
let htmlUrl = application.gitSource?.htmlUrl || null;
|
||||||
|
|
||||||
let scanning: boolean = true;
|
let scanning: boolean = true;
|
||||||
let foundConfig: any = null;
|
let foundConfig: any = null;
|
||||||
@ -312,30 +318,6 @@
|
|||||||
await getGitlabToken();
|
await getGitlabToken();
|
||||||
}
|
}
|
||||||
scanRepository(isPublicRepository);
|
scanRepository(isPublicRepository);
|
||||||
// let htmlUrl = application.gitSource.htmlUrl;
|
|
||||||
// const left = screen.width / 2 - 1020 / 2;
|
|
||||||
// const top = screen.height / 2 - 618 / 2;
|
|
||||||
// const newWindow = open(
|
|
||||||
// `${htmlUrl}/oauth/authorize?client_id=${
|
|
||||||
// application.gitSource.gitlabApp.appId
|
|
||||||
// }&redirect_uri=${getAPIUrl()}/webhooks/gitlab&response_type=code&scope=api+email+read_repository&state=${
|
|
||||||
// $page.params.id
|
|
||||||
// }`,
|
|
||||||
// 'GitLab',
|
|
||||||
// 'resizable=1, scrollbars=1, fullscreen=0, height=618, width=1020,top=' +
|
|
||||||
// top +
|
|
||||||
// ', left=' +
|
|
||||||
// left +
|
|
||||||
// ', toolbar=0, menubar=0, status=0'
|
|
||||||
// );
|
|
||||||
// const timer = setInterval(() => {
|
|
||||||
// if (newWindow?.closed) {
|
|
||||||
// clearInterval(timer);
|
|
||||||
// $appSession.tokens.gitlab = localStorage.getItem('gitLabToken');
|
|
||||||
// // localStorage.removeItem('gitLabToken' );
|
|
||||||
|
|
||||||
// }
|
|
||||||
// }, 100);
|
|
||||||
}
|
}
|
||||||
} else if (error.message === 'Bad credentials') {
|
} else if (error.message === 'Bad credentials') {
|
||||||
const { token } = await get(`/applications/${id}/configuration/githubToken`);
|
const { token } = await get(`/applications/${id}/configuration/githubToken`);
|
||||||
|
@ -70,7 +70,11 @@
|
|||||||
{$t('application.configuration.no_configurable_destination')}
|
{$t('application.configuration.no_configurable_destination')}
|
||||||
</div>
|
</div>
|
||||||
<div class="flex justify-center">
|
<div class="flex justify-center">
|
||||||
<a href="/destinations/new" sveltekit:prefetch class="add-icon bg-sky-600 hover:bg-sky-500">
|
<a
|
||||||
|
href={`/destinations/new?from=/applications/${id}/configuration/destination`}
|
||||||
|
sveltekit:prefetch
|
||||||
|
class="add-icon bg-sky-600 hover:bg-sky-500"
|
||||||
|
>
|
||||||
<svg
|
<svg
|
||||||
class="w-6"
|
class="w-6"
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
@ -0,0 +1,93 @@
|
|||||||
|
<script context="module" lang="ts">
|
||||||
|
import type { Load } from '@sveltejs/kit';
|
||||||
|
export const load: Load = async ({ fetch, params, url, stuff }) => {
|
||||||
|
try {
|
||||||
|
const { application } = stuff;
|
||||||
|
if (application?.destinationDockerId && !url.searchParams.get('from')) {
|
||||||
|
return {
|
||||||
|
status: 302,
|
||||||
|
redirect: `/applications/${params.id}`
|
||||||
|
};
|
||||||
|
}
|
||||||
|
const response = await get(`/settings`);
|
||||||
|
return {
|
||||||
|
props: {
|
||||||
|
...response
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
return {
|
||||||
|
status: 500,
|
||||||
|
error: new Error(`Could not load ${url}`)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<script lang="ts">
|
||||||
|
export let registries: any;
|
||||||
|
import { page } from '$app/stores';
|
||||||
|
import { goto } from '$app/navigation';
|
||||||
|
import { get, post } from '$lib/api';
|
||||||
|
import { errorNotification } from '$lib/common';
|
||||||
|
|
||||||
|
const { id } = $page.params;
|
||||||
|
const from = $page.url.searchParams.get('from');
|
||||||
|
|
||||||
|
async function handleSubmit(registryId: any) {
|
||||||
|
try {
|
||||||
|
await post(`/applications/${id}/configuration/registry`, { registryId });
|
||||||
|
return await goto(from || `/applications/${id}`);
|
||||||
|
} catch (error) {
|
||||||
|
return errorNotification(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<div class="flex flex-col justify-center w-full">
|
||||||
|
<div class="flex flex-col flex-wrap justify-center px-2 md:flex-row mx-auto gap-4">
|
||||||
|
{#if registries.length > 0}
|
||||||
|
{#each registries as registry}
|
||||||
|
<button
|
||||||
|
on:click={() => handleSubmit(registry.id)}
|
||||||
|
class="box-selection hover:bg-primary relative"
|
||||||
|
>
|
||||||
|
<svg
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
class="absolute top-0 left-0 -m-4 h-12 w-12 text-sky-500"
|
||||||
|
viewBox="0 0 24 24"
|
||||||
|
stroke-width="1.5"
|
||||||
|
stroke="currentColor"
|
||||||
|
fill="none"
|
||||||
|
stroke-linecap="round"
|
||||||
|
stroke-linejoin="round"
|
||||||
|
>
|
||||||
|
<path stroke="none" d="M0 0h24v24H0z" fill="none" />
|
||||||
|
<path
|
||||||
|
d="M22 12.54c-1.804 -.345 -2.701 -1.08 -3.523 -2.94c-.487 .696 -1.102 1.568 -.92 2.4c.028 .238 -.32 1.002 -.557 1h-14c0 5.208 3.164 7 6.196 7c4.124 .022 7.828 -1.376 9.854 -5c1.146 -.101 2.296 -1.505 2.95 -2.46z"
|
||||||
|
/>
|
||||||
|
<path d="M5 10h3v3h-3z" />
|
||||||
|
<path d="M8 10h3v3h-3z" />
|
||||||
|
<path d="M11 10h3v3h-3z" />
|
||||||
|
<path d="M8 7h3v3h-3z" />
|
||||||
|
<path d="M11 7h3v3h-3z" />
|
||||||
|
<path d="M11 4h3v3h-3z" />
|
||||||
|
<path d="M4.571 18c1.5 0 2.047 -.074 2.958 -.78" />
|
||||||
|
<line x1="10" y1="16" x2="10" y2="16.01" />
|
||||||
|
</svg>
|
||||||
|
|
||||||
|
<div class="font-bold text-xl text-center truncate">{registry.name}</div>
|
||||||
|
<div class="text-center truncate">{registry.url}</div>
|
||||||
|
</button>
|
||||||
|
{/each}
|
||||||
|
{:else}
|
||||||
|
<div class="flex flex-col items-center gap-2">
|
||||||
|
<div class="text-center text-xl font-bold pb-4">No registries found.</div>
|
||||||
|
<div class="flex gap-2">
|
||||||
|
<a class="btn btn-sm" href={from || `/applications/${id}`}>Go back</a>
|
||||||
|
<a class="btn btn-sm btn-primary" href={`/settings/docker`}>Add a Docker Registry</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
</div>
|
@@ -25,6 +25,8 @@
 </script>

 <script lang="ts">
+export let sources: any;
+
 import { page } from '$app/stores';
 import { goto } from '$app/navigation';
 import { get, post } from '$lib/api';
@@ -33,11 +35,12 @@
 import { appSession } from '$lib/store';
 import PublicRepository from './_PublicRepository.svelte';
 import DocLink from '$lib/components/DocLink.svelte';
+import Beta from '$lib/components/Beta.svelte';

 const { id } = $page.params;
 const from = $page.url.searchParams.get('from');
+let simpleDockerfile: any = null;

-export let sources: any;
 const filteredSources = sources.filter(
 (source: any) =>
 (source.type === 'github' && source.githubAppId && source.githubApp.installationId) ||
@@ -61,18 +64,20 @@
 return errorNotification(error);
 }
 }
-async function newSource() {
-const { id } = await post('/sources/new', {});
-return await goto(`/sources/${id}`, { replaceState: true });
+async function handleDockerImage() {
+try {
+await post(`/applications/${id}/configuration/source`, { simpleDockerfile });
+return await goto(from || `/applications/${id}/configuration/destination`);
+} catch (error) {
+return errorNotification(error);
+}
 }
 </script>

 <div class="max-w-screen-2xl mx-auto px-9">
 {#if !filteredSources}
 <div class="title pb-8">Git App</div>
-{/if}
-<div class="flex flex-wrap justify-center">
-{#if !filteredSources}
+<div class="flex flex-wrap justify-center">
 <div class="flex-col">
 <div class="pb-2 text-center font-bold">
 {$t('application.configuration.no_configurable_git')}
@@ -95,7 +100,13 @@
 </a>
 </div>
 </div>
-{:else}
+</div>
+{/if}
+{#if ownSources.length > 0 || otherSources.length > 0}
+<div class="title pb-8">Integrated with Git App</div>
+{/if}
+{#if ownSources.length > 0}
+<div class="flex flex-wrap justify-center">
 <div class="flex flex-col lg:flex-row lg:flex-wrap justify-center">
 {#each ownSources as source}
 <div class="p-2 relative">
@@ -240,11 +251,25 @@
 </div>
 {/each}
 </div>
-{/if}
-</div>
+</div>
+{/if}
 <div class="flex flex-row items-center">
-<div class="title py-4 pr-4">Public Repository</div>
+<div class="title py-4 pr-4">Public Repository from Git</div>
 <DocLink url="https://docs.coollabs.io/coolify/applications/#public-repository" />
 </div>
 <PublicRepository />
+<div class="flex flex-row items-center pt-10">
+<div class="title py-4 pr-4">Simple Dockerfile <Beta /></div>
+<DocLink url="https://docs.coollabs.io/coolify/applications/#dockerfile" />
+</div>
+<div class="mx-auto max-w-screen-2xl">
+<form class="flex flex-col" on:submit|preventDefault={handleDockerImage}>
+<div class="flex flex-col space-y-2 w-full">
+<div class="flex flex-row space-x-2">
+<textarea required class="w-full" rows="10" bind:value={simpleDockerfile} />
+<button class="btn btn-primary" type="submit">Deploy Dockerfile</button>
+</div>
+</div>
+</form>
+</div>
 </div>
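The new handleDockerImage path stores the pasted Dockerfile text as the application's source and jumps straight to destination selection. A condensed sketch of that round trip; post and goto stand in for the page's `$lib/api` and `$app/navigation` imports, and the wrapper function is illustrative rather than part of the codebase.

// Illustrative sketch of the "Simple Dockerfile" submit path shown above.
async function submitSimpleDockerfile(
  id: string,
  simpleDockerfile: string,
  post: (url: string, body: unknown) => Promise<unknown>,
  goto: (url: string) => Promise<unknown>,
  from?: string | null
) {
  // Persist the raw Dockerfile as the application source...
  await post(`/applications/${id}/configuration/source`, { simpleDockerfile });
  // ...then continue the wizard at destination selection.
  return goto(from || `/applications/${id}/configuration/destination`);
}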
@@ -61,7 +61,7 @@
 disabled={!$appSession.isAdmin}
 class:bg-red-600={$appSession.isAdmin}
 class:hover:bg-red-500={$appSession.isAdmin}
-class="btn btn-lg btn-error text-sm"
+class="btn btn-lg btn-error hover:bg-red-700 text-sm w-64"
 >
 Force Delete Application
 </button>
@@ -71,7 +71,7 @@
 on:click={() => deleteApplication(application.name, false)}
 type="submit"
 disabled={!$appSession.isAdmin}
-class="btn btn-lg btn-error hover:bg-red-700 text-sm"
+class="btn btn-lg btn-error hover:bg-red-700 text-sm w-64"
 >
 Delete Application
 </button>
@@ -124,7 +124,7 @@
 description={$t('application.enable_auto_deploy_webhooks')}
 />
 </div>
-{#if !application.settings.isBot}
+{#if !application.settings.isBot && !application.simpleDockerfile}
 <div class="grid grid-cols-2 items-center">
 <Setting
 id="previews"
@@ -50,25 +50,25 @@
 import Explainer from '$lib/components/Explainer.svelte';
 import { goto } from '$app/navigation';
 import Beta from '$lib/components/Beta.svelte';
+import { saveForm } from './utils';

 const { id } = $page.params;

 $: isDisabled =
 !$appSession.isAdmin ||
 $status.application.overallStatus === 'degraded' ||
 $status.application.overallStatus === 'healthy' ||
 $status.application.initialLoading;

 $isDeploymentEnabled = checkIfDeploymentEnabledApplications($appSession.isAdmin, application);
 let statues: any = {};
 let loading = {
 save: false,
 reloadCompose: false
 };
+let isSimpleDockerfile = !!application.simpleDockerfile;
 let fqdnEl: any = null;
 let forceSave = false;
 let isPublicRepository = application.settings?.isPublicRepository;
-let apiUrl = application.gitSource.apiUrl;
+let apiUrl = application.gitSource?.apiUrl;
 let branch = application.branch;
 let repository = application.repository;
 let debug = application.settings?.debug;
@@ -78,12 +78,12 @@
 let autodeploy = application.settings?.autodeploy;
 let isBot = application.settings?.isBot;
 let isDBBranching = application.settings?.isDBBranching;
-let htmlUrl = application.gitSource.htmlUrl;
+let htmlUrl = application.gitSource?.htmlUrl;

 let dockerComposeFile = JSON.parse(application.dockerComposeFile) || null;
 let dockerComposeServices: any[] = [];
-let dockerComposeFileLocation = application.dockerComposeFileLocation;
 let dockerComposeConfiguration = JSON.parse(application.dockerComposeConfiguration) || {};
+let originalDockerComposeFileLocation = application.dockerComposeFileLocation;

 let baseDatabaseBranch: any = application?.connectedDatabase?.hostedDatabaseDBName || null;
 let nonWWWDomain = application.fqdn && getDomain(application.fqdn).replace(/^www\./, '');
@@ -243,8 +243,12 @@
 if (toast) loading.save = true;
 try {
 nonWWWDomain = application.fqdn && getDomain(application.fqdn).replace(/^www\./, '');
-if (application.deploymentType)
+if (application.deploymentType) {
 application.deploymentType = application.deploymentType.toLowerCase();
+}
+if (originalDockerComposeFileLocation !== application.dockerComposeFileLocation) {
+await reloadCompose();
+}
 if (!isBot) {
 await post(`/applications/${id}/check`, {
 fqdn: application.fqdn,
@@ -263,21 +267,18 @@
 }
 }
 }
-await post(`/applications/${id}`, {
-...application,
-baseDatabaseBranch,
-dockerComposeConfiguration: JSON.stringify(dockerComposeConfiguration)
-});
+await saveForm(id, application, baseDatabaseBranch, dockerComposeConfiguration);
 setLocation(application, settings);
 $isDeploymentEnabled = checkIfDeploymentEnabledApplications($appSession.isAdmin, application);

 forceSave = false;
-toast &&
+if (toast) {
 addToast({
 message: 'Configuration saved.',
 type: 'success'
 });
+}

 if (application.fqdn && application.fqdn.startsWith('https')) {
 isHttps = true;
 } else {
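The inline POST of the whole form is replaced by a shared saveForm helper from './utils'. Its implementation is not part of this diff; based only on the two call sites visible here, saveForm(id, application) and saveForm(id, application, baseDatabaseBranch, dockerComposeConfiguration), an assumed sketch of what such a helper would do is:

// Assumed sketch of the shared saveForm helper; the body below just mirrors
// the POST it replaces and is not the actual implementation in './utils'.
import { post } from '$lib/api';

export async function saveForm(
  id: string,
  application: any,
  baseDatabaseBranch: any = null,
  dockerComposeConfiguration: any = {}
) {
  return await post(`/applications/${id}`, {
    ...application,
    baseDatabaseBranch,
    dockerComposeConfiguration: JSON.stringify(dockerComposeConfiguration)
  });
}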
@ -365,6 +366,9 @@
|
|||||||
async function reloadCompose() {
|
async function reloadCompose() {
|
||||||
if (loading.reloadCompose) return;
|
if (loading.reloadCompose) return;
|
||||||
loading.reloadCompose = true;
|
loading.reloadCompose = true;
|
||||||
|
const composeLocation = application.dockerComposeFileLocation.startsWith('/')
|
||||||
|
? application.dockerComposeFileLocation
|
||||||
|
: `/${application.dockerComposeFileLocation}`;
|
||||||
try {
|
try {
|
||||||
if (application.gitSource.type === 'github') {
|
if (application.gitSource.type === 'github') {
|
||||||
const headers = isPublicRepository
|
const headers = isPublicRepository
|
||||||
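The compose file location is now normalized to always carry a leading slash before the repository contents URL is built, so "docker-compose.yml" and "/docker-compose.yml" resolve to the same path. A tiny sketch of just that normalization:

// Sketch of the path normalization added above.
function normalizeComposeLocation(location: string): string {
  return location.startsWith('/') ? location : `/${location}`;
}

// Both calls yield "/docker-compose.yml".
normalizeComposeLocation('docker-compose.yml');
normalizeComposeLocation('/docker-compose.yml');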
@ -373,9 +377,10 @@
|
|||||||
Authorization: `token ${$appSession.tokens.github}`
|
Authorization: `token ${$appSession.tokens.github}`
|
||||||
};
|
};
|
||||||
const data = await get(
|
const data = await get(
|
||||||
`${apiUrl}/repos/${repository}/contents/${dockerComposeFileLocation}?ref=${branch}`,
|
`${apiUrl}/repos/${repository}/contents/${composeLocation}?ref=${branch}`,
|
||||||
{
|
{
|
||||||
...headers,
|
...headers,
|
||||||
|
'If-None-Match': '',
|
||||||
Accept: 'application/vnd.github.v2.json'
|
Accept: 'application/vnd.github.v2.json'
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
@ -405,7 +410,7 @@
|
|||||||
});
|
});
|
||||||
const dockerComposeFileYml = files.find(
|
const dockerComposeFileYml = files.find(
|
||||||
(file: { name: string; type: string }) =>
|
(file: { name: string; type: string }) =>
|
||||||
file.name === dockerComposeFileLocation && file.type === 'blob'
|
file.name === composeLocation && file.type === 'blob'
|
||||||
);
|
);
|
||||||
const id = dockerComposeFileYml.id;
|
const id = dockerComposeFileYml.id;
|
||||||
|
|
||||||
@ -424,12 +429,17 @@
|
|||||||
await handleSubmit(false);
|
await handleSubmit(false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
originalDockerComposeFileLocation = application.dockerComposeFileLocation;
|
||||||
addToast({
|
addToast({
|
||||||
message: 'Compose file reloaded.',
|
message: 'Compose file reloaded.',
|
||||||
type: 'success'
|
type: 'success'
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error: any) {
|
||||||
|
if (error.message === 'Not Found') {
|
||||||
|
error.message = `Can't find ${application.dockerComposeFileLocation} file.`;
|
||||||
|
errorNotification(error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
errorNotification(error);
|
errorNotification(error);
|
||||||
} finally {
|
} finally {
|
||||||
loading.reloadCompose = false;
|
loading.reloadCompose = false;
|
||||||
@ -462,7 +472,7 @@
|
|||||||
</script>
|
</script>
|
||||||
|
|
||||||
<div class="w-full">
|
<div class="w-full">
|
||||||
<form on:submit|preventDefault={() => handleSubmit()}>
|
<form id="saveForm" on:submit|preventDefault={() => handleSubmit()}>
|
||||||
<div class="mx-auto w-full">
|
<div class="mx-auto w-full">
|
||||||
<div class="flex flex-row border-b border-coolgray-500 mb-6 space-x-2">
|
<div class="flex flex-row border-b border-coolgray-500 mb-6 space-x-2">
|
||||||
<div class="title font-bold pb-3">General</div>
|
<div class="title font-bold pb-3">General</div>
|
||||||
@ -482,68 +492,148 @@
|
|||||||
<label for="name">{$t('forms.name')}</label>
|
<label for="name">{$t('forms.name')}</label>
|
||||||
<input name="name" id="name" class="w-full" bind:value={application.name} required />
|
<input name="name" id="name" class="w-full" bind:value={application.name} required />
|
||||||
</div>
|
</div>
|
||||||
<div class="grid grid-cols-2 items-center">
|
{#if !isSimpleDockerfile}
|
||||||
<label for="gitSource">{$t('application.git_source')}</label>
|
<div class="grid grid-cols-2 items-center">
|
||||||
{#if isDisabled || application.settings.isPublicRepository}
|
<label for="gitSource">{$t('application.git_source')}</label>
|
||||||
<input
|
{#if isDisabled || application.settings?.isPublicRepository}
|
||||||
disabled={isDisabled || application.settings.isPublicRepository}
|
<input
|
||||||
class="w-full"
|
disabled={isDisabled || application.settings?.isPublicRepository}
|
||||||
value={application.gitSource.name}
|
class="w-full"
|
||||||
/>
|
value={application.gitSource?.name}
|
||||||
{:else}
|
/>
|
||||||
<a
|
{:else}
|
||||||
href={`/applications/${id}/configuration/source?from=/applications/${id}`}
|
<a
|
||||||
class="no-underline"
|
href={`/applications/${id}/configuration/source?from=/applications/${id}`}
|
||||||
><input
|
class="no-underline"
|
||||||
value={application.gitSource.name}
|
><input
|
||||||
id="gitSource"
|
value={application.gitSource?.name}
|
||||||
class="cursor-pointer hover:bg-coolgray-500 w-full"
|
id="gitSource"
|
||||||
/></a
|
class="cursor-pointer hover:bg-coolgray-500 w-full"
|
||||||
>
|
/></a
|
||||||
{/if}
|
>
|
||||||
</div>
|
{/if}
|
||||||
<div class="grid grid-cols-2 items-center">
|
</div>
|
||||||
<label for="repository">{$t('application.git_repository')}</label>
|
<div class="grid grid-cols-2 items-center">
|
||||||
{#if isDisabled || application.settings.isPublicRepository}
|
<label for="repository">Git commit</label>
|
||||||
<input
|
<div class="flex gap-2">
|
||||||
class="w-full"
|
<input
|
||||||
disabled={isDisabled || application.settings.isPublicRepository}
|
id="commit"
|
||||||
value="{application.repository}/{application.branch}"
|
name="commit"
|
||||||
/>
|
class="w-full"
|
||||||
{:else}
|
disabled={isDisabled}
|
||||||
<a
|
placeholder="default: latest commit"
|
||||||
href={`/applications/${id}/configuration/repository?from=/applications/${id}&to=/applications/${id}/configuration/buildpack`}
|
bind:value={application.gitCommitHash}
|
||||||
class="no-underline"
|
/>
|
||||||
><input
|
<a
|
||||||
|
href="{application.gitSource
|
||||||
|
?.htmlUrl}/{application.repository}/commits/{application.branch}"
|
||||||
|
target="_blank noreferrer"
|
||||||
|
class="btn btn-primary text-xs"
|
||||||
|
>Commits<svg
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
fill="currentColor"
|
||||||
|
viewBox="0 0 24 24"
|
||||||
|
stroke-width="3"
|
||||||
|
stroke="currentColor"
|
||||||
|
class="w-3 h-3 text-white ml-2"
|
||||||
|
>
|
||||||
|
<path
|
||||||
|
stroke-linecap="round"
|
||||||
|
stroke-linejoin="round"
|
||||||
|
d="M4.5 19.5l15-15m0 0H8.25m11.25 0v11.25"
|
||||||
|
/>
|
||||||
|
</svg></a
|
||||||
|
>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="grid grid-cols-2 items-center">
|
||||||
|
<label for="repository">{$t('application.git_repository')}</label>
|
||||||
|
{#if isDisabled || application.settings?.isPublicRepository}
|
||||||
|
<input
|
||||||
|
class="w-full"
|
||||||
|
disabled={isDisabled || application.settings?.isPublicRepository}
|
||||||
value="{application.repository}/{application.branch}"
|
value="{application.repository}/{application.branch}"
|
||||||
id="repository"
|
/>
|
||||||
class="cursor-pointer hover:bg-coolgray-500 w-full"
|
{:else}
|
||||||
/></a
|
<a
|
||||||
>
|
href={`/applications/${id}/configuration/repository?from=/applications/${id}&to=/applications/${id}/configuration/buildpack`}
|
||||||
{/if}
|
class="no-underline"
|
||||||
</div>
|
><input
|
||||||
|
value="{application.repository}/{application.branch}"
|
||||||
|
id="repository"
|
||||||
|
class="cursor-pointer hover:bg-coolgray-500 w-full"
|
||||||
|
/></a
|
||||||
|
>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
<div class="grid grid-cols-2 items-center">
|
<div class="grid grid-cols-2 items-center">
|
||||||
<label for="buildPack">{$t('application.build_pack')} </label>
|
<label for="registry">Docker Registry</label>
|
||||||
{#if isDisabled}
|
{#if isDisabled}
|
||||||
<input class="capitalize w-full" disabled={isDisabled} value={application.buildPack} />
|
<input
|
||||||
|
class="capitalize w-full"
|
||||||
|
disabled={isDisabled}
|
||||||
|
value={application.dockerRegistry?.name || 'DockerHub (unauthenticated)'}
|
||||||
|
/>
|
||||||
{:else}
|
{:else}
|
||||||
<a
|
<a
|
||||||
href={`/applications/${id}/configuration/buildpack?from=/applications/${id}`}
|
href={`/applications/${id}/configuration/registry?from=/applications/${id}`}
|
||||||
class="no-underline"
|
class="no-underline"
|
||||||
>
|
>
|
||||||
<input
|
<input
|
||||||
value={application.buildPack}
|
value={application.dockerRegistry?.name || 'DockerHub (unauthenticated)'}
|
||||||
id="buildPack"
|
id="registry"
|
||||||
class="cursor-pointer hover:bg-coolgray-500 capitalize w-full"
|
class="cursor-pointer hover:bg-coolgray-500 capitalize w-full"
|
||||||
/></a
|
/></a
|
||||||
>
|
>
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
|
{#if application.dockerRegistry?.id && application.gitSourceId}
|
||||||
|
<div class="grid grid-cols-2 items-center">
|
||||||
|
<label for="registry"
|
||||||
|
>Push Image to Registry <Explainer
|
||||||
|
explanation="Push the build image to the specific Docker Registry.<br><br>This is useful if you want to use the image in other places. If you don't fill this the image will be only available on the server.<br><br>Tag is optional. If you don't fill it, the tag will be the same as the git commit hash."
|
||||||
|
/></label
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
name="dockerRegistryImageName"
|
||||||
|
id="dockerRegistryImageName"
|
||||||
|
readonly={isDisabled}
|
||||||
|
disabled={isDisabled}
|
||||||
|
class="w-full"
|
||||||
|
placeholder="e.g. coollabsio/myimage (tag will be commit sha) or coollabsio/myimage:tag"
|
||||||
|
bind:value={application.dockerRegistryImageName}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
{#if !isSimpleDockerfile}
|
||||||
|
<div class="grid grid-cols-2 items-center">
|
||||||
|
<label for="buildPack">{$t('application.build_pack')} </label>
|
||||||
|
{#if isDisabled}
|
||||||
|
<input
|
||||||
|
class="capitalize w-full"
|
||||||
|
disabled={isDisabled}
|
||||||
|
value={application.buildPack}
|
||||||
|
/>
|
||||||
|
{:else}
|
||||||
|
<a
|
||||||
|
href={`/applications/${id}/configuration/buildpack?from=/applications/${id}`}
|
||||||
|
class="no-underline"
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
value={application.buildPack}
|
||||||
|
id="buildPack"
|
||||||
|
class="cursor-pointer hover:bg-coolgray-500 capitalize w-full"
|
||||||
|
/></a
|
||||||
|
>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
<div class="grid grid-cols-2 items-center">
|
<div class="grid grid-cols-2 items-center">
|
||||||
<label for="destination">{$t('application.destination')}</label>
|
<label for="destination">{$t('application.destination')}</label>
|
||||||
<div class="no-underline">
|
<div class="no-underline">
|
||||||
<input
|
<input
|
||||||
value={application.destinationDocker.name}
|
value={application.destinationDocker?.name}
|
||||||
id="destination"
|
id="destination"
|
||||||
disabled
|
disabled
|
||||||
class="bg-transparent w-full"
|
class="bg-transparent w-full"
|
||||||
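The new "Push Image to Registry" field accepts either a bare image name or name:tag; per the explainer text, a missing tag falls back to the git commit SHA. A hypothetical helper showing that split (not from the codebase, and the naive split also assumes the value carries no registry host with a port):

// Illustrative helper: split dockerRegistryImageName into repository and tag,
// defaulting the tag to the commit SHA as the explainer describes.
function resolveImageTag(dockerRegistryImageName: string, commitSha: string) {
  const [repository, tag] = dockerRegistryImageName.split(':');
  return { repository, tag: tag || commitSha };
}

// resolveImageTag('coollabsio/myimage', 'abc123')    -> { repository: 'coollabsio/myimage', tag: 'abc123' }
// resolveImageTag('coollabsio/myimage:v1', 'abc123') -> { repository: 'coollabsio/myimage', tag: 'v1' }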
@ -648,7 +738,44 @@
|
|||||||
{/if}
|
{/if}
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
{#if application.buildPack !== 'compose'}
|
{#if isSimpleDockerfile}
|
||||||
|
<div class="title font-bold pb-3 pt-10 border-b border-coolgray-500 mb-6">
|
||||||
|
Configuration
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="grid grid-flow-row gap-2 px-4 pr-5">
|
||||||
|
<div class="grid grid-cols-2 items-center pt-4">
|
||||||
|
<label for="simpleDockerfile">Dockerfile</label>
|
||||||
|
<div class="flex gap-2">
|
||||||
|
<textarea
|
||||||
|
rows="10"
|
||||||
|
id="simpleDockerfile"
|
||||||
|
name="simpleDockerfile"
|
||||||
|
class="w-full"
|
||||||
|
disabled={isDisabled}
|
||||||
|
bind:value={application.simpleDockerfile}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="grid grid-cols-2 items-center">
|
||||||
|
<label for="port"
|
||||||
|
>{$t('forms.port')}
|
||||||
|
<Explainer
|
||||||
|
explanation={'The port your application listens inside the docker container.'}
|
||||||
|
/></label
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
class="w-full"
|
||||||
|
disabled={isDisabled}
|
||||||
|
readonly={!$appSession.isAdmin}
|
||||||
|
name="port"
|
||||||
|
id="port"
|
||||||
|
bind:value={application.port}
|
||||||
|
placeholder="{$t('forms.default')}: 3000"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{:else if application.buildPack !== 'compose'}
|
||||||
<div class="title font-bold pb-3 pt-10 border-b border-coolgray-500 mb-6">
|
<div class="title font-bold pb-3 pt-10 border-b border-coolgray-500 mb-6">
|
||||||
Configuration
|
Configuration
|
||||||
</div>
|
</div>
|
||||||
@ -753,7 +880,7 @@
|
|||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
<div class="text-center bg-green-600 rounded">
|
<div class="text-center bg-green-600 rounded">
|
||||||
Connected to {application.connectedDatabase.databaseId}
|
Connected to {application.connectedDatabase?.databaseId}
|
||||||
</div>
|
</div>
|
||||||
{/if}
|
{/if}
|
||||||
{/if}
|
{/if}
|
||||||
@ -959,6 +1086,7 @@
|
|||||||
placeholder="default: /Dockerfile"
|
placeholder="default: /Dockerfile"
|
||||||
/>
|
/>
|
||||||
{#if application.baseDirectory}
|
{#if application.baseDirectory}
|
||||||
|
<!-- svelte-ignore a11y-label-has-associated-control -->
|
||||||
<label class="label">
|
<label class="label">
|
||||||
<span class="label-text-alt text-xs"
|
<span class="label-text-alt text-xs"
|
||||||
>Path: {application.baseDirectory.replace(
|
>Path: {application.baseDirectory.replace(
|
||||||
@ -1008,6 +1136,25 @@
|
|||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
<div class="grid grid-flow-row gap-2">
|
<div class="grid grid-flow-row gap-2">
|
||||||
|
<div class="grid grid-cols-2 items-center px-8 pb-4">
|
||||||
|
<label for="dockerComposeFileLocation"
|
||||||
|
>Docker Compose File Location
|
||||||
|
<Explainer
|
||||||
|
explanation="You can specify a custom docker compose file location. <br> Should be absolute path, like <span class='text-settings font-bold'>/data/docker-compose.yml</span> or <span class='text-settings font-bold'>/docker-compose.yml.</span>"
|
||||||
|
/>
|
||||||
|
</label>
|
||||||
|
<div>
|
||||||
|
<input
|
||||||
|
class="w-full"
|
||||||
|
disabled={isDisabled}
|
||||||
|
readonly={!$appSession.isAdmin}
|
||||||
|
name="dockerComposeFileLocation"
|
||||||
|
id="dockerComposeFileLocation"
|
||||||
|
bind:value={application.dockerComposeFileLocation}
|
||||||
|
placeholder="eg: /docker-compose.yml"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
{#each dockerComposeServices as service}
|
{#each dockerComposeServices as service}
|
||||||
<div
|
<div
|
||||||
class="grid items-center bg-coolgray-100 rounded border border-coolgray-300 p-2 px-4"
|
class="grid items-center bg-coolgray-100 rounded border border-coolgray-300 p-2 px-4"
|
||||||
@ -1083,7 +1230,7 @@
|
|||||||
readonly={!$appSession.isAdmin}
|
readonly={!$appSession.isAdmin}
|
||||||
name="port"
|
name="port"
|
||||||
id="port"
|
id="port"
|
||||||
required={!!dockerComposeConfiguration[service.name].fqdn}
|
required={!!dockerComposeConfiguration[service.name]?.fqdn}
|
||||||
bind:value={dockerComposeConfiguration[service.name].port}
|
bind:value={dockerComposeConfiguration[service.name].port}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
@@ -55,7 +55,7 @@
 branch: preview.sourceBranch
 });
 addToast({
-message: 'Deployment queued',
+message: 'Deployment queued.',
 type: 'success'
 });
 if ($page.url.pathname.startsWith(`/applications/${id}/logs/build`)) {
@@ -183,9 +183,7 @@
 <div class="flex justify-center py-4 text-center text-xl font-bold">Loading...</div>
 </div>
 {:else if application.previewApplication.length > 0}
-<div
-class="grid grid-col gap-4 auto-cols-max grid-cols-1 md:grid-cols-2 lg:grid-cols-2 px-6"
->
+<div class="grid grid-col gap-4 auto-cols-max grid-cols-1 md:grid-cols-2 lg:grid-cols-2 px-6">
 {#each application.previewApplication as preview}
 <div class="no-underline mb-5 w-full">
 <div class="w-full rounded p-5 bg-coolgray-200 indicator">
@@ -216,7 +214,12 @@

 <div class="flex justify-end items-end space-x-2 h-10">
 {#if preview.customDomain}
-<a id="openpreview" href={preview.customDomain} target="_blank noreferrer" class="icons">
+<a
+id="openpreview"
+href={preview.customDomain}
+target="_blank noreferrer"
+class="icons"
+>
 <svg
 xmlns="http://www.w3.org/2000/svg"
 class="h-6 w-6"
167  apps/ui/src/routes/applications/[id]/revert.svelte  Normal file
@@ -0,0 +1,167 @@
+<script context="module" lang="ts">
+import type { Load } from '@sveltejs/kit';
+export const load: Load = async ({ fetch, params, stuff, url }) => {
+try {
+const response = await get(`/applications/${params.id}/images`);
+return {
+props: {
+application: stuff.application,
+...response
+}
+};
+} catch (error) {
+return {
+status: 500,
+error: new Error(`Could not load ${url}`)
+};
+}
+};
+</script>
+
+<script lang="ts">
+export let application: any;
+export let imagesAvailables: any;
+export let runningImage: any;
+import { page } from '$app/stores';
+import { get, post } from '$lib/api';
+import { status, addToast } from '$lib/store';
+import { errorNotification } from '$lib/common';
+import Explainer from '$lib/components/Explainer.svelte';
+
+const { id } = $page.params;
+let remoteImage: any = null;
+
+async function revertToLocal(image: any) {
+const sure = confirm(`Are you sure you want to revert to ${image.tag} ?`);
+if (sure) {
+try {
+$status.application.initialLoading = true;
+$status.application.loading = true;
+const imageId = `${image.repository}:${image.tag}`;
+await post(`/applications/${id}/restart`, { imageId });
+addToast({
+type: 'success',
+message: 'Revert successful.'
+});
+} catch (error) {
+return errorNotification(error);
+} finally {
+$status.application.initialLoading = false;
+$status.application.loading = false;
+}
+}
+}
+async function revertToRemote() {
+const sure = confirm(`Are you sure you want to revert to ${remoteImage} ?`);
+if (sure) {
+try {
+$status.application.initialLoading = true;
+$status.application.loading = true;
+$status.application.restarting = true;
+await post(`/applications/${id}/restart`, { imageId: remoteImage });
+addToast({
+type: 'success',
+message: 'Revert successful.'
+});
+} catch (error) {
+return errorNotification(error);
+} finally {
+$status.application.initialLoading = false;
+$status.application.loading = false;
+$status.application.restarting = false;
+}
+}
+}
+</script>
+
+<div class="w-full">
+<div class="mx-auto w-full">
+<div class="flex flex-row border-b border-coolgray-500 mb-6 space-x-2">
+<div class="title font-bold pb-3">
+Revert <Explainer
+position="dropdown-bottom"
+explanation="You can revert application to a previously built image. Currently only locally stored images
+supported."
+/>
+</div>
+</div>
+<div class="pb-4 text-xs">
+If you do not want the next commit to overwrite the reverted application, temporary disable <span
+class="text-yellow-400 font-bold">Automatic Deployment</span
+>
+feature <a href={`/applications/${id}/features`}>here</a>.
+</div>
+{#if imagesAvailables.length > 0}
+<div class="text-xl font-bold pb-3">Local Images</div>
+<div
+class="px-4 lg:pb-10 pb-6 flex flex-wrap items-center justify-center lg:justify-start gap-8"
+>
+{#each imagesAvailables as image}
+<div class="gap-2 py-4 m-2">
+<div class="flex flex-col justify-center items-center">
+<div class="text-xl font-bold">
+{image.tag}
+</div>
+<div>
+<a
+class="flex no-underline text-xs my-4"
+href="{application.gitSource.htmlUrl}/{application.repository}/commit/{image.tag}"
+target="_blank noreferrer"
+>
+<button class="btn btn-sm">
+Check Commit
+<svg
+xmlns="http://www.w3.org/2000/svg"
+fill="currentColor"
+viewBox="0 0 24 24"
+stroke-width="3"
+stroke="currentColor"
+class="w-3 h-3 text-white ml-2"
+>
+<path
+stroke-linecap="round"
+stroke-linejoin="round"
+d="M4.5 19.5l15-15m0 0H8.25m11.25 0v11.25"
+/>
+</svg>
+</button></a
+>
+{#if image.repository + ':' + image.tag !== runningImage}
+<button
+class="btn btn-sm btn-primary w-full"
+on:click={() => revertToLocal(image)}>Revert Now</button
+>
+{:else}
+<button
+class="btn btn-sm btn-primary w-full btn-disabled bg-transparent underline"
+>Currently Used</button
+>
+{/if}
+</div>
+</div>
+</div>
+{/each}
+</div>
+{:else}
+<div class="flex flex-col pb-10">
+<div class="text-xl font-bold">No Local images available</div>
+</div>
+{/if}
+<div class="text-xl font-bold pb-3">
+Remote Images (Docker Registry) <Explainer
+position="dropdown-bottom"
+explanation="If the image is not available or you are unauthorized to access it, you will not be able to revert to it."
+/>
+</div>
+<form on:submit|preventDefault={revertToRemote}>
+<input
+id="dockerImage"
+name="dockerImage"
+required
+placeholder="coollabsio/coolify:0.0.1"
+bind:value={remoteImage}
+/>
+<button class="btn btn-sm btn-primary" type="submit">Revert Now</button>
+</form>
+</div>
+</div>
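Both revert paths in this new page end in the same call: a POST to the restart endpoint with an imageId, either `repository:tag` of a locally stored build or a full registry reference typed into the form. A minimal sketch of that shared call (the wrapper function is illustrative, not part of the file):

// Illustrative wrapper around the call both revert paths make above.
import { post } from '$lib/api';

async function revertTo(id: string, imageId: string) {
  // e.g. imageId = `${image.repository}:${image.tag}` for a local image,
  // or a remote reference such as 'coollabsio/coolify:0.0.1'.
  return await post(`/applications/${id}/restart`, { imageId });
}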
@@ -20,6 +20,7 @@

 <script lang="ts">
 export let secrets: any;
+export let application: any;
 export let previewSecrets: any;
 import pLimit from 'p-limit';
 import { page } from '$app/stores';
@@ -28,7 +29,6 @@
 import Secret from './_Secret.svelte';
 import PreviewSecret from './_PreviewSecret.svelte';
 import { errorNotification } from '$lib/common';
-import { t } from '$lib/translations';
 import Explainer from '$lib/components/Explainer.svelte';

 const limit = pLimit(1);
@@ -110,6 +110,7 @@
 <div class="lg:pt-0 pt-10">
 <Secret on:refresh={refreshSecrets} length={secrets.length} isNewSecret />
 </div>
+{#if !application.settings.isBot && !application.simpleDockerfile}
 <div class="flex flex-row border-b border-coolgray-500 mb-6 space-x-2">
 <div class="title font-bold pb-3 pt-8">
 Preview Secrets <Explainer
@@ -133,6 +134,7 @@
 {:else}
 Add secrets first to see Preview Secrets.
 {/if}
+{/if}
 </div>
 <form on:submit|preventDefault={getValues} class="mb-12 w-full">
 <div class="flex flex-row border-b border-coolgray-500 mb-6 space-x-2 pt-10">
Some files were not shown because too many files have changed in this diff.