diff --git a/.gitignore b/.gitignore index eddb26abd..ddc188f8c 100644 --- a/.gitignore +++ b/.gitignore @@ -13,4 +13,5 @@ apps/api/db/*.db local-serve apps/api/db/migration.db-journal apps/api/core* -logs \ No newline at end of file +logs +others/certificates \ No newline at end of file diff --git a/apps/api/package.json b/apps/api/package.json index f3635517b..8955c00f8 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -20,6 +20,7 @@ "@fastify/cors": "8.1.0", "@fastify/env": "4.1.0", "@fastify/jwt": "6.3.2", + "@fastify/multipart": "7.2.0", "@fastify/static": "6.5.0", "@iarna/toml": "2.2.5", "@ladjs/graceful": "3.0.2", @@ -49,6 +50,7 @@ "p-all": "4.0.0", "p-throttle": "5.0.0", "public-ip": "6.0.1", + "pump": "^3.0.0", "ssh-config": "4.1.6", "strip-ansi": "7.0.1", "unique-names-generator": "4.7.1" diff --git a/apps/api/prisma/migrations/20220922064605_custom_certificates/migration.sql b/apps/api/prisma/migrations/20220922064605_custom_certificates/migration.sql new file mode 100644 index 000000000..804913038 --- /dev/null +++ b/apps/api/prisma/migrations/20220922064605_custom_certificates/migration.sql @@ -0,0 +1,10 @@ +-- CreateTable +CREATE TABLE "Certificate" ( + "id" TEXT NOT NULL PRIMARY KEY, + "key" TEXT NOT NULL, + "cert" TEXT NOT NULL, + "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" DATETIME NOT NULL, + "teamId" TEXT, + CONSTRAINT "Certificate_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE +); diff --git a/apps/api/prisma/migrations/20220923122227_custom_ssl_for_applications/migration.sql b/apps/api/prisma/migrations/20220923122227_custom_ssl_for_applications/migration.sql new file mode 100644 index 000000000..b9261a955 --- /dev/null +++ b/apps/api/prisma/migrations/20220923122227_custom_ssl_for_applications/migration.sql @@ -0,0 +1,23 @@ +-- RedefineTables +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_ApplicationSettings" ( + "id" TEXT NOT NULL PRIMARY KEY, + "applicationId" TEXT NOT NULL, + "dualCerts" BOOLEAN NOT NULL DEFAULT false, + "debug" BOOLEAN NOT NULL DEFAULT false, + "previews" BOOLEAN NOT NULL DEFAULT false, + "autodeploy" BOOLEAN NOT NULL DEFAULT true, + "isBot" BOOLEAN NOT NULL DEFAULT false, + "isPublicRepository" BOOLEAN NOT NULL DEFAULT false, + "isDBBranching" BOOLEAN NOT NULL DEFAULT false, + "isCustomSSL" BOOLEAN NOT NULL DEFAULT false, + "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" DATETIME NOT NULL, + CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE +); +INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isDBBranching", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isDBBranching", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings"; +DROP TABLE "ApplicationSettings"; +ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings"; +CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId"); +PRAGMA foreign_key_check; +PRAGMA foreign_keys=ON; diff --git a/apps/api/prisma/schema.prisma b/apps/api/prisma/schema.prisma index a7500539d..d064f707a 100644 --- a/apps/api/prisma/schema.prisma +++ b/apps/api/prisma/schema.prisma @@ -8,6 +8,16 @@ datasource db { url = env("COOLIFY_DATABASE_URL") } +model Certificate { + id 
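// Sketch only, not code shipped in this PR: the "@fastify/multipart" and "pump" dependencies added
// above, together with the new Certificate table, imply an upload handler roughly like the one
// below. The route path, multipart field names and import path are assumptions for illustration;
// the actual certificate route handlers are not included in this section of the diff.
import { FastifyInstance } from 'fastify';
import { encrypt, prisma } from '../lib/common';

export async function certificateUpload(fastify: FastifyInstance) {
    fastify.post('/upload', async (request: any) => {
        const teamId = request.user.teamId;
        let key = '';
        let cert = '';
        // @fastify/multipart exposes uploaded files as an async iterator; the fileSize limit of
        // 100000 bytes registered in index.ts applies to each part. (pump would be the usual
        // choice for streaming larger files to disk; buffering is enough for PEM files.)
        for await (const part of request.files()) {
            const content = (await part.toBuffer()).toString();
            if (part.fieldname === 'key') key = content;
            if (part.fieldname === 'cert') cert = content;
        }
        // The private key is stored encrypted, matching how other secrets are handled in lib/common.
        return await prisma.certificate.create({
            data: { cert, key: encrypt(key), team: { connect: { id: teamId } } }
        });
    });
}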
String @id @default(cuid()) + key String + cert String + team Team? @relation(fields: [teamId], references: [id]) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + teamId String? +} + model Setting { id String @id @default(cuid()) fqdn String? @unique @@ -70,6 +80,7 @@ model Team { gitLabApps GitlabApp[] service Service[] users User[] + certificate Certificate[] } model TeamInvitation { @@ -161,6 +172,7 @@ model ApplicationSettings { isBot Boolean @default(false) isPublicRepository Boolean @default(false) isDBBranching Boolean @default(false) + isCustomSSL Boolean @default(false) createdAt DateTime @default(now()) updatedAt DateTime @updatedAt application Application @relation(fields: [applicationId], references: [id]) diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index 902596459..acbae3d71 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -3,6 +3,7 @@ import cors from '@fastify/cors'; import serve from '@fastify/static'; import env from '@fastify/env'; import cookie from '@fastify/cookie'; +import multipart from '@fastify/multipart'; import path, { join } from 'path'; import autoLoad from '@fastify/autoload'; import { asyncExecShell, createRemoteEngineConfiguration, getDomain, isDev, listSettings, prisma, version } from './lib/common'; @@ -31,6 +32,7 @@ prisma.setting.findFirst().then(async (settings) => { logger: settings?.isAPIDebuggingEnabled || false, trustProxy: true }); + const schema = { type: 'object', required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'], @@ -88,13 +90,13 @@ prisma.setting.findFirst().then(async (settings) => { return reply.status(200).sendFile('index.html'); }); } + fastify.register(multipart, { limits: { fileSize: 100000 } }); fastify.register(autoLoad, { dir: join(__dirname, 'plugins') }); fastify.register(autoLoad, { dir: join(__dirname, 'routes') }); - fastify.register(cookie) fastify.register(cors); fastify.addHook('onRequest', async (request, reply) => { @@ -145,11 +147,16 @@ prisma.setting.findFirst().then(async (settings) => { scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:cleanupStorage") }, isDev ? 
6000 : 60000 * 10) - // checkProxies + // checkProxies and checkFluentBit setInterval(async () => { scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:checkProxies") + scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:checkFluentBit") }, 10000) + setInterval(async () => { + scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:copySSLCertificates") + }, 2000) + // cleanupPrismaEngines // setInterval(async () => { // scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:cleanupPrismaEngines") diff --git a/apps/api/src/jobs/deployApplication.ts b/apps/api/src/jobs/deployApplication.ts index 55a7ef461..89859620f 100644 --- a/apps/api/src/jobs/deployApplication.ts +++ b/apps/api/src/jobs/deployApplication.ts @@ -154,7 +154,7 @@ import * as buildpacks from '../lib/buildPacks'; startCommand = configuration.startCommand; buildCommand = configuration.buildCommand; publishDirectory = configuration.publishDirectory; - baseDirectory = configuration.baseDirectory; + baseDirectory = configuration.baseDirectory || ''; dockerFileLocation = configuration.dockerFileLocation; denoMainFile = configuration.denoMainFile; const commit = await importers[gitSource.type]({ diff --git a/apps/api/src/jobs/infrastructure.ts b/apps/api/src/jobs/infrastructure.ts index 23e380610..e0b2d3a61 100644 --- a/apps/api/src/jobs/infrastructure.ts +++ b/apps/api/src/jobs/infrastructure.ts @@ -1,8 +1,9 @@ import { parentPort } from 'node:worker_threads'; import axios from 'axios'; import { compareVersions } from 'compare-versions'; -import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma, startTraefikTCPProxy, generateDatabaseConfiguration, startTraefikProxy, listSettings, version, createRemoteEngineConfiguration } from '../lib/common'; - +import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma, startTraefikTCPProxy, generateDatabaseConfiguration, startTraefikProxy, listSettings, version, createRemoteEngineConfiguration, decrypt, executeSSHCmd } from '../lib/common'; +import { checkContainer } from '../lib/docker'; +import fs from 'fs/promises' async function autoUpdater() { try { const currentVersion = version; @@ -39,6 +40,68 @@ async function autoUpdater() { } } catch (error) { } } +async function checkFluentBit() { + if (!isDev) { + const engine = '/var/run/docker.sock'; + const { id } = await prisma.destinationDocker.findFirst({ + where: { engine, network: 'coolify' } + }); + const { found } = await checkContainer({ dockerId: id, container: 'coolify-fluentbit' }); + if (!found) { + await asyncExecShell(`env | grep COOLIFY > .env`); + await asyncExecShell(`docker compose up -d fluent-bit`); + } + } +} +async function copyRemoteCertificates(id: string, dockerId: string, remoteIpAddress: string) { + try { + await asyncExecShell(`scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/`) + await executeSSHCmd({ dockerId, command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'` }) + await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` }) + await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` }) + } catch (error) { + console.log({ error }) + } +} +async function copyLocalCertificates(id: string) { + 
try { + await asyncExecShell(`docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`) + await asyncExecShell(`docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/`) + await asyncExecShell(`docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/`) + } catch (error) { + console.log({ error }) + } +} +async function copySSLCertificates() { + try { + const pAll = await import('p-all'); + const actions = [] + const certificates = await prisma.certificate.findMany({ include: { team: true } }) + const teamIds = certificates.map(c => c.teamId) + const destinations = await prisma.destinationDocker.findMany({ where: { isCoolifyProxyUsed: true, teams: { some: { id: { in: [...teamIds] } } } } }) + for (const certificate of certificates) { + const { id, key, cert } = certificate + const decryptedKey = decrypt(key) + await fs.writeFile(`/tmp/${id}-key.pem`, decryptedKey) + await fs.writeFile(`/tmp/${id}-cert.pem`, cert) + for (const destination of destinations) { + if (destination.remoteEngine) { + if (destination.remoteVerified) { + const { id: dockerId, remoteIpAddress } = destination + actions.push(async () => copyRemoteCertificates(id, dockerId, remoteIpAddress)) + } + } else { + actions.push(async () => copyLocalCertificates(id)) + } + } + } + await pAll.default(actions, { concurrency: 1 }) + } catch (error) { + console.log(error) + } finally { + await asyncExecShell(`find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete`) + } +} async function checkProxies() { try { const { default: isReachable } = await import('is-port-reachable'); @@ -189,7 +252,8 @@ async function cleanupStorage() { (async () => { let status = { cleanupStorage: false, - autoUpdater: false + autoUpdater: false, + copySSLCertificates: false, } if (parentPort) { parentPort.on('message', async (message) => { @@ -215,6 +279,18 @@ async function cleanupStorage() { await checkProxies(); return; } + if (message === 'action:checkFluentBit') { + await checkFluentBit(); + return; + } + if (message === 'action:copySSLCertificates') { + if (!status.copySSLCertificates) { + status.copySSLCertificates = true + await copySSLCertificates(); + status.copySSLCertificates = false + } + return; + } if (message === 'action:autoUpdater') { if (!status.cleanupStorage) { status.autoUpdater = true diff --git a/apps/api/src/lib/buildPacks/common.ts b/apps/api/src/lib/buildPacks/common.ts index 44812a76d..37a4b023c 100644 --- a/apps/api/src/lib/buildPacks/common.ts +++ b/apps/api/src/lib/buildPacks/common.ts @@ -342,13 +342,13 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st } if (buildPack === 'laravel') { payload.baseImage = 'webdevops/php-apache:8.2-alpine'; + payload.baseImages = phpVersions; payload.baseBuildImage = 'node:18'; payload.baseBuildImages = nodeVersions; } if (buildPack === 'heroku') { payload.baseImage = 'heroku/buildpacks:20'; payload.baseImages = herokuVersions; - } return payload; } @@ -384,7 +384,7 @@ export const setDefaultConfiguration = async (data: any) => { if (!publishDirectory) publishDirectory = template?.publishDirectory || null; if (baseDirectory) { if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`; - if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`; + if (baseDirectory.endsWith('/') && baseDirectory !== '/') baseDirectory = baseDirectory.slice(0, -1); } if (dockerFileLocation) { if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`; 
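// Sketch only, not code shipped in this PR: the copySSLCertificates() job added in
// infrastructure.ts above copies the .pem files into coolify-proxy, but Traefik still has to be
// pointed at them through its dynamic configuration, which Coolify serves from the traefik
// webhook routes that are not part of this section. The helper below illustrates the expected
// shape of that configuration; the function name is hypothetical.
import { prisma } from '../lib/common';

export async function customCertificateConfig(teamIds: string[]) {
    const certificates = await prisma.certificate.findMany({
        where: { teamId: { in: teamIds } }
    });
    // Traefik's dynamic configuration accepts a tls.certificates list of cert/key file pairs;
    // the paths below match the docker cp targets used by the copy jobs above.
    return {
        tls: {
            certificates: certificates.map((certificate) => ({
                certFile: `/etc/traefik/acme/custom/${certificate.id}-cert.pem`,
                keyFile: `/etc/traefik/acme/custom/${certificate.id}-key.pem`
            }))
        }
    };
}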
diff --git a/apps/api/src/lib/buildPacks/docker.ts b/apps/api/src/lib/buildPacks/docker.ts index c017b4bed..e8cfd89bc 100644 --- a/apps/api/src/lib/buildPacks/docker.ts +++ b/apps/api/src/lib/buildPacks/docker.ts @@ -14,12 +14,8 @@ export default async function (data) { dockerFileLocation } = data try { - const file = `${workdir}${dockerFileLocation}`; - let dockerFileOut = `${workdir}`; - if (baseDirectory) { - dockerFileOut = `${workdir}${baseDirectory}`; - workdir = `${workdir}${baseDirectory}`; - } + const file = `${workdir}${baseDirectory}${dockerFileLocation}`; + data.workdir = `${workdir}${baseDirectory}`; const Dockerfile: Array = (await fs.readFile(`${file}`, 'utf8')) .toString() .trim() @@ -28,7 +24,6 @@ export default async function (data) { if (secrets.length > 0) { secrets.forEach((secret) => { if (secret.isBuildSecret) { - // TODO: fix secrets if ( (pullmergeRequestId && secret.isPRMRSecret) || (!pullmergeRequestId && !secret.isPRMRSecret) @@ -45,7 +40,7 @@ export default async function (data) { }); } - await fs.writeFile(`${dockerFileOut}${dockerFileLocation}`, Dockerfile.join('\n')); + await fs.writeFile(`${workdir}${dockerFileLocation}`, Dockerfile.join('\n')); await buildImage(data); } catch (error) { throw error; diff --git a/apps/api/src/lib/buildPacks/heroku.ts b/apps/api/src/lib/buildPacks/heroku.ts index 3efdeaf6a..c9608aba2 100644 --- a/apps/api/src/lib/buildPacks/heroku.ts +++ b/apps/api/src/lib/buildPacks/heroku.ts @@ -2,38 +2,16 @@ import { executeDockerCmd, prisma } from "../common" import { saveBuildLog } from "./common"; export default async function (data: any): Promise { - const { buildId, applicationId, tag, dockerId, debug, workdir } = data + const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory } = data try { - await saveBuildLog({ line: `Building image started.`, buildId, applicationId }); - const { stdout } = await executeDockerCmd({ + await executeDockerCmd({ + debug, dockerId, - command: `pack build -p ${workdir} ${applicationId}:${tag} --builder heroku/buildpacks:20` + command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder heroku/buildpacks:20` }) - if (debug) { - const array = stdout.split('\n') - for (const line of array) { - if (line !== '\n') { - await saveBuildLog({ - line: `${line.replace('\n', '')}`, - buildId, - applicationId - }); - } - } - } await saveBuildLog({ line: `Building image successful.`, buildId, applicationId }); } catch (error) { - const array = error.stdout.split('\n') - for (const line of array) { - if (line !== '\n') { - await saveBuildLog({ - line: `${line.replace('\n', '')}`, - buildId, - applicationId - }); - } - } throw error; } } diff --git a/apps/api/src/lib/common.ts b/apps/api/src/lib/common.ts index cb2b10e17..db283d5a7 100644 --- a/apps/api/src/lib/common.ts +++ b/apps/api/src/lib/common.ts @@ -1,4 +1,4 @@ -import { exec } from 'node:child_process' +import { exec } from 'node:child_process'; import util from 'util'; import fs from 'fs/promises'; import yaml from 'js-yaml'; @@ -11,17 +11,17 @@ import { promises as dns } from 'dns'; import { PrismaClient } from '@prisma/client'; import cuid from 'cuid'; import os from 'os'; -import sshConfig from 'ssh-config' +import sshConfig from 'ssh-config'; import { checkContainer, removeContainer } from './docker'; import { day } from './dayjs'; -import * as serviceFields from './services/serviceFields' +import * as serviceFields from './services/serviceFields'; import { saveBuildLog } from './buildPacks/common'; import { 
scheduler } from './scheduler'; import { supportedServiceTypesAndVersions } from './services/supportedVersions'; import { includeServices } from './services/common'; -export const version = '3.10.4'; +export const version = '3.10.5'; export const isDev = process.env.NODE_ENV === 'development'; const algorithm = 'aes-256-ctr'; @@ -38,24 +38,24 @@ export const defaultProxyImageHttp = `coolify-haproxy-http-alpine:latest`; export const defaultTraefikImage = `traefik:v2.8`; export function getAPIUrl() { if (process.env.GITPOD_WORKSPACE_URL) { - const { href } = new URL(process.env.GITPOD_WORKSPACE_URL) - const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '') - return newURL + const { href } = new URL(process.env.GITPOD_WORKSPACE_URL); + const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, ''); + return newURL; } if (process.env.CODESANDBOX_HOST) { - return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, '3001')}` + return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`; } return isDev ? 'http://host.docker.internal:3001' : 'http://localhost:3000'; } export function getUIUrl() { if (process.env.GITPOD_WORKSPACE_URL) { - const { href } = new URL(process.env.GITPOD_WORKSPACE_URL) - const newURL = href.replace('https://', 'https://3000-').replace(/\/$/, '') - return newURL + const { href } = new URL(process.env.GITPOD_WORKSPACE_URL); + const newURL = href.replace('https://', 'https://3000-').replace(/\/$/, ''); + return newURL; } if (process.env.CODESANDBOX_HOST) { - return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, '3000')}` + return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, '3000')}`; } return 'http://localhost:3000'; } @@ -68,67 +68,81 @@ const otherTraefikEndpoint = isDev ? 
`${getAPIUrl()}/webhooks/traefik/other.json` : 'http://coolify:3000/webhooks/traefik/other.json'; - export const uniqueName = (): string => uniqueNamesGenerator(customConfig); export const asyncExecShell = util.promisify(exec); -export const asyncExecShellStream = async ({ debug, buildId, applicationId, command, engine }: { debug: boolean, buildId: string, applicationId: string, command: string, engine: string }) => { +export const asyncExecShellStream = async ({ + debug, + buildId, + applicationId, + command, + engine +}: { + debug: boolean; + buildId: string; + applicationId: string; + command: string; + engine: string; +}) => { return await new Promise(async (resolve, reject) => { - const { execaCommand } = await import('execa') - const subprocess = execaCommand(command, { env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine } }) - const errorLogs = [] - const logs = [] - subprocess.stdout.on('data', async (data) => { - const stdout = data.toString(); - const array = stdout.split('\n') - for (const line of array) { - if (line !== '\n' && line !== '') { - logs.push(line.replace('\n', '')) - debug && await saveBuildLog({ - line: `${line.replace('\n', '')}`, - buildId, - applicationId - }); + const { execaCommand } = await import('execa'); + const subprocess = execaCommand(command, { + env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine } + }); + const logs = []; + subprocess.stdout.on('data', async (data) => { + const stdout = data.toString(); + const array = stdout.split('\n'); + for (const line of array) { + if (line !== '\n' && line !== '') { + const log = { + line: `${line.replace('\n', '')}`, + buildId, + applicationId + } + logs.push(log); + if (debug) { + await saveBuildLog(log); } } - }) - subprocess.stderr.on('data', async (data) => { - const stderr = data.toString(); - const array = stderr.split('\n') - for (const line of array) { - if (line !== '\n' && line !== '') { - errorLogs.push(line.replace('\n', '')) - debug && await saveBuildLog({ - line: `${line.replace('\n', '')}`, - buildId, - applicationId - }); + } + }); + subprocess.stderr.on('data', async (data) => { + const stderr = data.toString(); + const array = stderr.split('\n'); + for (const line of array) { + if (line !== '\n' && line !== '') { + const log = { + line: `${line.replace('\n', '')}`, + buildId, + applicationId + } + logs.push(log); + if (debug) { + await saveBuildLog(log); } } - }) + } + }); subprocess.on('exit', async (code) => { await asyncSleep(1000); if (code === 0) { - resolve(code) + resolve(code); } else { if (!debug) { - for (const line of errorLogs) { - await saveBuildLog({ - line: `${line.replace('\n', '')}`, - buildId, - applicationId - }); + for (const log of logs) { + await saveBuildLog(log); } } - reject(code) + reject(code); } - }) - }) -} + }); + }); +}; export const asyncSleep = (delay: number): Promise => new Promise((resolve) => setTimeout(resolve, delay)); export const prisma = new PrismaClient({ - errorFormat: 'minimal', + errorFormat: 'minimal' // log: [ // { // emit: 'event', @@ -176,10 +190,9 @@ export const decrypt = (hashString: string) => { ]); return decrpyted.toString(); } catch (error) { - console.log({ decryptionError: error.message }) - return hashString + console.log({ decryptionError: error.message }); + return hashString; } - } }; export const encrypt = (text: string) => { @@ -194,8 +207,6 @@ export const encrypt = (text: string) => { } }; - - export async function checkDoubleBranch(branch: string, projectId: number): Promise { const applications = await 
prisma.application.findMany({ where: { branch, projectId } }); return applications.length > 1; @@ -214,7 +225,7 @@ export async function isDNSValid(hostname: any, domain: string): Promise { resolves = await dns.resolve4(hostname); } } catch (error) { - throw 'Invalid DNS.' + throw 'Invalid DNS.'; } try { @@ -229,11 +240,10 @@ export async function isDNSValid(hostname: any, domain: string): Promise { } if (!ipDomainFound) throw false; } catch (error) { - throw 'DNS not set' + throw 'DNS not set'; } } - export function getDomain(domain: string): string { return domain?.replace('https://', '').replace('http://', ''); } @@ -259,7 +269,7 @@ export async function isDomainConfigured({ ], id: { not: id }, destinationDocker: { - remoteIpAddress, + remoteIpAddress } }, select: { fqdn: true } @@ -295,7 +305,10 @@ export async function isDomainConfigured({ export async function getContainerUsage(dockerId: string, container: string): Promise { try { - const { stdout } = await executeDockerCmd({ dockerId, command: `docker container stats ${container} --no-stream --no-trunc --format "{{json .}}"` }) + const { stdout } = await executeDockerCmd({ + dockerId, + command: `docker container stats ${container} --no-stream --no-trunc --format "{{json .}}"` + }); return JSON.parse(stdout); } catch (err) { return { @@ -324,7 +337,7 @@ export async function checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }): P resolves = await dns.resolve4(hostname); } } catch (error) { - throw { status: 500, message: `Could not determine IP address for ${hostname}.` } + throw { status: 500, message: `Could not determine IP address for ${hostname}.` }; } if (dualCerts) { @@ -346,9 +359,15 @@ export async function checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }): P } } if (ipDomainFound && ipDomainDualCertFound) return { status: 200 }; - throw { status: 500, message: `DNS not set correctly or propogated.
Please check your DNS settings.` } + throw { + status: 500, + message: `DNS not set correctly or propagated.
Please check your DNS settings.` + }; } catch (error) { - throw { status: 500, message: `DNS not set correctly or propogated.
Please check your DNS settings.` } + throw { + status: 500, + message: `DNS not set correctly or propagated.
Please check your DNS settings.` + }; } } else { try { @@ -360,9 +379,15 @@ export async function checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }): P } } if (ipDomainFound) return { status: 200 }; - throw { status: 500, message: `DNS not set correctly or propogated.
Please check your DNS settings.` } + throw { + status: 500, + message: `DNS not set correctly or propagated.
Please check your DNS settings.` + }; } catch (error) { - throw { status: 500, message: `DNS not set correctly or propogated.
Please check your DNS settings.` } + throw { + status: 500, + message: `DNS not set correctly or propagated.
Please check your DNS settings.` + }; } } } @@ -436,63 +461,95 @@ export const supportedDatabaseTypesAndVersions = [ export async function getFreeSSHLocalPort(id: string): Promise { const { default: isReachable } = await import('is-port-reachable'); - const { remoteIpAddress, sshLocalPort } = await prisma.destinationDocker.findUnique({ where: { id } }) + const { remoteIpAddress, sshLocalPort } = await prisma.destinationDocker.findUnique({ + where: { id } + }); if (sshLocalPort) { - return Number(sshLocalPort) + return Number(sshLocalPort); } const data = await prisma.setting.findFirst(); const { minPort, maxPort } = data; - const ports = await prisma.destinationDocker.findMany({ where: { sshLocalPort: { not: null }, remoteIpAddress: { not: remoteIpAddress } } }) + const ports = await prisma.destinationDocker.findMany({ + where: { sshLocalPort: { not: null }, remoteIpAddress: { not: remoteIpAddress } } + }); const alreadyConfigured = await prisma.destinationDocker.findFirst({ where: { - remoteIpAddress, id: { not: id }, sshLocalPort: { not: null } + remoteIpAddress, + id: { not: id }, + sshLocalPort: { not: null } } - }) + }); if (alreadyConfigured?.sshLocalPort) { - await prisma.destinationDocker.update({ where: { id }, data: { sshLocalPort: alreadyConfigured.sshLocalPort } }) - return Number(alreadyConfigured.sshLocalPort) + await prisma.destinationDocker.update({ + where: { id }, + data: { sshLocalPort: alreadyConfigured.sshLocalPort } + }); + return Number(alreadyConfigured.sshLocalPort); } - const range = generateRangeArray(minPort, maxPort) - const availablePorts = range.filter(port => !ports.map(p => p.sshLocalPort).includes(port)) + const range = generateRangeArray(minPort, maxPort); + const availablePorts = range.filter((port) => !ports.map((p) => p.sshLocalPort).includes(port)); for (const port of availablePorts) { - const found = await isReachable(port, { host: 'localhost' }) + const found = await isReachable(port, { host: 'localhost' }); if (!found) { - await prisma.destinationDocker.update({ where: { id }, data: { sshLocalPort: Number(port) } }) - return Number(port) + await prisma.destinationDocker.update({ + where: { id }, + data: { sshLocalPort: Number(port) } + }); + return Number(port); } } - return false + return false; } export async function createRemoteEngineConfiguration(id: string) { const homedir = os.homedir(); - const sshKeyFile = `/tmp/id_rsa-${id}` + const sshKeyFile = `/tmp/id_rsa-${id}`; const localPort = await getFreeSSHLocalPort(id); - const { sshKey: { privateKey }, remoteIpAddress, remotePort, remoteUser } = await prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } }) - await fs.writeFile(sshKeyFile, decrypt(privateKey) + '\n', { encoding: 'utf8', mode: 400 }) + const { + sshKey: { privateKey }, + remoteIpAddress, + remotePort, + remoteUser + } = await prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } }); + await fs.writeFile(sshKeyFile, decrypt(privateKey) + '\n', { encoding: 'utf8', mode: 400 }); // Needed for remote docker compose - const { stdout: numberOfSSHAgentsRunning } = await asyncExecShell(`ps ax | grep [s]sh-agent | grep coolify-ssh-agent.pid | grep -v grep | wc -l`) + const { stdout: numberOfSSHAgentsRunning } = await asyncExecShell( + `ps ax | grep [s]sh-agent | grep coolify-ssh-agent.pid | grep -v grep | wc -l` + ); if (numberOfSSHAgentsRunning !== '' && Number(numberOfSSHAgentsRunning.trim()) == 0) { try { - await fs.stat(`/tmp/coolify-ssh-agent.pid`) - await 
fs.rm(`/tmp/coolify-ssh-agent.pid`) + await fs.stat(`/tmp/coolify-ssh-agent.pid`); + await fs.rm(`/tmp/coolify-ssh-agent.pid`); } catch (error) { } - await asyncExecShell(`eval $(ssh-agent -sa /tmp/coolify-ssh-agent.pid)`) + await asyncExecShell(`eval $(ssh-agent -sa /tmp/coolify-ssh-agent.pid)`); } - await asyncExecShell(`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh-add -q ${sshKeyFile}`) + await asyncExecShell(`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh-add -q ${sshKeyFile}`); - const { stdout: numberOfSSHTunnelsRunning } = await asyncExecShell(`ps ax | grep 'ssh -F /dev/null -o StrictHostKeyChecking no -fNL ${localPort}:localhost:${remotePort}' | grep -v grep | wc -l`) + const { stdout: numberOfSSHTunnelsRunning } = await asyncExecShell( + `ps ax | grep 'ssh -F /dev/null -o StrictHostKeyChecking no -fNL ${localPort}:localhost:${remotePort}' | grep -v grep | wc -l` + ); if (numberOfSSHTunnelsRunning !== '' && Number(numberOfSSHTunnelsRunning.trim()) == 0) { try { - await asyncExecShell(`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh -F /dev/null -o "StrictHostKeyChecking no" -fNL ${localPort}:localhost:${remotePort} ${remoteUser}@${remoteIpAddress}`) + await asyncExecShell( + `SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh -F /dev/null -o "StrictHostKeyChecking no" -fNL ${localPort}:localhost:${remotePort} ${remoteUser}@${remoteIpAddress}` + ); } catch (error) { } - } - const config = sshConfig.parse('') - const found = config.find({ Host: remoteIpAddress }) + const config = sshConfig.parse(''); + const foundWildcard = config.find({ Host: '*' }); + if (!foundWildcard) { + config.append({ + Host: '*', + StrictHostKeyChecking: 'no', + ControlMaster: 'auto', + ControlPath: `${homedir}/.ssh/coolify-%r@%h:%p`, + ControlPersist: '10m' + }) + } + const found = config.find({ Host: remoteIpAddress }); if (!found) { config.append({ Host: remoteIpAddress, @@ -501,14 +558,15 @@ export async function createRemoteEngineConfiguration(id: string) { User: remoteUser, IdentityFile: sshKeyFile, StrictHostKeyChecking: 'no' - }) + }); } + try { - await fs.stat(`${homedir}/.ssh/`) + await fs.stat(`${homedir}/.ssh/`); } catch (error) { - await fs.mkdir(`${homedir}/.ssh/`) + await fs.mkdir(`${homedir}/.ssh/`); } - return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config)) + return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config)); } export async function executeSSHCmd({ dockerId, command }) { const { execaCommand } = await import('execa') @@ -531,17 +589,17 @@ export async function executeDockerCmd({ debug, buildId, applicationId, dockerId const { execaCommand } = await import('execa') let { remoteEngine, remoteIpAddress, engine, remoteUser } = await prisma.destinationDocker.findUnique({ where: { id: dockerId } }) if (remoteEngine) { - await createRemoteEngineConfiguration(dockerId) - engine = `ssh://${remoteIpAddress}` + await createRemoteEngineConfiguration(dockerId); + engine = `ssh://${remoteIpAddress}`; } else { - engine = 'unix:///var/run/docker.sock' + engine = 'unix:///var/run/docker.sock'; } if (process.env.CODESANDBOX_HOST) { if (command.startsWith('docker compose')) { - command = command.replace(/docker compose/gi, 'docker-compose') + command = command.replace(/docker compose/gi, 'docker-compose'); } } - if (command.startsWith(`docker build --progress plain`)) { + if (command.startsWith(`docker build --progress plain`) || command.startsWith(`pack build`)) { return await asyncExecShellStream({ debug, buildId, applicationId, command, engine }); } return await 
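// Sketch for reference: roughly the ~/.ssh/config block that the new wildcard entry in
// createRemoteEngineConfiguration() above writes. Only the wildcard host is shown here; the
// per-destination entry with the tunnel port and identity file is appended the same way.
import sshConfig from 'ssh-config';
import os from 'os';

const homedir = os.homedir();
const config = sshConfig.parse('');
config.append({
    Host: '*',
    StrictHostKeyChecking: 'no',
    ControlMaster: 'auto',
    ControlPath: `${homedir}/.ssh/coolify-%r@%h:%p`,
    ControlPersist: '10m'
});
console.log(sshConfig.stringify(config));
// Prints roughly:
//   Host *
//     StrictHostKeyChecking no
//     ControlMaster auto
//     ControlPath <homedir>/.ssh/coolify-%r@%h:%p
//     ControlPersist 10m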
execaCommand(command, { env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine }, shell: true }) @@ -552,22 +610,31 @@ export async function startTraefikProxy(id: string): Promise { const { id: settingsId, ipv4, ipv6 } = await listSettings(); if (!found) { - const { stdout: coolifyNetwork } = await executeDockerCmd({ dockerId: id, command: `docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"` }) + const { stdout: coolifyNetwork } = await executeDockerCmd({ + dockerId: id, + command: `docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"` + }); if (!coolifyNetwork) { - await executeDockerCmd({ dockerId: id, command: `docker network create --attachable coolify-infra` }) + await executeDockerCmd({ + dockerId: id, + command: `docker network create --attachable coolify-infra` + }); } - const { stdout: Config } = await executeDockerCmd({ dockerId: id, command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'` }) + const { stdout: Config } = await executeDockerCmd({ + dockerId: id, + command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'` + }); const ip = JSON.parse(Config)[0].Gateway; - let traefikUrl = mainTraefikEndpoint + let traefikUrl = mainTraefikEndpoint; if (remoteEngine) { - let ip = null + let ip = null; if (isDev) { - ip = getAPIUrl() + ip = getAPIUrl(); } else { - ip = `http://${ipv4 || ipv6}:3000` + ip = `http://${ipv4 || ipv6}:3000`; } - traefikUrl = `${ip}/webhooks/traefik/remote/${id}` + traefikUrl = `${ip}/webhooks/traefik/remote/${id}`; } await executeDockerCmd({ dockerId: id, @@ -593,7 +660,7 @@ export async function startTraefikProxy(id: string): Promise { --certificatesresolvers.letsencrypt.acme.storage=/etc/traefik/acme/acme.json \ --certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web \ --log.level=error` - }) + }); await prisma.setting.update({ where: { id: settingsId }, data: { proxyHash: null } }); await prisma.destinationDocker.update({ where: { id }, @@ -617,15 +684,17 @@ export async function startTraefikProxy(id: string): Promise { } export async function configureNetworkTraefikProxy(destination: any): Promise { - const { id } = destination + const { id } = destination; const { stdout: networks } = await executeDockerCmd({ dockerId: id, - command: - `docker ps -a --filter name=coolify-proxy --format '{{json .Networks}}'` + command: `docker ps -a --filter name=coolify-proxy --format '{{json .Networks}}'` }); const configuredNetworks = networks.replace(/"/g, '').replace('\n', '').split(','); if (!configuredNetworks.includes(destination.network)) { - await executeDockerCmd({ dockerId: destination.id, command: `docker network connect ${destination.network} coolify-proxy` }) + await executeDockerCmd({ + dockerId: destination.id, + command: `docker network connect ${destination.network} coolify-proxy` + }); } } @@ -643,10 +712,8 @@ export async function stopTraefikProxy( if (found) { await executeDockerCmd({ dockerId: id, - command: - `docker stop -t 0 coolify-proxy && docker rm coolify-proxy` + command: `docker stop -t 0 coolify-proxy && docker rm coolify-proxy` }); - } } catch (error) { return error; @@ -659,10 +726,13 @@ export async function listSettings(): Promise { return settings; } - -export function generatePassword({ length = 24, symbols = false, isHex = false }: { length?: number, symbols?: boolean, isHex?: boolean } | null): string { +export function generatePassword({ + length = 24, + symbols = false, + isHex = false +}: { length?: number; symbols?: 
boolean; isHex?: boolean } | null): string { if (isHex) { - return crypto.randomBytes(length).toString("hex"); + return crypto.randomBytes(length).toString('hex'); } const password = generator.generate({ length, @@ -791,11 +861,11 @@ export function generateDatabaseConfiguration(database: any, arch: string): Data image: `${baseImage}:${version}`, volume: `${id}-${type}-data:/bitnami/mysql/data`, ulimits: {} - } + }; if (isARM(arch)) { configuration.volume = `${id}-${type}-data:/var/lib/mysql`; } - return configuration + return configuration; } else if (type === 'mariadb') { const configuration: DatabaseConfiguration = { privatePort: 3306, @@ -813,7 +883,7 @@ export function generateDatabaseConfiguration(database: any, arch: string): Data if (isARM(arch)) { configuration.volume = `${id}-${type}-data:/var/lib/mysql`; } - return configuration + return configuration; } else if (type === 'mongodb') { const configuration: DatabaseConfiguration = { privatePort: 27017, @@ -829,10 +899,10 @@ export function generateDatabaseConfiguration(database: any, arch: string): Data configuration.environmentVariables = { MONGO_INITDB_ROOT_USERNAME: rootUser, MONGO_INITDB_ROOT_PASSWORD: rootUserPassword - } + }; configuration.volume = `${id}-${type}-data:/data/db`; } - return configuration + return configuration; } else if (type === 'postgresql') { const configuration: DatabaseConfiguration = { privatePort: 5432, @@ -845,16 +915,16 @@ export function generateDatabaseConfiguration(database: any, arch: string): Data image: `${baseImage}:${version}`, volume: `${id}-${type}-data:/bitnami/postgresql`, ulimits: {} - } + }; if (isARM(arch)) { configuration.volume = `${id}-${type}-data:/var/lib/postgresql`; configuration.environmentVariables = { POSTGRES_PASSWORD: dbUserPassword, POSTGRES_USER: dbUser, POSTGRES_DB: defaultDatabase - } + }; } - return configuration + return configuration; } else if (type === 'redis') { const { settings: { appendOnly } } = database; const configuration: DatabaseConfiguration = { @@ -870,9 +940,10 @@ export function generateDatabaseConfiguration(database: any, arch: string): Data }; if (isARM(arch)) { configuration.volume = `${id}-${type}-data:/data`; - configuration.command = `/usr/local/bin/redis-server --appendonly ${appendOnly ? 'yes' : 'no'} --requirepass ${dbUserPassword}`; + configuration.command = `/usr/local/bin/redis-server --appendonly ${appendOnly ? 
'yes' : 'no' + } --requirepass ${dbUserPassword}`; } - return configuration + return configuration; } else if (type === 'couchdb') { const configuration: DatabaseConfiguration = { privatePort: 5984, @@ -906,15 +977,15 @@ export function generateDatabaseConfiguration(database: any, arch: string): Data } export function isARM(arch: string) { if (arch === 'arm' || arch === 'arm64') { - return true + return true; } - return false + return false; } export function getDatabaseImage(type: string, arch: string): string { const found = supportedDatabaseTypesAndVersions.find((t) => t.name === type); if (found) { if (isARM(arch)) { - return found.baseImageARM || found.baseImage + return found.baseImageARM || found.baseImage; } return found.baseImage; } @@ -925,14 +996,13 @@ export function getDatabaseVersions(type: string, arch: string): string[] { const found = supportedDatabaseTypesAndVersions.find((t) => t.name === type); if (found) { if (isARM(arch)) { - return found.versionsARM || found.versions + return found.versionsARM || found.versions; } return found.versions; } return []; } - export type ComposeFile = { version: ComposerFileVersion; services: Record; @@ -954,11 +1024,13 @@ export type ComposeFileService = { depends_on?: string[]; command?: string; ports?: string[]; - build?: { + build?: + | { context: string; dockerfile: string; args?: Record; - } | string; + } + | string; deploy?: { restart_policy?: { condition?: string; @@ -1038,10 +1110,7 @@ export const createDirectories = async ({ }; }; - -export async function stopDatabaseContainer( - database: any -): Promise { +export async function stopDatabaseContainer(database: any): Promise { let everStarted = false; const { id, @@ -1050,7 +1119,10 @@ export async function stopDatabaseContainer( } = database; if (destinationDockerId) { try { - const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` }) + const { stdout } = await executeDockerCmd({ + dockerId, + command: `docker inspect --format '{{json .State}}' ${id}` + }); if (stdout) { everStarted = true; @@ -1063,7 +1135,6 @@ export async function stopDatabaseContainer( return everStarted; } - export async function stopTcpHttpProxy( id: string, destinationDocker: any, @@ -1078,17 +1149,14 @@ export async function stopTcpHttpProxy( if (found) { return await executeDockerCmd({ dockerId, - command: - `docker stop -t 0 ${container} && docker rm ${container}` + command: `docker stop -t 0 ${container} && docker rm ${container}` }); - } } catch (error) { return error; } } - export async function updatePasswordInDb(database, user, newPassword, isRoot) { const { id, @@ -1106,55 +1174,52 @@ export async function updatePasswordInDb(database, user, newPassword, isRoot) { await executeDockerCmd({ dockerId, command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"ALTER USER '${user}'@'%' IDENTIFIED WITH caching_sha2_password BY '${newPassword}';\"` - }) + }); } else if (type === 'mariadb') { await executeDockerCmd({ dockerId, command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"SET PASSWORD FOR '${user}'@'%' = PASSWORD('${newPassword}');\"` - }) - + }); } else if (type === 'postgresql') { if (isRoot) { await executeDockerCmd({ dockerId, command: `docker exec ${id} psql postgresql://postgres:${rootUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role postgres WITH PASSWORD '${newPassword}'"` - }) + }); } else { await executeDockerCmd({ dockerId, command: `docker exec ${id} psql 
postgresql://${dbUser}:${dbUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role ${user} WITH PASSWORD '${newPassword}'"` - }) + }); } } else if (type === 'mongodb') { await executeDockerCmd({ dockerId, command: `docker exec ${id} mongo 'mongodb://${rootUser}:${rootUserPassword}@${id}:27017/admin?readPreference=primary&ssl=false' --eval "db.changeUserPassword('${user}','${newPassword}')"` - }) - + }); } else if (type === 'redis') { await executeDockerCmd({ dockerId, command: `docker exec ${id} redis-cli -u redis://${dbUserPassword}@${id}:6379 --raw CONFIG SET requirepass ${newPassword}` - }) - + }); } } } export async function checkExposedPort({ id, configuredPort, exposePort, engine, remoteEngine, remoteIpAddress }: { id: string, configuredPort?: number, exposePort: number, engine: string, remoteEngine: boolean, remoteIpAddress?: string }) { if (exposePort < 1024 || exposePort > 65535) { - throw { status: 500, message: `Exposed Port needs to be between 1024 and 65535.` } + throw { status: 500, message: `Exposed Port needs to be between 1024 and 65535.` }; } if (configuredPort) { if (configuredPort !== exposePort) { const availablePort = await getFreeExposedPort(id, exposePort, engine, remoteEngine, remoteIpAddress); if (availablePort.toString() !== exposePort.toString()) { - throw { status: 500, message: `Port ${exposePort} is already in use.` } + throw { status: 500, message: `Port ${exposePort} is already in use.` }; } } } else { const availablePort = await getFreeExposedPort(id, exposePort, engine, remoteEngine, remoteIpAddress); if (availablePort.toString() !== exposePort.toString()) { - throw { status: 500, message: `Port ${exposePort} is already in use.` } + throw { status: 500, message: `Port ${exposePort} is already in use.` }; } } } @@ -1207,7 +1272,7 @@ export async function getFreeExposedPort(id, exposePort, engine, remoteEngine, r } } export function generateRangeArray(start, end) { - return Array.from({ length: (end - start) }, (v, k) => k + start); + return Array.from({ length: end - start }, (v, k) => k + start); } export async function getFreePublicPort({ id, remoteEngine, engine, remoteIpAddress }) { const { default: isReachable } = await import('is-port-reachable'); @@ -1300,24 +1365,28 @@ export async function startTraefikTCPProxy( let dependentId = id; if (type === 'wordpressftp') dependentId = `${id}-ftp`; - const foundDependentContainer = await checkContainer({ dockerId, container: dependentId, remove: true }); + const foundDependentContainer = await checkContainer({ + dockerId, + container: dependentId, + remove: true + }); try { if (foundDependentContainer && !found) { const { stdout: Config } = await executeDockerCmd({ dockerId, command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'` - }) + }); const ip = JSON.parse(Config)[0].Gateway; - let traefikUrl = otherTraefikEndpoint + let traefikUrl = otherTraefikEndpoint; if (remoteEngine) { - let ip = null + let ip = null; if (isDev) { - ip = getAPIUrl() + ip = getAPIUrl(); } else { - ip = `http://${ipv4 || ipv6}:3000` + ip = `http://${ipv4 || ipv6}:3000`; } - traefikUrl = `${ip}/webhooks/traefik/other.json` + traefikUrl = `${ip}/webhooks/traefik/other.json`; } const tcpProxy = { version: '3.8', @@ -1353,28 +1422,37 @@ export async function startTraefikTCPProxy( await executeDockerCmd({ dockerId, command: `docker compose -f /tmp/docker-compose-${id}.yaml up -d` - }) + }); await fs.rm(`/tmp/docker-compose-${id}.yaml`); } if (!foundDependentContainer && found) { await executeDockerCmd({ 
dockerId, command: `docker stop -t 0 ${container} && docker rm ${container}` - }) + }); } } catch (error) { return error; } } -export async function getServiceFromDB({ id, teamId }: { id: string; teamId: string }): Promise { +export async function getServiceFromDB({ + id, + teamId +}: { + id: string; + teamId: string; +}): Promise { const settings = await prisma.setting.findFirst(); const body = await prisma.service.findFirst({ where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } }, include: includeServices }); - let { type } = body - type = fixType(type) + if (!body) { + return null + } + let { type } = body; + type = fixType(type); if (body?.serviceSecret.length > 0) { body.serviceSecret = body.serviceSecret.map((s) => { @@ -1383,7 +1461,7 @@ export async function getServiceFromDB({ id, teamId }: { id: string; teamId: str }); } - body[type] = { ...body[type], ...getUpdateableFields(type, body[type]) } + body[type] = { ...body[type], ...getUpdateableFields(type, body[type]) }; return { ...body, settings }; } @@ -1407,52 +1485,52 @@ export function saveUpdateableFields(type: string, data: any) { const update = {}; if (type && serviceFields[type]) { serviceFields[type].map((k) => { - let temp = data[k.name] + let temp = data[k.name]; if (temp) { if (k.isEncrypted) { - temp = encrypt(temp) + temp = encrypt(temp); } if (k.isLowerCase) { - temp = temp.toLowerCase() + temp = temp.toLowerCase(); } if (k.isNumber) { - temp = Number(temp) + temp = Number(temp); } if (k.isBoolean) { - temp = Boolean(temp) + temp = Boolean(temp); } } if (k.isNumber && temp === '') { - temp = null + temp = null; } - update[k.name] = temp + update[k.name] = temp; }); } - return update + return update; } export function getUpdateableFields(type: string, data: any) { const update = {}; if (type && serviceFields[type]) { serviceFields[type].map((k) => { - let temp = data[k.name] + let temp = data[k.name]; if (temp) { if (k.isEncrypted) { - temp = decrypt(temp) + temp = decrypt(temp); } - update[k.name] = temp + update[k.name] = temp; } - update[k.name] = temp + update[k.name] = temp; }); } - return update + return update; } export function fixType(type) { // Hack to fix the type case sensitivity... if (type === 'plausibleanalytics') type = 'plausibleAnalytics'; if (type === 'meilisearch') type = 'meiliSearch'; - return type + return type; } export const getServiceMainPort = (service: string) => { @@ -1463,7 +1541,6 @@ export const getServiceMainPort = (service: string) => { return null; }; - export function makeLabelForServices(type) { return [ 'coolify.managed=true', @@ -1472,8 +1549,14 @@ export function makeLabelForServices(type) { `coolify.service.type=${type}` ]; } -export function errorHandler({ status = 500, message = 'Unknown error.' }: { status: number, message: string | any }) { - if (message.message) message = message.message +export function errorHandler({ + status = 500, + message = 'Unknown error.' 
+}: { + status: number; + message: string | any; +}) { + if (message.message) message = message.message; throw { status, message }; } export async function generateSshKeyPair(): Promise<{ publicKey: string; privateKey: string }> { @@ -1494,8 +1577,12 @@ export async function generateSshKeyPair(): Promise<{ publicKey: string; private export async function stopBuild(buildId, applicationId) { let count = 0; await new Promise(async (resolve, reject) => { - const { destinationDockerId, status } = await prisma.build.findFirst({ where: { id: buildId } }); - const { id: dockerId } = await prisma.destinationDocker.findFirst({ where: { id: destinationDockerId } }); + const { destinationDockerId, status } = await prisma.build.findFirst({ + where: { id: buildId } + }); + const { id: dockerId } = await prisma.destinationDocker.findFirst({ + where: { id: destinationDockerId } + }); const interval = setInterval(async () => { try { if (status === 'failed' || status === 'canceled') { @@ -1505,12 +1592,15 @@ export async function stopBuild(buildId, applicationId) { if (count > 15) { clearInterval(interval); if (scheduler.workers.has('deployApplication')) { - scheduler.workers.get('deployApplication').postMessage('cancel') + scheduler.workers.get('deployApplication').postMessage('cancel'); } await cleanupDB(buildId, applicationId); return reject(new Error('Deployment canceled.')); } - const { stdout: buildContainers } = await executeDockerCmd({ dockerId, command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'` }) + const { stdout: buildContainers } = await executeDockerCmd({ + dockerId, + command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'` + }); if (buildContainers) { const containersArray = buildContainers.trim().split('\n'); for (const container of containersArray) { @@ -1520,7 +1610,7 @@ export async function stopBuild(buildId, applicationId) { await removeContainer({ id, dockerId }); clearInterval(interval); if (scheduler.workers.has('deployApplication')) { - scheduler.workers.get('deployApplication').postMessage('cancel') + scheduler.workers.get('deployApplication').postMessage('cancel'); } await cleanupDB(buildId, applicationId); return resolve(); @@ -1543,37 +1633,43 @@ async function cleanupDB(buildId: string, applicationId: string) { export function convertTolOldVolumeNames(type) { if (type === 'nocodb') { - return 'nc' + return 'nc'; } } export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) { // Cleanup old coolify images try { - let { stdout: images } = await executeDockerCmd({ dockerId, command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r` }) + let { stdout: images } = await executeDockerCmd({ + dockerId, + command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r` + }); images = images.trim(); if (images) { - await executeDockerCmd({ dockerId, command: `docker rmi -f ${images}" -q | xargs -r` }) + await executeDockerCmd({ dockerId, command: `docker rmi -f ${images}" -q | xargs -r` }); } } catch (error) { } if (lowDiskSpace || force) { if (isDev) { if (!force) console.log(`[DEV MODE] Low disk space: ${lowDiskSpace}`); - return + return; } try { - await executeDockerCmd({ dockerId, command: `docker container prune -f --filter "label=coolify.managed=true"` }) + await executeDockerCmd({ + dockerId, + command: `docker container prune -f --filter "label=coolify.managed=true"` + }); } catch (error) { } 
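// Sketch only: errorHandler() above is the error funnel the route handlers in this PR rely on.
// A minimal hypothetical route showing the pattern (names and import path are illustrative):
import { FastifyRequest } from 'fastify';
import { errorHandler, prisma } from '../lib/common';

export async function listCertificates(request: FastifyRequest) {
    try {
        const teamId = (request as any).user.teamId;
        const certificates = await prisma.certificate.findMany({ where: { teamId } });
        // Return only non-sensitive fields; the encrypted private key stays on the server.
        return { certificates: certificates.map(({ id, createdAt }) => ({ id, createdAt })) };
    } catch ({ status, message }) {
        // errorHandler normalizes Error-like objects to a message and rethrows with an HTTP status.
        return errorHandler({ status, message });
    }
}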
try { - await executeDockerCmd({ dockerId, command: `docker image prune -f` }) + await executeDockerCmd({ dockerId, command: `docker image prune -f` }); } catch (error) { } try { - await executeDockerCmd({ dockerId, command: `docker image prune -a -f` }) + await executeDockerCmd({ dockerId, command: `docker image prune -a -f` }); } catch (error) { } // Cleanup build caches try { - await executeDockerCmd({ dockerId, command: `docker builder prune -a -f` }) + await executeDockerCmd({ dockerId, command: `docker builder prune -a -f` }); } catch (error) { } } } @@ -1587,7 +1683,6 @@ export function persistentVolumes(id, persistentStorage, config) { volumeSet.add(volume); } } - } } const volumesArray = Array.from(volumeSet); @@ -1596,21 +1691,21 @@ export function persistentVolumes(id, persistentStorage, config) { return `${id}${storage.path.replace(/\//gi, '-')}:${storage.path}`; }) || []; - let volumes = [...persistentVolume] - if (volumesArray) volumes = [...volumesArray, ...volumes] - const composeVolumes = volumes.length > 0 && volumes.map((volume) => { - return { - [`${volume.split(':')[0]}`]: { - name: volume.split(':')[0] - } - }; - }) || [] + let volumes = [...persistentVolume]; + if (volumesArray) volumes = [...volumesArray, ...volumes]; + const composeVolumes = + (volumes.length > 0 && + volumes.map((volume) => { + return { + [`${volume.split(':')[0]}`]: { + name: volume.split(':')[0] + } + }; + })) || + []; - const volumeMounts = Object.assign( - {}, - ...composeVolumes - ) || {} - return { volumeMounts } + const volumeMounts = Object.assign({}, ...composeVolumes) || {}; + return { volumeMounts }; } export function defaultComposeConfiguration(network: string): any { return { @@ -1624,25 +1719,29 @@ export function defaultComposeConfiguration(network: string): any { window: '120s' } } - } + }; } export function decryptApplication(application: any) { if (application) { if (application?.gitSource?.githubApp?.clientSecret) { - application.gitSource.githubApp.clientSecret = decrypt(application.gitSource.githubApp.clientSecret) || null; + application.gitSource.githubApp.clientSecret = + decrypt(application.gitSource.githubApp.clientSecret) || null; } if (application?.gitSource?.githubApp?.webhookSecret) { - application.gitSource.githubApp.webhookSecret = decrypt(application.gitSource.githubApp.webhookSecret) || null; + application.gitSource.githubApp.webhookSecret = + decrypt(application.gitSource.githubApp.webhookSecret) || null; } if (application?.gitSource?.githubApp?.privateKey) { - application.gitSource.githubApp.privateKey = decrypt(application.gitSource.githubApp.privateKey) || null; + application.gitSource.githubApp.privateKey = + decrypt(application.gitSource.githubApp.privateKey) || null; } if (application?.gitSource?.gitlabApp?.appSecret) { - application.gitSource.gitlabApp.appSecret = decrypt(application.gitSource.gitlabApp.appSecret) || null; + application.gitSource.gitlabApp.appSecret = + decrypt(application.gitSource.gitlabApp.appSecret) || null; } if (application?.secrets.length > 0) { application.secrets = application.secrets.map((s: any) => { - s.value = decrypt(s.value) || null + s.value = decrypt(s.value) || null; return s; }); } diff --git a/apps/api/src/lib/scheduler.ts b/apps/api/src/lib/scheduler.ts index ebff53e12..743463757 100644 --- a/apps/api/src/lib/scheduler.ts +++ b/apps/api/src/lib/scheduler.ts @@ -9,8 +9,8 @@ Bree.extend(TSBree); const options: any = { defaultExtension: 'js', - // logger: new Cabin(), - logger: false, + logger: new Cabin(), + // logger: 
false, workerMessageHandler: async ({ name, message }) => { if (name === 'deployApplication' && message?.deploying) { if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) { diff --git a/apps/api/src/lib/services/common.ts b/apps/api/src/lib/services/common.ts index f6174ca69..716dc44c0 100644 --- a/apps/api/src/lib/services/common.ts +++ b/apps/api/src/lib/services/common.ts @@ -20,7 +20,7 @@ export const includeServices: any = { glitchTip: true, searxng: true, weblate: true, - taiga: true + taiga: true, }; export async function configureServiceType({ id, @@ -378,6 +378,6 @@ export async function removeService({ id }: { id: string }): Promise { await prisma.searxng.deleteMany({ where: { serviceId: id } }); await prisma.weblate.deleteMany({ where: { serviceId: id } }); await prisma.taiga.deleteMany({ where: { serviceId: id } }); - + await prisma.service.delete({ where: { id } }); } \ No newline at end of file diff --git a/apps/api/src/lib/services/handlers.ts b/apps/api/src/lib/services/handlers.ts index f6aaa6be5..227383f9a 100644 --- a/apps/api/src/lib/services/handlers.ts +++ b/apps/api/src/lib/services/handlers.ts @@ -5,6 +5,7 @@ import bcrypt from 'bcryptjs'; import { ServiceStartStop } from '../../routes/api/v1/services/types'; import { asyncSleep, ComposeFile, createDirectories, defaultComposeConfiguration, errorHandler, executeDockerCmd, getDomain, getFreePublicPort, getServiceFromDB, getServiceImage, getServiceMainPort, isARM, isDev, makeLabelForServices, persistentVolumes, prisma } from '../common'; import { defaultServiceConfigurations } from '../services'; +import { OnlyId } from '../../types'; export async function startService(request: FastifyRequest) { try { @@ -69,6 +70,13 @@ export async function startService(request: FastifyRequest) { if (type === 'taiga') { return await startTaigaService(request) } + if (type === 'grafana') { + return await startGrafanaService(request) + } + if (type === 'trilium') { + return await startTriliumService(request) + } + throw `Service type ${type} not supported.` } catch (error) { throw { status: 500, message: error?.message || error } @@ -314,7 +322,7 @@ async function startMinioService(request: FastifyRequest) { destinationDocker, persistentStorage, exposePort, - minio: { rootUser, rootUserPassword }, + minio: { rootUser, rootUserPassword, apiFqdn }, serviceSecret } = service; @@ -333,7 +341,7 @@ async function startMinioService(request: FastifyRequest) { image: `${image}:${version}`, volumes: [`${id}-minio-data:/data`], environmentVariables: { - MINIO_SERVER_URL: fqdn, + MINIO_SERVER_URL: apiFqdn, MINIO_DOMAIN: getDomain(fqdn), MINIO_ROOT_USER: rootUser, MINIO_ROOT_PASSWORD: rootUserPassword, @@ -900,8 +908,8 @@ async function startMeilisearchService(request: FastifyRequest const { meiliSearch: { masterKey } } = service; - const { type, version, destinationDockerId, destinationDocker, serviceSecret, exposePort, persistentStorage } = - service; + const { type, version, destinationDockerId, destinationDocker, + serviceSecret, exposePort, persistentStorage } = service; const network = destinationDockerId && destinationDocker.network; const port = getServiceMainPort('meilisearch'); @@ -2640,3 +2648,132 @@ async function startTaigaService(request: FastifyRequest) { } } +async function startGrafanaService(request: FastifyRequest) { + try { + const { id } = request.params; + const teamId = request.user.teamId; + const service = await getServiceFromDB({ id, teamId }); + const { type, version, destinationDockerId, 
destinationDocker, serviceSecret, exposePort, persistentStorage } = + service; + const network = destinationDockerId && destinationDocker.network; + const port = getServiceMainPort('grafana'); + + const { workdir } = await createDirectories({ repository: type, buildId: id }); + const image = getServiceImage(type); + + const config = { + grafana: { + image: `${image}:${version}`, + volumes: [`${id}-grafana:/var/lib/grafana`], + environmentVariables: {} + } + }; + if (serviceSecret.length > 0) { + serviceSecret.forEach((secret) => { + config.grafana.environmentVariables[secret.name] = secret.value; + }); + } + const { volumeMounts } = persistentVolumes(id, persistentStorage, config) + const composeFile: ComposeFile = { + version: '3.8', + services: { + [id]: { + container_name: id, + image: config.grafana.image, + volumes: config.grafana.volumes, + environment: config.grafana.environmentVariables, + ...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}), + labels: makeLabelForServices('grafana'), + ...defaultComposeConfiguration(network), + } + }, + networks: { + [network]: { + external: true + } + }, + volumes: volumeMounts + }; + const composeFileDestination = `${workdir}/docker-compose.yaml`; + await fs.writeFile(composeFileDestination, yaml.dump(composeFile)); + await startServiceContainers(destinationDocker.id, composeFileDestination) + return {} + } catch ({ status, message }) { + return errorHandler({ status, message }) + } +} +async function startTriliumService(request: FastifyRequest) { + try { + const { id } = request.params; + const teamId = request.user.teamId; + const service = await getServiceFromDB({ id, teamId }); + const { type, version, destinationDockerId, destinationDocker, serviceSecret, exposePort, persistentStorage } = + service; + const network = destinationDockerId && destinationDocker.network; + const port = getServiceMainPort('trilium'); + + const { workdir } = await createDirectories({ repository: type, buildId: id }); + const image = getServiceImage(type); + + const config = { + trilium: { + image: `${image}:${version}`, + volumes: [`${id}-trilium:/home/node/trilium-data`], + environmentVariables: {} + } + }; + if (serviceSecret.length > 0) { + serviceSecret.forEach((secret) => { + config.trilium.environmentVariables[secret.name] = secret.value; + }); + } + const { volumeMounts } = persistentVolumes(id, persistentStorage, config) + const composeFile: ComposeFile = { + version: '3.8', + services: { + [id]: { + container_name: id, + image: config.trilium.image, + volumes: config.trilium.volumes, + environment: config.trilium.environmentVariables, + ...(exposePort ? 
{ ports: [`${exposePort}:${port}`] } : {}), + labels: makeLabelForServices('trilium'), + ...defaultComposeConfiguration(network), + } + }, + networks: { + [network]: { + external: true + } + }, + volumes: volumeMounts + }; + const composeFileDestination = `${workdir}/docker-compose.yaml`; + await fs.writeFile(composeFileDestination, yaml.dump(composeFile)); + await startServiceContainers(destinationDocker.id, composeFileDestination) + return {} + } catch ({ status, message }) { + return errorHandler({ status, message }) + } +} + +export async function migrateAppwriteDB(request: FastifyRequest, reply: FastifyReply) { + try { + const { id } = request.params + const teamId = request.user.teamId; + const { + destinationDockerId, + destinationDocker, + } = await getServiceFromDB({ id, teamId }); + if (destinationDockerId) { + await executeDockerCmd({ + dockerId: destinationDocker.id, + command: `docker exec ${id} migrate` + }) + return await reply.code(201).send() + } + throw { status: 500, message: 'Could cleanup logs.' } + } catch ({ status, message }) { + return errorHandler({ status, message }) + } +} diff --git a/apps/api/src/lib/services/supportedVersions.ts b/apps/api/src/lib/services/supportedVersions.ts index 643215105..526f49379 100644 --- a/apps/api/src/lib/services/supportedVersions.ts +++ b/apps/api/src/lib/services/supportedVersions.ts @@ -172,8 +172,8 @@ export const supportedServiceTypesAndVersions = [ fancyName: 'Appwrite', baseImage: 'appwrite/appwrite', images: ['mariadb:10.7', 'redis:6.2-alpine', 'appwrite/telegraf:1.4.0'], - versions: ['latest', '1.0','0.15.3'], - recommendedVersion: '0.15.3', + versions: ['latest', '1.0', '0.15.3'], + recommendedVersion: '1.0', ports: { main: 80 } @@ -233,4 +233,26 @@ export const supportedServiceTypesAndVersions = [ // main: 80 // } // }, + { + name: 'grafana', + fancyName: 'Grafana Dashboard', + baseImage: 'grafana/grafana', + images: [], + versions: ['latest', '9.1.3', '9.1.2', '9.0.8', '8.3.11', '8.4.11', '8.5.11'], + recommendedVersion: 'latest', + ports: { + main: 3000 + } + }, + { + name: 'trilium', + fancyName: 'Trilium Notes', + baseImage: 'zadam/trilium', + images: [], + versions: ['latest'], + recommendedVersion: 'latest', + ports: { + main: 8080 + } + }, ]; \ No newline at end of file diff --git a/apps/api/src/routes/api/v1/applications/handlers.ts b/apps/api/src/routes/api/v1/applications/handlers.ts index ffb9e64d4..0a2414a12 100644 --- a/apps/api/src/routes/api/v1/applications/handlers.ts +++ b/apps/api/src/routes/api/v1/applications/handlers.ts @@ -321,17 +321,12 @@ export async function saveApplication(request: FastifyRequest, export async function saveApplicationSettings(request: FastifyRequest, reply: FastifyReply) { try { const { id } = request.params - const { debug, previews, dualCerts, autodeploy, branch, projectId, isBot, isDBBranching } = request.body - // const isDouble = await checkDoubleBranch(branch, projectId); - // if (isDouble && autodeploy) { - // await prisma.applicationSettings.updateMany({ where: { application: { branch, projectId } }, data: { autodeploy: false } }) - // throw { status: 500, message: 'Cannot activate automatic deployments until only one application is defined for this repository / branch.' } - // } + const { debug, previews, dualCerts, autodeploy, branch, projectId, isBot, isDBBranching, isCustomSSL } = request.body await prisma.application.update({ where: { id }, - data: { fqdn: isBot ? 
null : undefined, settings: { update: { debug, previews, dualCerts, autodeploy, isBot, isDBBranching } } }, + data: { fqdn: isBot ? null : undefined, settings: { update: { debug, previews, dualCerts, autodeploy, isBot, isDBBranching, isCustomSSL } } }, include: { destinationDocker: true } - }); + }); return reply.code(201).send(); } catch ({ status, message }) { return errorHandler({ status, message }) @@ -787,64 +782,74 @@ export async function saveConnectedDatabase(request, reply) { export async function getSecrets(request: FastifyRequest) { try { const { id } = request.params + let secrets = await prisma.secret.findMany({ - where: { applicationId: id }, - orderBy: { createdAt: 'desc' } + where: { applicationId: id, isPRMRSecret: false }, + orderBy: { createdAt: 'asc' } }); + let previewSecrets = await prisma.secret.findMany({ + where: { applicationId: id, isPRMRSecret: true }, + orderBy: { createdAt: 'asc' } + }); + secrets = secrets.map((secret) => { secret.value = decrypt(secret.value); return secret; }); - secrets = secrets.filter((secret) => !secret.isPRMRSecret).sort((a, b) => { - return ('' + a.name).localeCompare(b.name); - }) + previewSecrets = previewSecrets.map((secret) => { + secret.value = decrypt(secret.value); + return secret; + }); + return { - secrets + previewSecrets: previewSecrets.sort((a, b) => { + return ('' + a.name).localeCompare(b.name); + }), + secrets: secrets.sort((a, b) => { + return ('' + a.name).localeCompare(b.name); + }) } } catch ({ status, message }) { return errorHandler({ status, message }) } } +export async function updatePreviewSecret(request: FastifyRequest, reply: FastifyReply) { + try { + const { id } = request.params + const { name, value } = request.body + await prisma.secret.updateMany({ + where: { applicationId: id, name, isPRMRSecret: true }, + data: { value: encrypt(value.trim()) } + }); + return reply.code(201).send() + } catch ({ status, message }) { + return errorHandler({ status, message }) + } +} +export async function updateSecret(request: FastifyRequest, reply: FastifyReply) { + try { + const { id } = request.params + const { name, value, isBuildSecret = undefined } = request.body + await prisma.secret.updateMany({ + where: { applicationId: id, name }, + data: { value: encrypt(value.trim()), isBuildSecret } + }); + return reply.code(201).send() + } catch ({ status, message }) { + return errorHandler({ status, message }) + } +} export async function saveSecret(request: FastifyRequest, reply: FastifyReply) { try { const { id } = request.params - let { name, value, isBuildSecret, isPRMRSecret, isNew } = request.body - if (isNew) { - const found = await prisma.secret.findFirst({ where: { name, applicationId: id, isPRMRSecret } }); - if (found) { - throw { status: 500, message: `Secret ${name} already exists.` } - } else { - value = encrypt(value.trim()); - await prisma.secret.create({ - data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } } - }); - } - } else { - if (value) { - value = encrypt(value.trim()); - } - const found = await prisma.secret.findFirst({ where: { applicationId: id, name, isPRMRSecret } }); - - if (found) { - if (!value && isPRMRSecret) { - await prisma.secret.deleteMany({ - where: { applicationId: id, name, isPRMRSecret } - }); - } else { - - await prisma.secret.updateMany({ - where: { applicationId: id, name, isPRMRSecret }, - data: { value, isBuildSecret, isPRMRSecret } - }); - } - - } else { - await prisma.secret.create({ - data: { name, value, isBuildSecret, isPRMRSecret, 
application: { connect: { id } } } - }); - } - } + const { name, value, isBuildSecret = false } = request.body + await prisma.secret.create({ + data: { name, value: encrypt(value.trim()), isBuildSecret, isPRMRSecret: false, application: { connect: { id } } } + }); + await prisma.secret.create({ + data: { name, value: encrypt(value.trim()), isBuildSecret, isPRMRSecret: true, application: { connect: { id } } } + }); return reply.code(201).send() } catch ({ status, message }) { return errorHandler({ status, message }) diff --git a/apps/api/src/routes/api/v1/applications/index.ts b/apps/api/src/routes/api/v1/applications/index.ts index 74370de4d..af39e63ce 100644 --- a/apps/api/src/routes/api/v1/applications/index.ts +++ b/apps/api/src/routes/api/v1/applications/index.ts @@ -1,6 +1,6 @@ import { FastifyPluginAsync } from 'fastify'; import { OnlyId } from '../../../../types'; -import { cancelDeployment, checkDNS, checkDomain, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication } from './handlers'; +import { cancelDeployment, checkDNS, checkDomain, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers'; import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types'; @@ -30,6 +30,8 @@ const root: FastifyPluginAsync = async (fastify): Promise => { fastify.get('/:id/secrets', async (request) => await getSecrets(request)); fastify.post('/:id/secrets', async (request, reply) => await saveSecret(request, reply)); + fastify.put('/:id/secrets', async (request, reply) => await updateSecret(request, reply)); + fastify.put('/:id/secrets/preview', async (request, reply) => await updatePreviewSecret(request, reply)); fastify.delete('/:id/secrets', async (request) => await deleteSecret(request)); fastify.get('/:id/storages', async (request) => await getStorages(request)); diff --git a/apps/api/src/routes/api/v1/applications/types.ts b/apps/api/src/routes/api/v1/applications/types.ts index 0699518a5..443deb00f 100644 --- a/apps/api/src/routes/api/v1/applications/types.ts +++ b/apps/api/src/routes/api/v1/applications/types.ts @@ -26,7 +26,7 @@ 
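As an aside on the secrets rework above: `POST /:id/secrets` now always creates two rows (a regular secret and its PR/MR preview counterpart), while the new `PUT /:id/secrets` and `PUT /:id/secrets/preview` routes update them. A minimal usage sketch, assuming the plugins are mounted under `/api/v1/applications` and that a bearer token and application id are available; none of these placeholder values come from this diff:

```ts
// Sketch only: relies on Node 18+ global fetch; base URL, token and id are placeholders.
const base = 'http://localhost:3001/api/v1/applications';
const applicationId = 'your-application-id';
const headers = {
  Authorization: `Bearer ${process.env.COOLIFY_TOKEN}`,
  'Content-Type': 'application/json'
};

// Creates the secret twice server-side: once with isPRMRSecret=false and once with true.
await fetch(`${base}/${applicationId}/secrets`, {
  method: 'POST',
  headers,
  body: JSON.stringify({ name: 'API_KEY', value: 'super-secret', isBuildSecret: false })
});

// Updates the stored value of an existing secret (matched by name).
await fetch(`${base}/${applicationId}/secrets`, {
  method: 'PUT',
  headers,
  body: JSON.stringify({ name: 'API_KEY', value: 'rotated-value' })
});

// Overrides only the preview (isPRMRSecret) copy used by PR/MR deployments.
await fetch(`${base}/${applicationId}/secrets/preview`, {
  method: 'PUT',
  headers,
  body: JSON.stringify({ name: 'API_KEY', value: 'preview-only-value' })
});
```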
export interface SaveApplication extends OnlyId { } export interface SaveApplicationSettings extends OnlyId { Querystring: { domain: string; }; - Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; isBot: boolean; isDBBranching: boolean }; + Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; isBot: boolean; isDBBranching: boolean, isCustomSSL: boolean }; } export interface DeleteApplication extends OnlyId { Querystring: { domain: string; }; @@ -65,7 +65,7 @@ export interface SaveSecret extends OnlyId { name: string, value: string, isBuildSecret: boolean, - isPRMRSecret: boolean, + previewSecret: boolean, isNew: boolean } } diff --git a/apps/api/src/routes/api/v1/index.ts b/apps/api/src/routes/api/v1/index.ts index bab30236b..1f5ab0696 100644 --- a/apps/api/src/routes/api/v1/index.ts +++ b/apps/api/src/routes/api/v1/index.ts @@ -1,6 +1,9 @@ import { FastifyPluginAsync } from 'fastify'; import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify } from './handlers'; import { GetCurrentUser } from './types'; +import pump from 'pump' +import fs from 'fs' +import { asyncExecShell, encrypt, errorHandler, prisma } from '../../../lib/common'; export interface Update { Body: { latestVersion: string } @@ -23,9 +26,7 @@ const root: FastifyPluginAsync = async (fastify): Promise => { onRequest: [fastify.authenticate] }, async (request) => await getCurrentUser(request, fastify)); - fastify.get('/undead', { - onRequest: [fastify.authenticate] - }, async function () { + fastify.get('/undead', async function () { return { message: 'nope' }; }); @@ -47,7 +48,7 @@ const root: FastifyPluginAsync = async (fastify): Promise => { onRequest: [fastify.authenticate] }, async (request) => await restartCoolify(request)); - fastify.post('/internal/resetQueue', { + fastify.post('/internal/resetQueue', { onRequest: [fastify.authenticate] }, async (request) => await resetQueue(request)); diff --git a/apps/api/src/routes/api/v1/services/index.ts b/apps/api/src/routes/api/v1/services/index.ts index 454f4c06e..17ac60fdd 100644 --- a/apps/api/src/routes/api/v1/services/index.ts +++ b/apps/api/src/routes/api/v1/services/index.ts @@ -30,7 +30,7 @@ import { import type { OnlyId } from '../../../../types'; import type { ActivateWordpressFtp, CheckService, CheckServiceDomain, DeleteServiceSecret, DeleteServiceStorage, GetServiceLogs, SaveService, SaveServiceDestination, SaveServiceSecret, SaveServiceSettings, SaveServiceStorage, SaveServiceType, SaveServiceVersion, ServiceStartStop, SetGlitchTipSettings, SetWordpressSettings } from './types'; -import { startService, stopService } from '../../../../lib/services/handlers'; +import { migrateAppwriteDB, startService, stopService } from '../../../../lib/services/handlers'; const root: FastifyPluginAsync = async (fastify): Promise => { fastify.addHook('onRequest', async (request) => { @@ -76,6 +76,8 @@ const root: FastifyPluginAsync = async (fastify): Promise => { fastify.post('/:id/plausibleanalytics/activate', async (request, reply) => await activatePlausibleUsers(request, reply)); fastify.post('/:id/plausibleanalytics/cleanup', async (request, reply) => await cleanupPlausibleLogs(request, reply)); fastify.post('/:id/wordpress/ftp', async (request, reply) => await activateWordpressFtp(request, reply)); + + fastify.post('/:id/appwrite/migrate', async (request, reply) => await 
migrateAppwriteDB(request, reply)); }; export default root; diff --git a/apps/api/src/routes/api/v1/settings/handlers.ts b/apps/api/src/routes/api/v1/settings/handlers.ts index 831ce606e..f6364e91d 100644 --- a/apps/api/src/routes/api/v1/settings/handlers.ts +++ b/apps/api/src/routes/api/v1/settings/handlers.ts @@ -1,8 +1,9 @@ import { promises as dns } from 'dns'; +import { X509Certificate } from 'node:crypto'; import type { FastifyReply, FastifyRequest } from 'fastify'; -import { checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, getDomain, isDNSValid, isDomainConfigured, listSettings, prisma } from '../../../../lib/common'; -import { CheckDNS, CheckDomain, DeleteDomain, DeleteSSHKey, SaveSettings, SaveSSHKey } from './types'; +import { asyncExecShell, checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, isDNSValid, isDomainConfigured, listSettings, prisma } from '../../../../lib/common'; +import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types'; export async function listAllSettings(request: FastifyRequest) { @@ -16,8 +17,16 @@ export async function listAllSettings(request: FastifyRequest) { unencryptedKeys.push({ id: key.id, name: key.name, privateKey: decrypt(key.privateKey), createdAt: key.createdAt }) } } + const certificates = await prisma.certificate.findMany({ where: { team: { id: teamId } } }) + let cns = []; + for (const certificate of certificates) { + const x509 = new X509Certificate(certificate.cert); + cns.push({ commonName: x509.subject.split('\n').find((s) => s.startsWith('CN=')).replace('CN=', ''), id: certificate.id, createdAt: certificate.createdAt }) + } + return { settings, + certificates: cns, sshKeys: unencryptedKeys } } catch ({ status, message }) { @@ -118,7 +127,7 @@ export async function saveSSHKey(request: FastifyRequest, reply: Fas return errorHandler({ status, message }) } } -export async function deleteSSHKey(request: FastifyRequest, reply: FastifyReply) { +export async function deleteSSHKey(request: FastifyRequest, reply: FastifyReply) { try { const { id } = request.body; await prisma.sshKey.delete({ where: { id } }) @@ -126,4 +135,15 @@ export async function deleteSSHKey(request: FastifyRequest, reply: } catch ({ status, message }) { return errorHandler({ status, message }) } +} + +export async function deleteCertificates(request: FastifyRequest, reply: FastifyReply) { + try { + const { id } = request.body; + await asyncExecShell(`docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`) + await prisma.certificate.delete({ where: { id } }) + return reply.code(201).send() + } catch ({ status, message }) { + return errorHandler({ status, message }) + } } \ No newline at end of file diff --git a/apps/api/src/routes/api/v1/settings/index.ts b/apps/api/src/routes/api/v1/settings/index.ts index 96da5948b..45e418b34 100644 --- a/apps/api/src/routes/api/v1/settings/index.ts +++ b/apps/api/src/routes/api/v1/settings/index.ts @@ -1,21 +1,59 @@ import { FastifyPluginAsync } from 'fastify'; -import { checkDNS, checkDomain, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey } from './handlers'; -import { CheckDNS, CheckDomain, DeleteDomain, DeleteSSHKey, SaveSettings, SaveSSHKey } from './types'; +import { X509Certificate } from 'node:crypto'; + +import { encrypt, errorHandler, prisma } from '../../../../lib/common'; +import { checkDNS, checkDomain, deleteCertificates, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, 
saveSSHKey } from './handlers'; +import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types'; const root: FastifyPluginAsync = async (fastify): Promise => { - fastify.addHook('onRequest', async (request) => { - return await request.jwtVerify() - }) - fastify.get('/', async (request) => await listAllSettings(request)); - fastify.post('/', async (request, reply) => await saveSettings(request, reply)); - fastify.delete('/', async (request, reply) => await deleteDomain(request, reply)); + fastify.addHook('onRequest', async (request) => { + return await request.jwtVerify() + }) + fastify.get('/', async (request) => await listAllSettings(request)); + fastify.post('/', async (request, reply) => await saveSettings(request, reply)); + fastify.delete('/', async (request, reply) => await deleteDomain(request, reply)); - fastify.get('/check', async (request) => await checkDNS(request)); - fastify.post('/check', async (request) => await checkDomain(request)); + fastify.get('/check', async (request) => await checkDNS(request)); + fastify.post('/check', async (request) => await checkDomain(request)); - fastify.post('/sshKey', async (request, reply) => await saveSSHKey(request, reply)); - fastify.delete('/sshKey', async (request, reply) => await deleteSSHKey(request, reply)); + fastify.post('/sshKey', async (request, reply) => await saveSSHKey(request, reply)); + fastify.delete('/sshKey', async (request, reply) => await deleteSSHKey(request, reply)); + + fastify.post('/upload', async (request) => { + try { + const teamId = request.user.teamId; + const certificates = await prisma.certificate.findMany({}) + let cns = []; + for (const certificate of certificates) { + const x509 = new X509Certificate(certificate.cert); + cns.push(x509.subject.split('\n').find((s) => s.startsWith('CN=')).replace('CN=', '')) + } + const parts = await request.files() + let key = null + let cert = null + for await (const part of parts) { + const name = part.fieldname + if (name === 'key') key = (await part.toBuffer()).toString() + if (name === 'cert') cert = (await part.toBuffer()).toString() + } + const x509 = new X509Certificate(cert); + const cn = x509.subject.split('\n').find((s) => s.startsWith('CN=')).replace('CN=', '') + if (cns.includes(cn)) { + throw { + message: `A certificate with ${cn} common name already exists.` + } + } + await prisma.certificate.create({ data: { cert, key: encrypt(key), team: { connect: { id: teamId } } } }) + await prisma.applicationSettings.updateMany({ where: { application: { AND: [{ fqdn: { endsWith: cn } }, { fqdn: { startsWith: 'https' } }] } }, data: { isCustomSSL: true } }) + return { message: 'Certificated uploaded' } + } catch ({ status, message }) { + return errorHandler({ status, message }); + } + + }); + fastify.delete('/certificate', async (request, reply) => await deleteCertificates(request, reply)) + // fastify.get('/certificates', async (request) => await getCertificates(request)) }; export default root; diff --git a/apps/api/src/routes/api/v1/settings/types.ts b/apps/api/src/routes/api/v1/settings/types.ts index 956c58b5c..618101bba 100644 --- a/apps/api/src/routes/api/v1/settings/types.ts +++ b/apps/api/src/routes/api/v1/settings/types.ts @@ -41,4 +41,9 @@ export interface DeleteSSHKey { Body: { id: string } +} +export interface OnlyIdInBody { + Body: { + id: string + } } \ No newline at end of file diff --git a/apps/api/src/routes/webhooks/traefik/handlers.ts b/apps/api/src/routes/webhooks/traefik/handlers.ts index d79c2cb01..381869430 
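For context on the `/upload` route above: the handler expects a multipart request with two parts named `key` and `cert`, rejects a certificate whose common name is already registered, and flips `isCustomSSL` on every application whose https FQDN ends with that common name; `DELETE /certificate` removes the pem files from the proxy container and the database row. On the UI side this pairs with the api.ts change later in this diff, where `post` accepts a `FormData` body and passes it through untouched. A hedged client-side sketch follows; the route prefix and token are assumptions, while the field names match the handler:

```ts
// Sketch only: Node 18+ globals (fetch, FormData, Blob); paths and token are placeholders.
import { readFile } from 'node:fs/promises';

const base = 'http://localhost:3001/api/v1/settings';
const headers = { Authorization: `Bearer ${process.env.COOLIFY_TOKEN}` };

const form = new FormData();
form.append('key', new Blob([await readFile('./example.com-key.pem')]), 'key.pem');
form.append('cert', new Blob([await readFile('./example.com-cert.pem')]), 'cert.pem');

// Stores the encrypted key and the cert, then enables isCustomSSL for
// applications whose https fqdn ends with the certificate's common name.
await fetch(`${base}/upload`, { method: 'POST', headers, body: form });

// Deletes the <id>-key.pem / <id>-cert.pem files inside coolify-proxy and the DB record.
await fetch(`${base}/certificate`, {
  method: 'DELETE',
  headers: { ...headers, 'Content-Type': 'application/json' },
  body: JSON.stringify({ id: 'certificate-id-from-GET-/settings' })
});
```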
100644 --- a/apps/api/src/routes/webhooks/traefik/handlers.ts +++ b/apps/api/src/routes/webhooks/traefik/handlers.ts @@ -6,7 +6,7 @@ import { TraefikOtherConfiguration } from "./types"; import { OnlyId } from "../../../types"; function configureMiddleware( - { id, container, port, domain, nakedDomain, isHttps, isWWW, isDualCerts, scriptName, type }, + { id, container, port, domain, nakedDomain, isHttps, isWWW, isDualCerts, scriptName, type, isCustomSSL }, traefik ) { if (isHttps) { @@ -55,7 +55,7 @@ function configureMiddleware( entrypoints: ['websecure'], rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`, service: `${id}`, - tls: { + tls: isCustomSSL ? true : { certresolver: 'letsencrypt' }, middlewares: [] @@ -66,7 +66,7 @@ function configureMiddleware( entrypoints: ['websecure'], rule: `Host(\`www.${nakedDomain}\`) && PathPrefix(\`/\`)`, service: `${id}`, - tls: { + tls: isCustomSSL ? true : { certresolver: 'letsencrypt' }, middlewares: [] @@ -99,7 +99,7 @@ function configureMiddleware( entrypoints: ['websecure'], rule: `Host(\`${domain}\`) && PathPrefix(\`/\`)`, service: `${id}`, - tls: { + tls: isCustomSSL ? true : { certresolver: 'letsencrypt' }, middlewares: [] @@ -178,7 +178,19 @@ function configureMiddleware( export async function traefikConfiguration(request, reply) { try { + const sslpath = '/etc/traefik/acme/custom'; + const certificates = await prisma.certificate.findMany({ where: { team: { applications: { some: { settings: { isCustomSSL: true } } }, destinationDocker: { some: { remoteEngine: false, isCoolifyProxyUsed: true } } } } }) + let parsedCertificates = [] + for (const certificate of certificates) { + parsedCertificates.push({ + certFile: `${sslpath}/${certificate.id}-cert.pem`, + keyFile: `${sslpath}/${certificate.id}-key.pem` + }) + } const traefik = { + tls: { + certificates: parsedCertificates + }, http: { routers: {}, services: {}, @@ -224,7 +236,7 @@ export async function traefikConfiguration(request, reply) { port, destinationDocker, destinationDockerId, - settings: { previews, dualCerts } + settings: { previews, dualCerts, isCustomSSL } } = application; if (destinationDockerId) { const { network, id: dockerId } = destinationDocker; @@ -244,7 +256,8 @@ export async function traefikConfiguration(request, reply) { isRunning, isHttps, isWWW, - isDualCerts: dualCerts + isDualCerts: dualCerts, + isCustomSSL }); } if (previews) { @@ -267,7 +280,8 @@ export async function traefikConfiguration(request, reply) { nakedDomain, isHttps, isWWW, - isDualCerts: dualCerts + isDualCerts: dualCerts, + isCustomSSL }); } } @@ -534,7 +548,19 @@ export async function traefikOtherConfiguration(request: FastifyRequest) { const { id } = request.params try { + const sslpath = '/etc/traefik/acme/custom'; + const certificates = await prisma.certificate.findMany({ where: { team: { applications: { some: { settings: { isCustomSSL: true } } }, destinationDocker: { some: { id, remoteEngine: true, isCoolifyProxyUsed: true, remoteVerified: true } } } } }) + let parsedCertificates = [] + for (const certificate of certificates) { + parsedCertificates.push({ + certFile: `${sslpath}/${certificate.id}-cert.pem`, + keyFile: `${sslpath}/${certificate.id}-key.pem` + }) + } const traefik = { + tls: { + certificates: parsedCertificates + }, http: { routers: {}, services: {}, diff --git a/apps/ui/package.json b/apps/ui/package.json index 74e232854..805539a5d 100644 --- a/apps/ui/package.json +++ b/apps/ui/package.json @@ -42,13 +42,14 @@ }, "type": "module", 
"dependencies": { - "dayjs": "1.11.5", "@sveltejs/adapter-static": "1.0.0-next.39", "@tailwindcss/typography": "^0.5.7", "cuid": "2.1.8", "daisyui": "2.24.2", + "dayjs": "1.11.5", "js-cookie": "3.0.1", "p-limit": "4.0.0", + "svelte-file-dropzone": "^1.0.0", "svelte-select": "4.4.7", "sveltekit-i18n": "2.2.2" } diff --git a/apps/ui/src/lib/api.ts b/apps/ui/src/lib/api.ts index 697a29338..ed781b8a7 100644 --- a/apps/ui/src/lib/api.ts +++ b/apps/ui/src/lib/api.ts @@ -3,33 +3,35 @@ import Cookies from 'js-cookie'; export function getAPIUrl() { if (GITPOD_WORKSPACE_URL) { - const { href } = new URL(GITPOD_WORKSPACE_URL) - const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '') - return newURL + const { href } = new URL(GITPOD_WORKSPACE_URL); + const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, ''); + return newURL; } if (CODESANDBOX_HOST) { - return `https://${CODESANDBOX_HOST.replace(/\$PORT/,'3001')}` + return `https://${CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`; } - return dev ? 'http://localhost:3001' : 'http://localhost:3000'; + return dev + ? 'http://localhost:3001' + : 'http://localhost:3000'; } export function getWebhookUrl(type: string) { if (GITPOD_WORKSPACE_URL) { - const { href } = new URL(GITPOD_WORKSPACE_URL) - const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '') + const { href } = new URL(GITPOD_WORKSPACE_URL); + const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, ''); if (type === 'github') { - return `${newURL}/webhooks/github/events` + return `${newURL}/webhooks/github/events`; } if (type === 'gitlab') { - return `${newURL}/webhooks/gitlab/events` + return `${newURL}/webhooks/gitlab/events`; } } if (CODESANDBOX_HOST) { - const newURL = `https://${CODESANDBOX_HOST.replace(/\$PORT/,'3001')}` + const newURL = `https://${CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`; if (type === 'github') { - return `${newURL}/webhooks/github/events` + return `${newURL}/webhooks/github/events`; } if (type === 'gitlab') { - return `${newURL}/webhooks/gitlab/events` + return `${newURL}/webhooks/gitlab/events`; } } return `https://webhook.site/0e5beb2c-4e9b-40e2-a89e-32295e570c21/events`; @@ -37,7 +39,7 @@ export function getWebhookUrl(type: string) { async function send({ method, path, - data = {}, + data = null, headers, timeout = 120000 }: { @@ -51,7 +53,7 @@ async function send({ const controller = new AbortController(); const id = setTimeout(() => controller.abort(), timeout); const opts: any = { method, headers: {}, body: null, signal: controller.signal }; - if (Object.keys(data).length > 0) { + if (data && Object.keys(data).length > 0) { const parsedData = data; for (const [key, value] of Object.entries(data)) { if (value === '') { @@ -83,7 +85,9 @@ async function send({ if (dev && !path.startsWith('https://')) { path = `${getAPIUrl()}${path}`; } - + if (method === 'POST' && data && !opts.body) { + opts.body = data; + } const response = await fetch(`${path}`, opts); clearTimeout(id); @@ -103,7 +107,11 @@ async function send({ return {}; } if (!response.ok) { - if (response.status === 401 && !path.startsWith('https://api.github') && !path.includes('/v4/user')) { + if ( + response.status === 401 && + !path.startsWith('https://api.github') && + !path.includes('/v4/user') + ) { Cookies.remove('token'); } @@ -126,7 +134,7 @@ export function del( export function post( path: string, - data: Record, + data: Record | FormData, headers?: Record ): Promise> { return send({ method: 'POST', path, data, headers }); diff 
--git a/apps/ui/src/lib/components/CopyPasswordField.svelte b/apps/ui/src/lib/components/CopyPasswordField.svelte index 0a6fba471..9083fa47c 100644 --- a/apps/ui/src/lib/components/CopyPasswordField.svelte +++ b/apps/ui/src/lib/components/CopyPasswordField.svelte @@ -13,8 +13,9 @@ export let id: string; export let name: string; export let placeholder = ''; + export let inputStyle = ''; - let disabledClass = 'bg-coolback disabled:bg-coolblack'; + let disabledClass = 'bg-coolback disabled:bg-coolblack w-full'; let isHttps = browser && window.location.protocol === 'https:'; function copyToClipboard() { @@ -32,6 +33,7 @@ {#if !isPasswordField || showPassword} {#if textarea}
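Tying the pieces together with the webhooks/traefik/handlers.ts changes earlier in this diff: `traefikConfiguration` now emits a top-level `tls.certificates` list pointing at the uploaded pem files under `/etc/traefik/acme/custom`, and routers for applications with `isCustomSSL` get `tls: true` instead of the letsencrypt resolver, so Traefik serves the matching custom certificate. A rough sketch of the resulting dynamic configuration; the router key, ids and domain are illustrative and not taken from this diff:

```ts
// Illustrative shape only; real keys are derived from application and certificate ids.
const dynamicConfig = {
  tls: {
    certificates: [
      {
        certFile: '/etc/traefik/acme/custom/certificateId-cert.pem',
        keyFile: '/etc/traefik/acme/custom/certificateId-key.pem'
      }
    ]
  },
  http: {
    routers: {
      'applicationId-secure': {
        entrypoints: ['websecure'],
        rule: 'Host(`app.example.com`) && PathPrefix(`/`)',
        service: 'applicationId',
        // With isCustomSSL the router just enables TLS; without it, it would
        // carry { certresolver: 'letsencrypt' } as before.
        tls: true,
        middlewares: []
      }
    },
    services: {},
    middlewares: {}
  }
};

export default dynamicConfig;
```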