From 95c942f477ca6b9e6a79575f2bdc1f9bc8f6be2d Mon Sep 17 00:00:00 2001 From: Kaname <56084970+kaname-png@users.noreply.github.com> Date: Tue, 6 Sep 2022 17:37:26 +0000 Subject: [PATCH 01/81] feat(layout): added drawer when user is in mobile --- apps/api/src/lib/common.ts | 753 +++++++++++++++++------------ apps/ui/src/lib/api.ts | 32 +- apps/ui/src/routes/__layout.svelte | 465 ++++++++++++++---- 3 files changed, 828 insertions(+), 422 deletions(-) diff --git a/apps/api/src/lib/common.ts b/apps/api/src/lib/common.ts index cba901255..4f4f28917 100644 --- a/apps/api/src/lib/common.ts +++ b/apps/api/src/lib/common.ts @@ -1,4 +1,4 @@ -import { exec } from 'node:child_process' +import { exec } from 'node:child_process'; import util from 'util'; import fs from 'fs/promises'; import yaml from 'js-yaml'; @@ -11,11 +11,11 @@ import { promises as dns } from 'dns'; import { PrismaClient } from '@prisma/client'; import cuid from 'cuid'; import os from 'os'; -import sshConfig from 'ssh-config' +import sshConfig from 'ssh-config'; import { checkContainer, removeContainer } from './docker'; import { day } from './dayjs'; -import * as serviceFields from './services/serviceFields' +import * as serviceFields from './services/serviceFields'; import { saveBuildLog } from './buildPacks/common'; import { scheduler } from './scheduler'; import { supportedServiceTypesAndVersions } from './services/supportedVersions'; @@ -38,26 +38,28 @@ export const defaultProxyImageHttp = `coolify-haproxy-http-alpine:latest`; export const defaultTraefikImage = `traefik:v2.8`; export function getAPIUrl() { if (process.env.GITPOD_WORKSPACE_URL) { - const { href } = new URL(process.env.GITPOD_WORKSPACE_URL) - const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '') - return newURL + const { href } = new URL(process.env.GITPOD_WORKSPACE_URL); + const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, ''); + return newURL; } if (process.env.CODESANDBOX_HOST) { - return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, '3001')}` + return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`; } - return isDev ? 'http://localhost:3001' : 'http://localhost:3000'; + return isDev + ? 'https://kaname-png-coolify-vrgj9w6jq5x2xxxv-3001.githubpreview.dev' + : 'https://kaname-png-coolify-vrgj9w6jq5x2xxxv-3000.githubpreview.dev'; } export function getUIUrl() { if (process.env.GITPOD_WORKSPACE_URL) { - const { href } = new URL(process.env.GITPOD_WORKSPACE_URL) - const newURL = href.replace('https://', 'https://3000-').replace(/\/$/, '') - return newURL + const { href } = new URL(process.env.GITPOD_WORKSPACE_URL); + const newURL = href.replace('https://', 'https://3000-').replace(/\/$/, ''); + return newURL; } if (process.env.CODESANDBOX_HOST) { - return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, '3000')}` + return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, '3000')}`; } - return 'http://localhost:3000'; + return 'https://kaname-png-coolify-vrgj9w6jq5x2xxxv-3000.githubpreview.dev'; } const mainTraefikEndpoint = isDev @@ -68,17 +70,30 @@ const otherTraefikEndpoint = isDev ? 
`${getAPIUrl()}/webhooks/traefik/other.json` : 'http://coolify:3000/webhooks/traefik/other.json'; - export const uniqueName = (): string => uniqueNamesGenerator(customConfig); export const asyncExecShell = util.promisify(exec); -export const asyncExecShellStream = async ({ debug, buildId, applicationId, command, engine }: { debug: boolean, buildId: string, applicationId: string, command: string, engine: string }) => { +export const asyncExecShellStream = async ({ + debug, + buildId, + applicationId, + command, + engine +}: { + debug: boolean; + buildId: string; + applicationId: string; + command: string; + engine: string; +}) => { return await new Promise(async (resolve, reject) => { - const { execaCommand } = await import('execa') - const subprocess = execaCommand(command, { env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine } }) + const { execaCommand } = await import('execa'); + const subprocess = execaCommand(command, { + env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine } + }); if (debug) { subprocess.stdout.on('data', async (data) => { const stdout = data.toString(); - const array = stdout.split('\n') + const array = stdout.split('\n'); for (const line of array) { if (line !== '\n' && line !== '') { await saveBuildLog({ @@ -88,10 +103,10 @@ export const asyncExecShellStream = async ({ debug, buildId, applicationId, comm }); } } - }) + }); subprocess.stderr.on('data', async (data) => { const stderr = data.toString(); - const array = stderr.split('\n') + const array = stderr.split('\n'); for (const line of array) { if (line !== '\n' && line !== '') { await saveBuildLog({ @@ -101,23 +116,23 @@ export const asyncExecShellStream = async ({ debug, buildId, applicationId, comm }); } } - }) + }); } subprocess.on('exit', async (code) => { await asyncSleep(1000); if (code === 0) { - resolve(code) + resolve(code); } else { - reject(code) + reject(code); } - }) - }) -} + }); + }); +}; export const asyncSleep = (delay: number): Promise => new Promise((resolve) => setTimeout(resolve, delay)); export const prisma = new PrismaClient({ - errorFormat: 'minimal', + errorFormat: 'minimal' // log: [ // { // emit: 'event', @@ -165,10 +180,9 @@ export const decrypt = (hashString: string) => { ]); return decrpyted.toString(); } catch (error) { - console.log({ decryptionError: error.message }) - return hashString + console.log({ decryptionError: error.message }); + return hashString; } - } }; export const encrypt = (text: string) => { @@ -183,8 +197,6 @@ export const encrypt = (text: string) => { } }; - - export async function checkDoubleBranch(branch: string, projectId: number): Promise { const applications = await prisma.application.findMany({ where: { branch, projectId } }); return applications.length > 1; @@ -203,7 +215,7 @@ export async function isDNSValid(hostname: any, domain: string): Promise { resolves = await dns.resolve4(hostname); } } catch (error) { - throw 'Invalid DNS.' 
+ throw 'Invalid DNS.'; } try { @@ -218,11 +230,10 @@ export async function isDNSValid(hostname: any, domain: string): Promise { } if (!ipDomainFound) throw false; } catch (error) { - throw 'DNS not set' + throw 'DNS not set'; } } - export function getDomain(domain: string): string { return domain?.replace('https://', '').replace('http://', ''); } @@ -248,7 +259,7 @@ export async function isDomainConfigured({ ], id: { not: id }, destinationDocker: { - remoteIpAddress, + remoteIpAddress } }, select: { fqdn: true } @@ -284,7 +295,10 @@ export async function isDomainConfigured({ export async function getContainerUsage(dockerId: string, container: string): Promise { try { - const { stdout } = await executeDockerCmd({ dockerId, command: `docker container stats ${container} --no-stream --no-trunc --format "{{json .}}"` }) + const { stdout } = await executeDockerCmd({ + dockerId, + command: `docker container stats ${container} --no-stream --no-trunc --format "{{json .}}"` + }); return JSON.parse(stdout); } catch (err) { return { @@ -313,7 +327,7 @@ export async function checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }): P resolves = await dns.resolve4(hostname); } } catch (error) { - throw { status: 500, message: `Could not determine IP address for ${hostname}.` } + throw { status: 500, message: `Could not determine IP address for ${hostname}.` }; } if (dualCerts) { @@ -335,9 +349,15 @@ export async function checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }): P } } if (ipDomainFound && ipDomainDualCertFound) return { status: 200 }; - throw { status: 500, message: `DNS not set correctly or propogated.
Please check your DNS settings.` } + throw { + status: 500, + message: `DNS not set correctly or propagated.
Please check your DNS settings.` + }; } catch (error) { - throw { status: 500, message: `DNS not set correctly or propogated.
Please check your DNS settings.` } + throw { + status: 500, + message: `DNS not set correctly or propagated.
Please check your DNS settings.` + }; } } else { try { @@ -349,9 +369,15 @@ export async function checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }): P } } if (ipDomainFound) return { status: 200 }; - throw { status: 500, message: `DNS not set correctly or propogated.
Please check your DNS settings.` } + throw { + status: 500, + message: `DNS not set correctly or propagated.
Please check your DNS settings.` + }; } catch (error) { - throw { status: 500, message: `DNS not set correctly or propogated.
Please check your DNS settings.` } + throw { + status: 500, + message: `DNS not set correctly or propagated.
Please check your DNS settings.` + }; } } } @@ -419,63 +445,85 @@ export const supportedDatabaseTypesAndVersions = [ export async function getFreeSSHLocalPort(id: string): Promise { const { default: isReachable } = await import('is-port-reachable'); - const { remoteIpAddress, sshLocalPort } = await prisma.destinationDocker.findUnique({ where: { id } }) + const { remoteIpAddress, sshLocalPort } = await prisma.destinationDocker.findUnique({ + where: { id } + }); if (sshLocalPort) { - return Number(sshLocalPort) + return Number(sshLocalPort); } const data = await prisma.setting.findFirst(); const { minPort, maxPort } = data; - const ports = await prisma.destinationDocker.findMany({ where: { sshLocalPort: { not: null }, remoteIpAddress: { not: remoteIpAddress } } }) + const ports = await prisma.destinationDocker.findMany({ + where: { sshLocalPort: { not: null }, remoteIpAddress: { not: remoteIpAddress } } + }); const alreadyConfigured = await prisma.destinationDocker.findFirst({ where: { - remoteIpAddress, id: { not: id }, sshLocalPort: { not: null } + remoteIpAddress, + id: { not: id }, + sshLocalPort: { not: null } } - }) + }); if (alreadyConfigured?.sshLocalPort) { - await prisma.destinationDocker.update({ where: { id }, data: { sshLocalPort: alreadyConfigured.sshLocalPort } }) - return Number(alreadyConfigured.sshLocalPort) + await prisma.destinationDocker.update({ + where: { id }, + data: { sshLocalPort: alreadyConfigured.sshLocalPort } + }); + return Number(alreadyConfigured.sshLocalPort); } - const range = generateRangeArray(minPort, maxPort) - const availablePorts = range.filter(port => !ports.map(p => p.sshLocalPort).includes(port)) + const range = generateRangeArray(minPort, maxPort); + const availablePorts = range.filter((port) => !ports.map((p) => p.sshLocalPort).includes(port)); for (const port of availablePorts) { - const found = await isReachable(port, { host: 'localhost' }) + const found = await isReachable(port, { host: 'localhost' }); if (!found) { - await prisma.destinationDocker.update({ where: { id }, data: { sshLocalPort: Number(port) } }) - return Number(port) + await prisma.destinationDocker.update({ + where: { id }, + data: { sshLocalPort: Number(port) } + }); + return Number(port); } } - return false + return false; } export async function createRemoteEngineConfiguration(id: string) { const homedir = os.homedir(); - const sshKeyFile = `/tmp/id_rsa-${id}` + const sshKeyFile = `/tmp/id_rsa-${id}`; const localPort = await getFreeSSHLocalPort(id); - const { sshKey: { privateKey }, remoteIpAddress, remotePort, remoteUser } = await prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } }) - await fs.writeFile(sshKeyFile, decrypt(privateKey) + '\n', { encoding: 'utf8', mode: 400 }) + const { + sshKey: { privateKey }, + remoteIpAddress, + remotePort, + remoteUser + } = await prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } }); + await fs.writeFile(sshKeyFile, decrypt(privateKey) + '\n', { encoding: 'utf8', mode: 400 }); // Needed for remote docker compose - const { stdout: numberOfSSHAgentsRunning } = await asyncExecShell(`ps ax | grep [s]sh-agent | grep coolify-ssh-agent.pid | grep -v grep | wc -l`) + const { stdout: numberOfSSHAgentsRunning } = await asyncExecShell( + `ps ax | grep [s]sh-agent | grep coolify-ssh-agent.pid | grep -v grep | wc -l` + ); if (numberOfSSHAgentsRunning !== '' && Number(numberOfSSHAgentsRunning.trim()) == 0) { try { - await fs.stat(`/tmp/coolify-ssh-agent.pid`) - await 
fs.rm(`/tmp/coolify-ssh-agent.pid`) - } catch (error) { } - await asyncExecShell(`eval $(ssh-agent -sa /tmp/coolify-ssh-agent.pid)`) + await fs.stat(`/tmp/coolify-ssh-agent.pid`); + await fs.rm(`/tmp/coolify-ssh-agent.pid`); + } catch (error) {} + await asyncExecShell(`eval $(ssh-agent -sa /tmp/coolify-ssh-agent.pid)`); } - await asyncExecShell(`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh-add -q ${sshKeyFile}`) + await asyncExecShell(`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh-add -q ${sshKeyFile}`); - const { stdout: numberOfSSHTunnelsRunning } = await asyncExecShell(`ps ax | grep 'ssh -F /dev/null -o StrictHostKeyChecking no -fNL ${localPort}:localhost:${remotePort}' | grep -v grep | wc -l`) + const { stdout: numberOfSSHTunnelsRunning } = await asyncExecShell( + `ps ax | grep 'ssh -F /dev/null -o StrictHostKeyChecking no -fNL ${localPort}:localhost:${remotePort}' | grep -v grep | wc -l` + ); if (numberOfSSHTunnelsRunning !== '' && Number(numberOfSSHTunnelsRunning.trim()) == 0) { try { - await asyncExecShell(`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh -F /dev/null -o "StrictHostKeyChecking no" -fNL ${localPort}:localhost:${remotePort} ${remoteUser}@${remoteIpAddress}`) - } catch (error) { } - + await asyncExecShell( + `SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh -F /dev/null -o "StrictHostKeyChecking no" -fNL ${localPort}:localhost:${remotePort} ${remoteUser}@${remoteIpAddress}` + ); + } catch (error) {} } - const config = sshConfig.parse('') - const found = config.find({ Host: remoteIpAddress }) + const config = sshConfig.parse(''); + const found = config.find({ Host: remoteIpAddress }); if (!found) { config.append({ Host: remoteIpAddress, @@ -484,58 +532,79 @@ export async function createRemoteEngineConfiguration(id: string) { User: remoteUser, IdentityFile: sshKeyFile, StrictHostKeyChecking: 'no' - }) + }); } try { - await fs.stat(`${homedir}/.ssh/`) + await fs.stat(`${homedir}/.ssh/`); } catch (error) { - await fs.mkdir(`${homedir}/.ssh/`) + await fs.mkdir(`${homedir}/.ssh/`); } - return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config)) + return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config)); } -export async function executeDockerCmd({ debug, buildId, applicationId, dockerId, command }: { debug?: boolean, buildId?: string, applicationId?: string, dockerId: string, command: string }): Promise { - let { remoteEngine, remoteIpAddress, engine } = await prisma.destinationDocker.findUnique({ where: { id: dockerId } }) +export async function executeDockerCmd({ + debug, + buildId, + applicationId, + dockerId, + command +}: { + debug?: boolean; + buildId?: string; + applicationId?: string; + dockerId: string; + command: string; +}): Promise { + let { remoteEngine, remoteIpAddress, engine } = await prisma.destinationDocker.findUnique({ + where: { id: dockerId } + }); if (remoteEngine) { - await createRemoteEngineConfiguration(dockerId) - engine = `ssh://${remoteIpAddress}` + await createRemoteEngineConfiguration(dockerId); + engine = `ssh://${remoteIpAddress}`; } else { - engine = 'unix:///var/run/docker.sock' + engine = 'unix:///var/run/docker.sock'; } if (process.env.CODESANDBOX_HOST) { if (command.startsWith('docker compose')) { - command = command.replace(/docker compose/gi, 'docker-compose') + command = command.replace(/docker compose/gi, 'docker-compose'); } } if (command.startsWith(`docker build --progress plain`)) { return await asyncExecShellStream({ debug, buildId, applicationId, command, engine }); } - return await 
asyncExecShell( - `DOCKER_BUILDKIT=1 DOCKER_HOST="${engine}" ${command}` - ); - + return await asyncExecShell(`DOCKER_BUILDKIT=1 DOCKER_HOST="${engine}" ${command}`); } export async function startTraefikProxy(id: string): Promise { - const { engine, network, remoteEngine, remoteIpAddress } = await prisma.destinationDocker.findUnique({ where: { id } }) + const { engine, network, remoteEngine, remoteIpAddress } = + await prisma.destinationDocker.findUnique({ where: { id } }); const found = await checkContainer({ dockerId: id, container: 'coolify-proxy', remove: true }); const { id: settingsId, ipv4, ipv6 } = await listSettings(); if (!found) { - const { stdout: coolifyNetwork } = await executeDockerCmd({ dockerId: id, command: `docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"` }) + const { stdout: coolifyNetwork } = await executeDockerCmd({ + dockerId: id, + command: `docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"` + }); if (!coolifyNetwork) { - await executeDockerCmd({ dockerId: id, command: `docker network create --attachable coolify-infra` }) + await executeDockerCmd({ + dockerId: id, + command: `docker network create --attachable coolify-infra` + }); } - const { stdout: Config } = await executeDockerCmd({ dockerId: id, command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'` }) + const { stdout: Config } = await executeDockerCmd({ + dockerId: id, + command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'` + }); const ip = JSON.parse(Config)[0].Gateway; - let traefikUrl = mainTraefikEndpoint + let traefikUrl = mainTraefikEndpoint; if (remoteEngine) { - let ip = null + let ip = null; if (isDev) { - ip = getAPIUrl() + ip = getAPIUrl(); } else { - ip = `http://${ipv4 || ipv6}:3000` + ip = `http://${ipv4 || ipv6}:3000`; } - traefikUrl = `${ip}/webhooks/traefik/remote/${id}` + traefikUrl = `${ip}/webhooks/traefik/remote/${id}`; } await executeDockerCmd({ dockerId: id, @@ -561,7 +630,7 @@ export async function startTraefikProxy(id: string): Promise { --certificatesresolvers.letsencrypt.acme.storage=/etc/traefik/acme/acme.json \ --certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web \ --log.level=error` - }) + }); await prisma.setting.update({ where: { id: settingsId }, data: { proxyHash: null } }); await prisma.destinationDocker.update({ where: { id }, @@ -585,15 +654,17 @@ export async function startTraefikProxy(id: string): Promise { } export async function configureNetworkTraefikProxy(destination: any): Promise { - const { id } = destination + const { id } = destination; const { stdout: networks } = await executeDockerCmd({ dockerId: id, - command: - `docker ps -a --filter name=coolify-proxy --format '{{json .Networks}}'` + command: `docker ps -a --filter name=coolify-proxy --format '{{json .Networks}}'` }); const configuredNetworks = networks.replace(/"/g, '').replace('\n', '').split(','); if (!configuredNetworks.includes(destination.network)) { - await executeDockerCmd({ dockerId: destination.id, command: `docker network connect ${destination.network} coolify-proxy` }) + await executeDockerCmd({ + dockerId: destination.id, + command: `docker network connect ${destination.network} coolify-proxy` + }); } } @@ -611,10 +682,8 @@ export async function stopTraefikProxy( if (found) { await executeDockerCmd({ dockerId: id, - command: - `docker stop -t 0 coolify-proxy && docker rm coolify-proxy` + command: `docker stop -t 0 coolify-proxy && docker rm coolify-proxy` }); - } } 
catch (error) { return error; @@ -627,10 +696,13 @@ export async function listSettings(): Promise { return settings; } - -export function generatePassword({ length = 24, symbols = false, isHex = false }: { length?: number, symbols?: boolean, isHex?: boolean } | null): string { +export function generatePassword({ + length = 24, + symbols = false, + isHex = false +}: { length?: number; symbols?: boolean; isHex?: boolean } | null): string { if (isHex) { - return crypto.randomBytes(length).toString("hex"); + return crypto.randomBytes(length).toString('hex'); } const password = generator.generate({ length, @@ -642,95 +714,98 @@ export function generatePassword({ length = 24, symbols = false, isHex = false } return password; } -export function generateDatabaseConfiguration(database: any, arch: string): +export function generateDatabaseConfiguration( + database: any, + arch: string +): | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - MYSQL_DATABASE: string; - MYSQL_PASSWORD: string; - MYSQL_ROOT_USER: string; - MYSQL_USER: string; - MYSQL_ROOT_PASSWORD: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + MYSQL_DATABASE: string; + MYSQL_PASSWORD: string; + MYSQL_ROOT_USER: string; + MYSQL_USER: string; + MYSQL_ROOT_PASSWORD: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - MONGO_INITDB_ROOT_USERNAME?: string; - MONGO_INITDB_ROOT_PASSWORD?: string; - MONGODB_ROOT_USER?: string; - MONGODB_ROOT_PASSWORD?: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + MONGO_INITDB_ROOT_USERNAME?: string; + MONGO_INITDB_ROOT_PASSWORD?: string; + MONGODB_ROOT_USER?: string; + MONGODB_ROOT_PASSWORD?: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - MARIADB_ROOT_USER: string; - MARIADB_ROOT_PASSWORD: string; - MARIADB_USER: string; - MARIADB_PASSWORD: string; - MARIADB_DATABASE: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + MARIADB_ROOT_USER: string; + MARIADB_ROOT_PASSWORD: string; + MARIADB_USER: string; + MARIADB_PASSWORD: string; + MARIADB_DATABASE: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - POSTGRESQL_POSTGRES_PASSWORD: string; - POSTGRESQL_USERNAME: string; - POSTGRESQL_PASSWORD: string; - POSTGRESQL_DATABASE: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + POSTGRESQL_POSTGRES_PASSWORD: string; + POSTGRESQL_USERNAME: string; + POSTGRESQL_PASSWORD: string; + POSTGRESQL_DATABASE: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - POSTGRES_USER: string; - POSTGRES_PASSWORD: string; - POSTGRES_DB: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + POSTGRES_USER: string; + POSTGRES_PASSWORD: string; + POSTGRES_DB: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: 
Record; - privatePort: number; - environmentVariables: { - REDIS_AOF_ENABLED: string; - REDIS_PASSWORD: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + REDIS_AOF_ENABLED: string; + REDIS_PASSWORD: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - COUCHDB_PASSWORD: string; - COUCHDB_USER: string; - }; - } { + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + COUCHDB_PASSWORD: string; + COUCHDB_USER: string; + }; + } { const { id, dbUser, @@ -756,11 +831,11 @@ export function generateDatabaseConfiguration(database: any, arch: string): image: `${baseImage}:${version}`, volume: `${id}-${type}-data:/bitnami/mysql/data`, ulimits: {} - } + }; if (isARM(arch)) { configuration.volume = `${id}-${type}-data:/var/lib/mysql`; } - return configuration + return configuration; } else if (type === 'mariadb') { const configuration = { privatePort: 3306, @@ -778,7 +853,7 @@ export function generateDatabaseConfiguration(database: any, arch: string): if (isARM(arch)) { configuration.volume = `${id}-${type}-data:/var/lib/mysql`; } - return configuration + return configuration; } else if (type === 'mongodb') { const configuration = { privatePort: 27017, @@ -794,10 +869,10 @@ export function generateDatabaseConfiguration(database: any, arch: string): configuration.environmentVariables = { MONGO_INITDB_ROOT_USERNAME: rootUser, MONGO_INITDB_ROOT_PASSWORD: rootUserPassword - } + }; configuration.volume = `${id}-${type}-data:/data/db`; } - return configuration + return configuration; } else if (type === 'postgresql') { const configuration = { privatePort: 5432, @@ -810,16 +885,16 @@ export function generateDatabaseConfiguration(database: any, arch: string): image: `${baseImage}:${version}`, volume: `${id}-${type}-data:/bitnami/postgresql`, ulimits: {} - } + }; if (isARM(arch)) { configuration.volume = `${id}-${type}-data:/var/lib/postgresql`; configuration.environmentVariables = { POSTGRES_PASSWORD: dbUserPassword, POSTGRES_USER: dbUser, POSTGRES_DB: defaultDatabase - } + }; } - return configuration + return configuration; } else if (type === 'redis') { const configuration = { privatePort: 6379, @@ -834,9 +909,11 @@ export function generateDatabaseConfiguration(database: any, arch: string): }; if (isARM(arch)) { configuration.volume = `${id}-${type}-data:/data`; - configuration.command = `/usr/local/bin/redis-server --appendonly ${appendOnly ? 'yes' : 'no'} --requirepass ${dbUserPassword}`; + configuration.command = `/usr/local/bin/redis-server --appendonly ${ + appendOnly ? 
'yes' : 'no' + } --requirepass ${dbUserPassword}`; } - return configuration + return configuration; } else if (type === 'couchdb') { const configuration = { privatePort: 5984, @@ -851,20 +928,20 @@ export function generateDatabaseConfiguration(database: any, arch: string): if (isARM(arch)) { configuration.volume = `${id}-${type}-data:/opt/couchdb/data`; } - return configuration + return configuration; } } export function isARM(arch: string) { if (arch === 'arm' || arch === 'arm64') { - return true + return true; } - return false + return false; } export function getDatabaseImage(type: string, arch: string): string { const found = supportedDatabaseTypesAndVersions.find((t) => t.name === type); if (found) { if (isARM(arch)) { - return found.baseImageARM || found.baseImage + return found.baseImageARM || found.baseImage; } return found.baseImage; } @@ -875,14 +952,13 @@ export function getDatabaseVersions(type: string, arch: string): string[] { const found = supportedDatabaseTypesAndVersions.find((t) => t.name === type); if (found) { if (isARM(arch)) { - return found.versionsARM || found.versions + return found.versionsARM || found.versions; } return found.versions; } return []; } - export type ComposeFile = { version: ComposerFileVersion; services: Record; @@ -904,11 +980,13 @@ export type ComposeFileService = { depends_on?: string[]; command?: string; ports?: string[]; - build?: { - context: string; - dockerfile: string; - args?: Record; - } | string; + build?: + | { + context: string; + dockerfile: string; + args?: Record; + } + | string; deploy?: { restart_policy?: { condition?: string; @@ -977,7 +1055,7 @@ export const createDirectories = async ({ let workdirFound = false; try { workdirFound = !!(await fs.stat(workdir)); - } catch (error) { } + } catch (error) {} if (workdirFound) { await asyncExecShell(`rm -fr ${workdir}`); } @@ -988,10 +1066,7 @@ export const createDirectories = async ({ }; }; - -export async function stopDatabaseContainer( - database: any -): Promise { +export async function stopDatabaseContainer(database: any): Promise { let everStarted = false; const { id, @@ -1000,7 +1075,10 @@ export async function stopDatabaseContainer( } = database; if (destinationDockerId) { try { - const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` }) + const { stdout } = await executeDockerCmd({ + dockerId, + command: `docker inspect --format '{{json .State}}' ${id}` + }); if (stdout) { everStarted = true; @@ -1013,7 +1091,6 @@ export async function stopDatabaseContainer( return everStarted; } - export async function stopTcpHttpProxy( id: string, destinationDocker: any, @@ -1028,17 +1105,14 @@ export async function stopTcpHttpProxy( if (found) { return await executeDockerCmd({ dockerId, - command: - `docker stop -t 0 ${container} && docker rm ${container}` + command: `docker stop -t 0 ${container} && docker rm ${container}` }); - } } catch (error) { return error; } } - export async function updatePasswordInDb(database, user, newPassword, isRoot) { const { id, @@ -1056,55 +1130,64 @@ export async function updatePasswordInDb(database, user, newPassword, isRoot) { await executeDockerCmd({ dockerId, command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"ALTER USER '${user}'@'%' IDENTIFIED WITH caching_sha2_password BY '${newPassword}';\"` - }) + }); } else if (type === 'mariadb') { await executeDockerCmd({ dockerId, command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"SET PASSWORD FOR 
'${user}'@'%' = PASSWORD('${newPassword}');\"` - }) - + }); } else if (type === 'postgresql') { if (isRoot) { await executeDockerCmd({ dockerId, command: `docker exec ${id} psql postgresql://postgres:${rootUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role postgres WITH PASSWORD '${newPassword}'"` - }) + }); } else { await executeDockerCmd({ dockerId, command: `docker exec ${id} psql postgresql://${dbUser}:${dbUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role ${user} WITH PASSWORD '${newPassword}'"` - }) + }); } } else if (type === 'mongodb') { await executeDockerCmd({ dockerId, command: `docker exec ${id} mongo 'mongodb://${rootUser}:${rootUserPassword}@${id}:27017/admin?readPreference=primary&ssl=false' --eval "db.changeUserPassword('${user}','${newPassword}')"` - }) - + }); } else if (type === 'redis') { await executeDockerCmd({ dockerId, command: `docker exec ${id} redis-cli -u redis://${dbUserPassword}@${id}:6379 --raw CONFIG SET requirepass ${newPassword}` - }) - + }); } } } -export async function checkExposedPort({ id, configuredPort, exposePort, dockerId, remoteIpAddress }: { id: string, configuredPort?: number, exposePort: number, dockerId: string, remoteIpAddress?: string }) { +export async function checkExposedPort({ + id, + configuredPort, + exposePort, + dockerId, + remoteIpAddress +}: { + id: string; + configuredPort?: number; + exposePort: number; + dockerId: string; + remoteIpAddress?: string; +}) { if (exposePort < 1024 || exposePort > 65535) { - throw { status: 500, message: `Exposed Port needs to be between 1024 and 65535.` } + throw { status: 500, message: `Exposed Port needs to be between 1024 and 65535.` }; } if (configuredPort) { if (configuredPort !== exposePort) { const availablePort = await getFreeExposedPort(id, exposePort, dockerId, remoteIpAddress); if (availablePort.toString() !== exposePort.toString()) { - throw { status: 500, message: `Port ${exposePort} is already in use.` } + throw { status: 500, message: `Port ${exposePort} is already in use.` }; } } } else { const availablePort = await getFreeExposedPort(id, exposePort, dockerId, remoteIpAddress); if (availablePort.toString() !== exposePort.toString()) { - throw { status: 500, message: `Port ${exposePort} is already in use.` } + throw { status: 500, message: `Port ${exposePort} is already in use.` }; } } } @@ -1124,17 +1207,16 @@ export async function getFreeExposedPort(id, exposePort, dockerId, remoteIpAddre ).map((a) => a.exposePort); const usedPorts = [...applicationUsed, ...serviceUsed]; if (usedPorts.includes(exposePort)) { - return false + return false; } const found = await checkPort(exposePort, { host: remoteIpAddress || 'localhost' }); if (!found) { - return exposePort + return exposePort; } - return false - + return false; } export function generateRangeArray(start, end) { - return Array.from({ length: (end - start) }, (v, k) => k + start); + return Array.from({ length: end - start }, (v, k) => k + start); } export async function getFreePublicPort(id, dockerId) { const { default: isReachable } = await import('is-port-reachable'); @@ -1148,32 +1230,44 @@ export async function getFreePublicPort(id, dockerId) { ).map((a) => a.publicPort); const wpFtpUsed = await ( await prisma.wordpress.findMany({ - where: { ftpPublicPort: { not: null }, id: { not: id }, service: { destinationDockerId: dockerId } }, + where: { + ftpPublicPort: { not: null }, + id: { not: id }, + service: { destinationDockerId: dockerId } + }, select: { ftpPublicPort: true } }) ).map((a) => a.ftpPublicPort); const 
wpUsed = await ( await prisma.wordpress.findMany({ - where: { mysqlPublicPort: { not: null }, id: { not: id }, service: { destinationDockerId: dockerId } }, + where: { + mysqlPublicPort: { not: null }, + id: { not: id }, + service: { destinationDockerId: dockerId } + }, select: { mysqlPublicPort: true } }) ).map((a) => a.mysqlPublicPort); const minioUsed = await ( await prisma.minio.findMany({ - where: { publicPort: { not: null }, id: { not: id }, service: { destinationDockerId: dockerId } }, + where: { + publicPort: { not: null }, + id: { not: id }, + service: { destinationDockerId: dockerId } + }, select: { publicPort: true } }) ).map((a) => a.publicPort); const usedPorts = [...dbUsed, ...wpFtpUsed, ...wpUsed, ...minioUsed]; - const range = generateRangeArray(minPort, maxPort) - const availablePorts = range.filter(port => !usedPorts.includes(port)) + const range = generateRangeArray(minPort, maxPort); + const availablePorts = range.filter((port) => !usedPorts.includes(port)); for (const port of availablePorts) { - const found = await isReachable(port, { host: 'localhost' }) + const found = await isReachable(port, { host: 'localhost' }); if (!found) { - return port + return port; } } - return false + return false; } export async function startTraefikTCPProxy( @@ -1190,24 +1284,28 @@ export async function startTraefikTCPProxy( let dependentId = id; if (type === 'wordpressftp') dependentId = `${id}-ftp`; - const foundDependentContainer = await checkContainer({ dockerId, container: dependentId, remove: true }); + const foundDependentContainer = await checkContainer({ + dockerId, + container: dependentId, + remove: true + }); try { if (foundDependentContainer && !found) { const { stdout: Config } = await executeDockerCmd({ dockerId, command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'` - }) + }); const ip = JSON.parse(Config)[0].Gateway; - let traefikUrl = otherTraefikEndpoint + let traefikUrl = otherTraefikEndpoint; if (remoteEngine) { - let ip = null + let ip = null; if (isDev) { - ip = getAPIUrl() + ip = getAPIUrl(); } else { - ip = `http://${ipv4 || ipv6}:3000` + ip = `http://${ipv4 || ipv6}:3000`; } - traefikUrl = `${ip}/webhooks/traefik/other.json` + traefikUrl = `${ip}/webhooks/traefik/other.json`; } const tcpProxy = { version: '3.8', @@ -1243,28 +1341,34 @@ export async function startTraefikTCPProxy( await executeDockerCmd({ dockerId, command: `docker compose -f /tmp/docker-compose-${id}.yaml up -d` - }) + }); await fs.rm(`/tmp/docker-compose-${id}.yaml`); } if (!foundDependentContainer && found) { await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${container} && docker rm ${container}` - }) + }); } } catch (error) { return error; } } -export async function getServiceFromDB({ id, teamId }: { id: string; teamId: string }): Promise { +export async function getServiceFromDB({ + id, + teamId +}: { + id: string; + teamId: string; +}): Promise { const settings = await prisma.setting.findFirst(); const body = await prisma.service.findFirst({ where: { id, teams: { some: { id: teamId === '0' ? 
undefined : teamId } } }, include: includeServices }); - let { type } = body - type = fixType(type) + let { type } = body; + type = fixType(type); if (body?.serviceSecret.length > 0) { body.serviceSecret = body.serviceSecret.map((s) => { @@ -1273,7 +1377,7 @@ export async function getServiceFromDB({ id, teamId }: { id: string; teamId: str }); } - body[type] = { ...body[type], ...getUpdateableFields(type, body[type]) } + body[type] = { ...body[type], ...getUpdateableFields(type, body[type]) }; return { ...body, settings }; } @@ -1297,52 +1401,52 @@ export function saveUpdateableFields(type: string, data: any) { const update = {}; if (type && serviceFields[type]) { serviceFields[type].map((k) => { - let temp = data[k.name] + let temp = data[k.name]; if (temp) { if (k.isEncrypted) { - temp = encrypt(temp) + temp = encrypt(temp); } if (k.isLowerCase) { - temp = temp.toLowerCase() + temp = temp.toLowerCase(); } if (k.isNumber) { - temp = Number(temp) + temp = Number(temp); } if (k.isBoolean) { - temp = Boolean(temp) + temp = Boolean(temp); } } if (k.isNumber && temp === '') { - temp = null + temp = null; } - update[k.name] = temp + update[k.name] = temp; }); } - return update + return update; } export function getUpdateableFields(type: string, data: any) { const update = {}; if (type && serviceFields[type]) { serviceFields[type].map((k) => { - let temp = data[k.name] + let temp = data[k.name]; if (temp) { if (k.isEncrypted) { - temp = decrypt(temp) + temp = decrypt(temp); } - update[k.name] = temp + update[k.name] = temp; } - update[k.name] = temp + update[k.name] = temp; }); } - return update + return update; } export function fixType(type) { // Hack to fix the type case sensitivity... if (type === 'plausibleanalytics') type = 'plausibleAnalytics'; if (type === 'meilisearch') type = 'meiliSearch'; - return type + return type; } export const getServiceMainPort = (service: string) => { @@ -1353,7 +1457,6 @@ export const getServiceMainPort = (service: string) => { return null; }; - export function makeLabelForServices(type) { return [ 'coolify.managed=true', @@ -1362,8 +1465,14 @@ export function makeLabelForServices(type) { `coolify.service.type=${type}` ]; } -export function errorHandler({ status = 500, message = 'Unknown error.' }: { status: number, message: string | any }) { - if (message.message) message = message.message +export function errorHandler({ + status = 500, + message = 'Unknown error.' 
+}: { + status: number; + message: string | any; +}) { + if (message.message) message = message.message; throw { status, message }; } export async function generateSshKeyPair(): Promise<{ publicKey: string; privateKey: string }> { @@ -1384,8 +1493,12 @@ export async function generateSshKeyPair(): Promise<{ publicKey: string; private export async function stopBuild(buildId, applicationId) { let count = 0; await new Promise(async (resolve, reject) => { - const { destinationDockerId, status } = await prisma.build.findFirst({ where: { id: buildId } }); - const { id: dockerId } = await prisma.destinationDocker.findFirst({ where: { id: destinationDockerId } }); + const { destinationDockerId, status } = await prisma.build.findFirst({ + where: { id: buildId } + }); + const { id: dockerId } = await prisma.destinationDocker.findFirst({ + where: { id: destinationDockerId } + }); const interval = setInterval(async () => { try { if (status === 'failed' || status === 'canceled') { @@ -1395,12 +1508,15 @@ export async function stopBuild(buildId, applicationId) { if (count > 15) { clearInterval(interval); if (scheduler.workers.has('deployApplication')) { - scheduler.workers.get('deployApplication').postMessage('cancel') + scheduler.workers.get('deployApplication').postMessage('cancel'); } await cleanupDB(buildId, applicationId); return reject(new Error('Deployment canceled.')); } - const { stdout: buildContainers } = await executeDockerCmd({ dockerId, command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'` }) + const { stdout: buildContainers } = await executeDockerCmd({ + dockerId, + command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'` + }); if (buildContainers) { const containersArray = buildContainers.trim().split('\n'); for (const container of containersArray) { @@ -1410,7 +1526,7 @@ export async function stopBuild(buildId, applicationId) { await removeContainer({ id, dockerId }); clearInterval(interval); if (scheduler.workers.has('deployApplication')) { - scheduler.workers.get('deployApplication').postMessage('cancel') + scheduler.workers.get('deployApplication').postMessage('cancel'); } await cleanupDB(buildId, applicationId); return resolve(); @@ -1418,7 +1534,7 @@ export async function stopBuild(buildId, applicationId) { } } count++; - } catch (error) { } + } catch (error) {} }, 100); }); } @@ -1433,38 +1549,44 @@ async function cleanupDB(buildId: string, applicationId: string) { export function convertTolOldVolumeNames(type) { if (type === 'nocodb') { - return 'nc' + return 'nc'; } } export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) { // Cleanup old coolify images try { - let { stdout: images } = await executeDockerCmd({ dockerId, command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r` }) + let { stdout: images } = await executeDockerCmd({ + dockerId, + command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r` + }); images = images.trim(); if (images) { - await executeDockerCmd({ dockerId, command: `docker rmi -f ${images}" -q | xargs -r` }) + await executeDockerCmd({ dockerId, command: `docker rmi -f ${images}" -q | xargs -r` }); } - } catch (error) { } + } catch (error) {} if (lowDiskSpace || force) { if (isDev) { if (!force) console.log(`[DEV MODE] Low disk space: ${lowDiskSpace}`); - return + return; } try { - await executeDockerCmd({ dockerId, command: `docker container prune -f --filter 
"label=coolify.managed=true"` }) - } catch (error) { } + await executeDockerCmd({ + dockerId, + command: `docker container prune -f --filter "label=coolify.managed=true"` + }); + } catch (error) {} try { - await executeDockerCmd({ dockerId, command: `docker image prune -f` }) - } catch (error) { } + await executeDockerCmd({ dockerId, command: `docker image prune -f` }); + } catch (error) {} try { - await executeDockerCmd({ dockerId, command: `docker image prune -a -f` }) - } catch (error) { } + await executeDockerCmd({ dockerId, command: `docker image prune -a -f` }); + } catch (error) {} // Cleanup build caches try { - await executeDockerCmd({ dockerId, command: `docker builder prune -a -f` }) - } catch (error) { } + await executeDockerCmd({ dockerId, command: `docker builder prune -a -f` }); + } catch (error) {} } } @@ -1477,7 +1599,6 @@ export function persistentVolumes(id, persistentStorage, config) { volumeSet.add(volume); } } - } } const volumesArray = Array.from(volumeSet); @@ -1486,21 +1607,21 @@ export function persistentVolumes(id, persistentStorage, config) { return `${id}${storage.path.replace(/\//gi, '-')}:${storage.path}`; }) || []; - let volumes = [...persistentVolume] - if (volumesArray) volumes = [...volumesArray, ...volumes] - const composeVolumes = volumes.length > 0 && volumes.map((volume) => { - return { - [`${volume.split(':')[0]}`]: { - name: volume.split(':')[0] - } - }; - }) || [] + let volumes = [...persistentVolume]; + if (volumesArray) volumes = [...volumesArray, ...volumes]; + const composeVolumes = + (volumes.length > 0 && + volumes.map((volume) => { + return { + [`${volume.split(':')[0]}`]: { + name: volume.split(':')[0] + } + }; + })) || + []; - const volumeMounts = Object.assign( - {}, - ...composeVolumes - ) || {} - return { volumeMounts } + const volumeMounts = Object.assign({}, ...composeVolumes) || {}; + return { volumeMounts }; } export function defaultComposeConfiguration(network: string): any { return { @@ -1514,25 +1635,29 @@ export function defaultComposeConfiguration(network: string): any { window: '120s' } } - } + }; } export function decryptApplication(application: any) { if (application) { if (application?.gitSource?.githubApp?.clientSecret) { - application.gitSource.githubApp.clientSecret = decrypt(application.gitSource.githubApp.clientSecret) || null; + application.gitSource.githubApp.clientSecret = + decrypt(application.gitSource.githubApp.clientSecret) || null; } if (application?.gitSource?.githubApp?.webhookSecret) { - application.gitSource.githubApp.webhookSecret = decrypt(application.gitSource.githubApp.webhookSecret) || null; + application.gitSource.githubApp.webhookSecret = + decrypt(application.gitSource.githubApp.webhookSecret) || null; } if (application?.gitSource?.githubApp?.privateKey) { - application.gitSource.githubApp.privateKey = decrypt(application.gitSource.githubApp.privateKey) || null; + application.gitSource.githubApp.privateKey = + decrypt(application.gitSource.githubApp.privateKey) || null; } if (application?.gitSource?.gitlabApp?.appSecret) { - application.gitSource.gitlabApp.appSecret = decrypt(application.gitSource.gitlabApp.appSecret) || null; + application.gitSource.gitlabApp.appSecret = + decrypt(application.gitSource.gitlabApp.appSecret) || null; } if (application?.secrets.length > 0) { application.secrets = application.secrets.map((s: any) => { - s.value = decrypt(s.value) || null + s.value = decrypt(s.value) || null; return s; }); } diff --git a/apps/ui/src/lib/api.ts b/apps/ui/src/lib/api.ts index 
697a29338..bc7b4d9ac 100644 --- a/apps/ui/src/lib/api.ts +++ b/apps/ui/src/lib/api.ts @@ -3,33 +3,35 @@ import Cookies from 'js-cookie'; export function getAPIUrl() { if (GITPOD_WORKSPACE_URL) { - const { href } = new URL(GITPOD_WORKSPACE_URL) - const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '') - return newURL + const { href } = new URL(GITPOD_WORKSPACE_URL); + const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, ''); + return newURL; } if (CODESANDBOX_HOST) { - return `https://${CODESANDBOX_HOST.replace(/\$PORT/,'3001')}` + return `https://${CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`; } - return dev ? 'http://localhost:3001' : 'http://localhost:3000'; + return dev + ? 'https://kaname-png-coolify-vrgj9w6jq5x2xxxv-3001.githubpreview.dev' + : 'https://kaname-png-coolify-vrgj9w6jq5x2xxxv-3000.githubpreview.dev'; } export function getWebhookUrl(type: string) { if (GITPOD_WORKSPACE_URL) { - const { href } = new URL(GITPOD_WORKSPACE_URL) - const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '') + const { href } = new URL(GITPOD_WORKSPACE_URL); + const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, ''); if (type === 'github') { - return `${newURL}/webhooks/github/events` + return `${newURL}/webhooks/github/events`; } if (type === 'gitlab') { - return `${newURL}/webhooks/gitlab/events` + return `${newURL}/webhooks/gitlab/events`; } } if (CODESANDBOX_HOST) { - const newURL = `https://${CODESANDBOX_HOST.replace(/\$PORT/,'3001')}` + const newURL = `https://${CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`; if (type === 'github') { - return `${newURL}/webhooks/github/events` + return `${newURL}/webhooks/github/events`; } if (type === 'gitlab') { - return `${newURL}/webhooks/gitlab/events` + return `${newURL}/webhooks/gitlab/events`; } } return `https://webhook.site/0e5beb2c-4e9b-40e2-a89e-32295e570c21/events`; @@ -103,7 +105,11 @@ async function send({ return {}; } if (!response.ok) { - if (response.status === 401 && !path.startsWith('https://api.github') && !path.includes('/v4/user')) { + if ( + response.status === 401 && + !path.startsWith('https://api.github') && + !path.includes('/v4/user') + ) { Cookies.remove('token'); } diff --git a/apps/ui/src/routes/__layout.svelte b/apps/ui/src/routes/__layout.svelte index 901aedea5..5701cfa21 100644 --- a/apps/ui/src/routes/__layout.svelte +++ b/apps/ui/src/routes/__layout.svelte @@ -120,26 +120,329 @@ {/if} -{#if $appSession.userId} - - {#if $appSession.whiteLabeled} - Powered by Coolify - {/if} -{/if} -
-
- + +
-
+ Dashboard Applications Date: Tue, 6 Sep 2022 18:51:19 +0000 Subject: [PATCH 02/81] fix(routes): improve design of application page --- apps/ui/src/routes/__layout.svelte | 6 +- .../routes/applications/[id]/__layout.svelte | 767 ++++++++++-------- .../src/routes/applications/[id]/index.svelte | 43 +- 3 files changed, 435 insertions(+), 381 deletions(-) diff --git a/apps/ui/src/routes/__layout.svelte b/apps/ui/src/routes/__layout.svelte index 5701cfa21..df018a59d 100644 --- a/apps/ui/src/routes/__layout.svelte +++ b/apps/ui/src/routes/__layout.svelte @@ -124,7 +124,7 @@
{#if $appSession.userId} -