diff --git a/README.md b/README.md index d214d42b7..f342fd74a 100644 --- a/README.md +++ b/README.md @@ -13,10 +13,16 @@ ## How to install Installation is automated with the following command: ```bash -/bin/bash -c "$(curl -fsSL https://get.coollabs.io/coolify/install.sh)" +wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh ``` -If you would like no questions during installation +If you would like to skip all questions during installation: + +```bash +wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh -f +``` + +For more details, see the [docs](https://docs.coollabs.io/coolify/installation). ## Features @@ -77,6 +83,8 @@ ### One-click services - [LanguageTool](https://languagetool.org) - [n8n](https://n8n.io) - [Uptime Kuma](https://github.com/louislam/uptime-kuma) +- [MeiliSearch](https://github.com/meilisearch/meilisearch) +- [Umami](https://github.com/mikecao/umami) ## Migration from v1 diff --git a/package.json b/package.json index 9f38331ad..1cffb1afb 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "coolify", "description": "An open-source & self-hostable Heroku / Netlify alternative.", - "version": "2.5.2", + "version": "2.6.0", "license": "AGPL-3.0", "scripts": { "dev": "docker-compose -f docker-compose-dev.yaml up -d && cross-env NODE_ENV=development & svelte-kit dev --host 0.0.0.0", @@ -55,8 +55,8 @@ "svelte-check": "2.7.0", "svelte-preprocess": "4.10.6", "svelte-select": "4.4.7", - "tailwindcss": "3.0.24", "sveltekit-i18n": "2.1.2", + "tailwindcss": "3.0.24", "ts-node": "10.7.0", "tslib": "2.3.1", "typescript": "4.6.3" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 9c608add7..8af565eeb 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -48,8 +48,8 @@ specifiers: svelte-kit-cookie-session: 2.1.3 svelte-preprocess: 4.10.6 svelte-select: 4.4.7 - tailwindcss: 3.0.24 sveltekit-i18n: 2.1.2 + tailwindcss: 3.0.24 tailwindcss-scrollbar: 0.1.0 ts-node: 10.7.0 tslib: 2.3.1 @@ -108,9 +108,9 @@ devDependencies: svelte-check: 2.7.0_postcss@8.4.12+svelte@3.47.0 svelte-preprocess: 4.10.6_41810887ae6c6d59323116f47e33fa38 svelte-select: 4.4.7 + sveltekit-i18n: 2.1.2_svelte@3.47.0 tailwindcss: 3.0.24_ts-node@10.7.0 ts-node: 10.7.0_de7c86b0cde507c63a0402da5b982bd3 - sveltekit-i18n: 2.1.2_svelte@3.46.4 tslib: 2.3.1 typescript: 4.6.3 @@ -424,7 +424,7 @@ packages: - supports-color dev: true - /@sveltekit-i18n/base/1.1.1_svelte@3.46.4: + /@sveltekit-i18n/base/1.1.1_svelte@3.47.0: resolution: { integrity: sha512-J/sMU0OwS3dCLOuilHMBqu8vZHuuXiNV9vFJx8Nb4/b5BlR/KCZ4bCXI8wZR02GHeCOYKZxWus07CM1scxa/jw== } peerDependencies: svelte: ^3.x dependencies: - svelte: 3.46.4 + svelte: 3.47.0 optionalDependencies: '@sveltekit-i18n/parser-default': 1.0.3 dev: true @@ -4977,7 +4977,7 @@ packages: engines: { node: '>= 8' } dev: true - /sveltekit-i18n/2.1.2_svelte@3.46.4: + /sveltekit-i18n/2.1.2_svelte@3.47.0: resolution: { integrity: sha512-s5YxcbNd2EWNZaZR1A4Drt8s53E4fpUkN4XIWd3VRpw1pihZVWssqmBW1qkjQ6AB0kiu1Qwule+vt1HkbQOjrg== } peerDependencies: svelte: ^3.x dependencies: - '@sveltekit-i18n/base': 1.1.1_svelte@3.46.4 + '@sveltekit-i18n/base': 1.1.1_svelte@3.47.0 '@sveltekit-i18n/parser-default': 1.0.3 - svelte: 3.46.4 + svelte: 3.47.0 dev: true /table/6.7.2: diff --git a/prisma/migrations/20220426125053_select_base_image/migration.sql b/prisma/migrations/20220426125053_select_base_image/migration.sql new file mode 100644 index 000000000..37209d82f --- /dev/null +++ 
b/prisma/migrations/20220426125053_select_base_image/migration.sql @@ -0,0 +1,3 @@ +-- AlterTable +ALTER TABLE "Application" ADD COLUMN "baseBuildImage" TEXT; +ALTER TABLE "Application" ADD COLUMN "baseImage" TEXT; diff --git a/prisma/migrations/20220427133656_hasura/migration.sql b/prisma/migrations/20220427133656_hasura/migration.sql new file mode 100644 index 000000000..c679ad0fb --- /dev/null +++ b/prisma/migrations/20220427133656_hasura/migration.sql @@ -0,0 +1,16 @@ +-- CreateTable +CREATE TABLE "Hasura" ( + "id" TEXT NOT NULL PRIMARY KEY, + "serviceId" TEXT NOT NULL, + "postgresqlUser" TEXT NOT NULL, + "postgresqlPassword" TEXT NOT NULL, + "postgresqlDatabase" TEXT NOT NULL, + "postgresqlPublicPort" INTEGER, + "graphQLAdminPassword" TEXT NOT NULL, + "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" DATETIME NOT NULL, + CONSTRAINT "Hasura_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE +); + +-- CreateIndex +CREATE UNIQUE INDEX "Hasura_serviceId_key" ON "Hasura"("serviceId"); diff --git a/prisma/migrations/20220429202516_fider/migration.sql b/prisma/migrations/20220429202516_fider/migration.sql new file mode 100644 index 000000000..a6d31a24d --- /dev/null +++ b/prisma/migrations/20220429202516_fider/migration.sql @@ -0,0 +1,25 @@ +-- CreateTable +CREATE TABLE "Fider" ( + "id" TEXT NOT NULL PRIMARY KEY, + "serviceId" TEXT NOT NULL, + "postgresqlUser" TEXT NOT NULL, + "postgresqlPassword" TEXT NOT NULL, + "postgresqlDatabase" TEXT NOT NULL, + "postgresqlPublicPort" INTEGER, + "jwtSecret" TEXT NOT NULL, + "emailNoreply" TEXT, + "emailMailgunApiKey" TEXT, + "emailMailgunDomain" TEXT, + "emailMailgunRegion" TEXT, + "emailSmtpHost" TEXT, + "emailSmtpPort" INTEGER, + "emailSmtpUser" TEXT, + "emailSmtpPassword" TEXT, + "emailSmtpEnableStartTls" BOOLEAN NOT NULL DEFAULT false, + "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" DATETIME NOT NULL, + CONSTRAINT "Fider_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE +); + +-- CreateIndex +CREATE UNIQUE INDEX "Fider_serviceId_key" ON "Fider"("serviceId"); diff --git a/prisma/migrations/20220429214112_fider_correction/migration.sql b/prisma/migrations/20220429214112_fider_correction/migration.sql new file mode 100644 index 000000000..429524f72 --- /dev/null +++ b/prisma/migrations/20220429214112_fider_correction/migration.sql @@ -0,0 +1,29 @@ +-- RedefineTables +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_Fider" ( + "id" TEXT NOT NULL PRIMARY KEY, + "serviceId" TEXT NOT NULL, + "postgresqlUser" TEXT NOT NULL, + "postgresqlPassword" TEXT NOT NULL, + "postgresqlDatabase" TEXT NOT NULL, + "postgresqlPublicPort" INTEGER, + "jwtSecret" TEXT NOT NULL, + "emailNoreply" TEXT, + "emailMailgunApiKey" TEXT, + "emailMailgunDomain" TEXT, + "emailMailgunRegion" TEXT NOT NULL DEFAULT 'EU', + "emailSmtpHost" TEXT, + "emailSmtpPort" INTEGER, + "emailSmtpUser" TEXT, + "emailSmtpPassword" TEXT, + "emailSmtpEnableStartTls" BOOLEAN NOT NULL DEFAULT false, + "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" DATETIME NOT NULL, + CONSTRAINT "Fider_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE +); +INSERT INTO "new_Fider" ("createdAt", "emailMailgunApiKey", "emailMailgunDomain", "emailMailgunRegion", "emailNoreply", "emailSmtpEnableStartTls", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUser", "id", "jwtSecret", 
"postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "serviceId", "updatedAt") SELECT "createdAt", "emailMailgunApiKey", "emailMailgunDomain", coalesce("emailMailgunRegion", 'EU') AS "emailMailgunRegion", "emailNoreply", "emailSmtpEnableStartTls", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUser", "id", "jwtSecret", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "serviceId", "updatedAt" FROM "Fider"; +DROP TABLE "Fider"; +ALTER TABLE "new_Fider" RENAME TO "Fider"; +CREATE UNIQUE INDEX "Fider_serviceId_key" ON "Fider"("serviceId"); +PRAGMA foreign_key_check; +PRAGMA foreign_keys=ON; diff --git a/prisma/migrations/20220430111953_ssl_dns_check_settings/migration.sql b/prisma/migrations/20220430111953_ssl_dns_check_settings/migration.sql new file mode 100644 index 000000000..cf57379ca --- /dev/null +++ b/prisma/migrations/20220430111953_ssl_dns_check_settings/migration.sql @@ -0,0 +1,23 @@ +-- RedefineTables +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_Setting" ( + "id" TEXT NOT NULL PRIMARY KEY, + "fqdn" TEXT, + "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false, + "dualCerts" BOOLEAN NOT NULL DEFAULT false, + "minPort" INTEGER NOT NULL DEFAULT 9000, + "maxPort" INTEGER NOT NULL DEFAULT 9100, + "proxyPassword" TEXT NOT NULL, + "proxyUser" TEXT NOT NULL, + "proxyHash" TEXT, + "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false, + "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true, + "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" DATETIME NOT NULL +); +INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting"; +DROP TABLE "Setting"; +ALTER TABLE "new_Setting" RENAME TO "Setting"; +CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn"); +PRAGMA foreign_key_check; +PRAGMA foreign_keys=ON; diff --git a/prisma/schema.prisma b/prisma/schema.prisma index 82ba9baa8..58e4fd8a2 100644 --- a/prisma/schema.prisma +++ b/prisma/schema.prisma @@ -19,6 +19,7 @@ model Setting { proxyUser String proxyHash String? isAutoUpdateEnabled Boolean @default(false) + isDNSCheckEnabled Boolean @default(true) createdAt DateTime @default(now()) updatedAt DateTime @updatedAt } @@ -105,6 +106,8 @@ model Application { gitSource GitSource? @relation(fields: [gitSourceId], references: [id]) secrets Secret[] persistentStorage ApplicationPersistentStorage[] + baseImage String? + baseBuildImage String? } model ApplicationSettings { @@ -303,6 +306,8 @@ model Service { meiliSearch MeiliSearch? persistentStorage ServicePersistentStorage[] umami Umami? + hasura Hasura? + fider Fider? } model PlausibleAnalytics { @@ -401,3 +406,38 @@ model Umami { createdAt DateTime @default(now()) updatedAt DateTime @updatedAt } + +model Hasura { + id String @id @default(cuid()) + serviceId String @unique + postgresqlUser String + postgresqlPassword String + postgresqlDatabase String + postgresqlPublicPort Int? 
+ graphQLAdminPassword String + service Service @relation(fields: [serviceId], references: [id]) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} + +model Fider { + id String @id @default(cuid()) + serviceId String @unique + postgresqlUser String + postgresqlPassword String + postgresqlDatabase String + postgresqlPublicPort Int? + jwtSecret String + emailNoreply String? + emailMailgunApiKey String? + emailMailgunDomain String? + emailMailgunRegion String @default("EU") + emailSmtpHost String? + emailSmtpPort Int? + emailSmtpUser String? + emailSmtpPassword String? + emailSmtpEnableStartTls Boolean @default(false) + service Service @relation(fields: [serviceId], references: [id]) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} diff --git a/src/lib/buildPacks/common.ts b/src/lib/buildPacks/common.ts index 05313a3d2..6ac518136 100644 --- a/src/lib/buildPacks/common.ts +++ b/src/lib/buildPacks/common.ts @@ -5,6 +5,19 @@ import { scanningTemplates } from '$lib/components/templates'; import { promises as fs } from 'fs'; import { staticDeployments } from '$lib/components/common'; +const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy']; +const nodeBased = [ + 'react', + 'vuejs', + 'svelte', + 'gatsby', + 'php', + 'astro', + 'eleventy', + 'node', + 'nestjs' +]; + export function makeLabelForStandaloneApplication({ applicationId, fqdn, @@ -104,11 +117,12 @@ export const setDefaultConfiguration = async (data) => { else if (buildPack === 'php') port = 80; else if (buildPack === 'python') port = 8000; } - if (!installCommand && buildPack !== 'static') + if (!installCommand && buildPack !== 'static' && buildPack !== 'laravel') installCommand = template?.installCommand || 'yarn install'; - if (!startCommand && buildPack !== 'static') + if (!startCommand && buildPack !== 'static' && buildPack !== 'laravel') startCommand = template?.startCommand || 'yarn start'; - if (!buildCommand && buildPack !== 'static') buildCommand = template?.buildCommand || null; + if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel') + buildCommand = template?.buildCommand || null; if (!publishDirectory) publishDirectory = template?.publishDirectory || null; if (baseDirectory) { if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`; @@ -137,7 +151,13 @@ export const setDefaultConfiguration = async (data) => { }; }; -export async function copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId) { +export async function copyBaseConfigurationFiles( + buildPack, + workdir, + buildId, + applicationId, + baseImage +) { try { if (buildPack === 'php') { await fs.writeFile(`${workdir}/entrypoint.sh`, `chown -R 1000 /app`); @@ -146,7 +166,7 @@ export async function copyBaseConfigurationFiles(buildPack, workdir, buildId, ap buildId, applicationId }); - } else if (staticDeployments.includes(buildPack)) { + } else if (staticDeployments.includes(buildPack) && baseImage.includes('nginx')) { await fs.writeFile( `${workdir}/nginx.conf`, `user nginx; @@ -174,7 +194,7 @@ export async function copyBaseConfigurationFiles(buildPack, workdir, buildId, ap include /etc/nginx/mime.types; default_type application/octet-stream; - + server { listen 80; server_name localhost; @@ -199,11 +219,6 @@ export async function copyBaseConfigurationFiles(buildPack, workdir, buildId, ap } ` ); - await saveBuildLog({ - line: 'Copied default configuration file for Nginx.', - buildId, - applicationId - }); } } catch (error) { console.log(error); 
@@ -218,3 +233,215 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma startCommand?.includes('pnpm') ); } + +export function setDefaultBaseImage(buildPack) { + const nodeVersions = [ + { + value: 'node:lts', + label: 'node:lts' + }, + { + value: 'node:18', + label: 'node:18' + }, + { + value: 'node:17', + label: 'node:17' + }, + { + value: 'node:16', + label: 'node:16' + }, + { + value: 'node:14', + label: 'node:14' + }, + { + value: 'node:12', + label: 'node:12' + } + ]; + const staticVersions = [ + { + value: 'webdevops/nginx:alpine', + label: 'webdevops/nginx:alpine' + }, + { + value: 'webdevops/apache:alpine', + label: 'webdevops/apache:alpine' + } + ]; + const rustVersions = [ + { + value: 'rust:latest', + label: 'rust:latest' + }, + { + value: 'rust:1.60', + label: 'rust:1.60' + }, + { + value: 'rust:1.60-buster', + label: 'rust:1.60-buster' + }, + { + value: 'rust:1.60-bullseye', + label: 'rust:1.60-bullseye' + }, + { + value: 'rust:1.60-slim-buster', + label: 'rust:1.60-slim-buster' + }, + { + value: 'rust:1.60-slim-bullseye', + label: 'rust:1.60-slim-bullseye' + }, + { + value: 'rust:1.60-alpine3.14', + label: 'rust:1.60-alpine3.14' + }, + { + value: 'rust:1.60-alpine3.15', + label: 'rust:1.60-alpine3.15' + } + ]; + const phpVersions = [ + { + value: 'webdevops/php-apache:8.0', + label: 'webdevops/php-apache:8.0' + }, + { + value: 'webdevops/php-nginx:8.0', + label: 'webdevops/php-nginx:8.0' + }, + { + value: 'webdevops/php-apache:7.4', + label: 'webdevops/php-apache:7.4' + }, + { + value: 'webdevops/php-nginx:7.4', + label: 'webdevops/php-nginx:7.4' + }, + { + value: 'webdevops/php-apache:7.3', + label: 'webdevops/php-apache:7.3' + }, + { + value: 'webdevops/php-nginx:7.3', + label: 'webdevops/php-nginx:7.3' + }, + { + value: 'webdevops/php-apache:7.2', + label: 'webdevops/php-apache:7.2' + }, + { + value: 'webdevops/php-nginx:7.2', + label: 'webdevops/php-nginx:7.2' + }, + { + value: 'webdevops/php-apache:7.1', + label: 'webdevops/php-apache:7.1' + }, + { + value: 'webdevops/php-nginx:7.1', + label: 'webdevops/php-nginx:7.1' + }, + { + value: 'webdevops/php-apache:7.0', + label: 'webdevops/php-apache:7.0' + }, + { + value: 'webdevops/php-nginx:7.0', + label: 'webdevops/php-nginx:7.0' + }, + { + value: 'webdevops/php-apache:5.6', + label: 'webdevops/php-apache:5.6' + }, + { + value: 'webdevops/php-nginx:5.6', + label: 'webdevops/php-nginx:5.6' + }, + { + value: 'webdevops/php-apache:8.0-alpine', + label: 'webdevops/php-apache:8.0-alpine' + }, + { + value: 'webdevops/php-nginx:8.0-alpine', + label: 'webdevops/php-nginx:8.0-alpine' + }, + { + value: 'webdevops/php-apache:7.4-alpine', + label: 'webdevops/php-apache:7.4-alpine' + }, + { + value: 'webdevops/php-nginx:7.4-alpine', + label: 'webdevops/php-nginx:7.4-alpine' + }, + { + value: 'webdevops/php-apache:7.3-alpine', + label: 'webdevops/php-apache:7.3-alpine' + }, + { + value: 'webdevops/php-nginx:7.3-alpine', + label: 'webdevops/php-nginx:7.3-alpine' + }, + { + value: 'webdevops/php-apache:7.2-alpine', + label: 'webdevops/php-apache:7.2-alpine' + }, + { + value: 'webdevops/php-nginx:7.2-alpine', + label: 'webdevops/php-nginx:7.2-alpine' + }, + { + value: 'webdevops/php-apache:7.1-alpine', + label: 'webdevops/php-apache:7.1-alpine' + }, + { + value: 'webdevops/php-nginx:7.1-alpine', + label: 'webdevops/php-nginx:7.1-alpine' + } + ]; + + let payload = { + baseImage: null, + baseBuildImage: null, + baseImages: [], + baseBuildImages: [] + }; + if (nodeBased.includes(buildPack)) { + payload.baseImage 
= 'node:lts'; + payload.baseImages = nodeVersions; + payload.baseBuildImage = 'node:lts'; + payload.baseBuildImages = nodeVersions; + } + if (staticApps.includes(buildPack)) { + payload.baseImage = 'webdevops/nginx:alpine'; + payload.baseImages = staticVersions; + payload.baseBuildImage = 'node:lts'; + payload.baseBuildImages = nodeVersions; + } + if (buildPack === 'python') { + payload.baseImage = 'python:3-alpine'; + } + if (buildPack === 'rust') { + payload.baseImage = 'rust:latest'; + payload.baseBuildImage = 'rust:latest'; + payload.baseImages = rustVersions; + payload.baseBuildImages = rustVersions; + } + if (buildPack === 'deno') { + payload.baseImage = 'denoland/deno:latest'; + } + if (buildPack === 'php') { + payload.baseImage = 'webdevops/php-apache:8.0-alpine'; + payload.baseImages = phpVersions; + } + if (buildPack === 'laravel') { + payload.baseImage = 'webdevops/php-apache:8.0-alpine'; + payload.baseBuildImage = 'node:18'; + payload.baseBuildImages = nodeVersions; + } + return payload; +} diff --git a/src/lib/buildPacks/deno.ts b/src/lib/buildPacks/deno.ts index 2e5569438..b593596f7 100644 --- a/src/lib/buildPacks/deno.ts +++ b/src/lib/buildPacks/deno.ts @@ -2,8 +2,16 @@ import { buildImage } from '$lib/docker'; import { promises as fs } from 'fs'; const createDockerfile = async (data, image): Promise => { - const { workdir, port, baseDirectory, secrets, pullmergeRequestId, denoMainFile, denoOptions } = - data; + const { + workdir, + port, + baseDirectory, + secrets, + pullmergeRequestId, + denoMainFile, + denoOptions, + buildId + } = data; const Dockerfile: Array = []; let depsFound = false; @@ -14,7 +22,7 @@ const createDockerfile = async (data, image): Promise => { Dockerfile.push(`FROM ${image}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); if (secrets.length > 0) { secrets.forEach((secret) => { if (secret.isBuildSecret) { @@ -45,8 +53,8 @@ const createDockerfile = async (data, image): Promise => { export default async function (data) { try { - const image = 'denoland/deno:latest'; - await createDockerfile(data, image); + const { baseImage, baseBuildImage } = data; + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/docker.ts b/src/lib/buildPacks/docker.ts index 1ddf81a60..3b032fc4b 100644 --- a/src/lib/buildPacks/docker.ts +++ b/src/lib/buildPacks/docker.ts @@ -24,6 +24,7 @@ export default async function ({ .toString() .trim() .split('\n'); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); if (secrets.length > 0) { secrets.forEach((secret) => { if (secret.isBuildSecret) { @@ -42,6 +43,7 @@ export default async function ({ } }); } + await fs.writeFile(`${dockerFileOut}${dockerFileLocation}`, Dockerfile.join('\n')); await buildImage({ applicationId, tag, workdir, docker, buildId, debug, dockerFileLocation }); } catch (error) { diff --git a/src/lib/buildPacks/gatsby.ts b/src/lib/buildPacks/gatsby.ts index 38989626d..cdf95f1dd 100644 --- a/src/lib/buildPacks/gatsby.ts +++ b/src/lib/buildPacks/gatsby.ts @@ -2,25 +2,25 @@ import { buildCacheImageWithNode, buildImage } from '$lib/docker'; import { promises as fs } from 'fs'; const createDockerfile = async (data, imageforBuild): Promise => { - const { applicationId, tag, workdir, publishDirectory } = data; + const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data; const Dockerfile: Array = []; Dockerfile.push(`FROM 
${imageforBuild}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`); - Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`); + if (baseImage.includes('nginx')) { + Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`); + } Dockerfile.push(`EXPOSE 80`); await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n')); }; export default async function (data) { try { - const image = 'webdevops/nginx:alpine'; - const imageForBuild = 'node:lts'; - - await buildCacheImageWithNode(data, imageForBuild); - await createDockerfile(data, image); + const { baseImage, baseBuildImage } = data; + await buildCacheImageWithNode(data, baseBuildImage); + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/index.ts b/src/lib/buildPacks/index.ts index ac31afc16..163aa3a7d 100644 --- a/src/lib/buildPacks/index.ts +++ b/src/lib/buildPacks/index.ts @@ -14,6 +14,7 @@ import astro from './static'; import eleventy from './static'; import python from './python'; import deno from './deno'; +import laravel from './laravel'; export { node, @@ -31,5 +32,6 @@ export { astro, eleventy, python, - deno + deno, + laravel }; diff --git a/src/lib/buildPacks/laravel.ts b/src/lib/buildPacks/laravel.ts new file mode 100644 index 000000000..a83363dc0 --- /dev/null +++ b/src/lib/buildPacks/laravel.ts @@ -0,0 +1,40 @@ +import { buildCacheImageForLaravel, buildImage } from '$lib/docker'; +import { promises as fs } from 'fs'; + +const createDockerfile = async (data, image): Promise => { + const { workdir, applicationId, tag, buildId } = data; + const Dockerfile: Array = []; + + Dockerfile.push(`FROM ${image}`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); + Dockerfile.push('WORKDIR /app'); + Dockerfile.push(`ENV WEB_DOCUMENT_ROOT /app/public`); + Dockerfile.push(`COPY --chown=application:application composer.* ./`); + Dockerfile.push(`COPY --chown=application:application database/ database/`); + Dockerfile.push( + `RUN composer install --ignore-platform-reqs --no-interaction --no-plugins --no-scripts --prefer-dist` + ); + Dockerfile.push( + `COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/js/ /app/public/js/` + ); + Dockerfile.push( + `COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/css/ /app/public/css/` + ); + Dockerfile.push( + `COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json` + ); + Dockerfile.push(`COPY --chown=application:application . 
./`); + Dockerfile.push(`EXPOSE 80`); + await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n')); +}; + +export default async function (data) { + const { baseImage, baseBuildImage } = data; + try { + await buildCacheImageForLaravel(data, baseBuildImage); + await createDockerfile(data, baseImage); + await buildImage(data); + } catch (error) { + throw error; + } +} diff --git a/src/lib/buildPacks/nestjs.ts b/src/lib/buildPacks/nestjs.ts index 915bdd3d7..b0bb6ba89 100644 --- a/src/lib/buildPacks/nestjs.ts +++ b/src/lib/buildPacks/nestjs.ts @@ -2,13 +2,13 @@ import { buildCacheImageWithNode, buildImage } from '$lib/docker'; import { promises as fs } from 'fs'; const createDockerfile = async (data, image): Promise => { - const { applicationId, tag, port, startCommand, workdir, baseDirectory } = data; + const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data; const Dockerfile: Array = []; const isPnpm = startCommand.includes('pnpm'); Dockerfile.push(`FROM ${image}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); if (isPnpm) { Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm'); Dockerfile.push('RUN pnpm add -g pnpm'); @@ -22,11 +22,9 @@ const createDockerfile = async (data, image): Promise => { export default async function (data) { try { - const image = 'node:lts'; - const imageForBuild = 'node:lts'; - - await buildCacheImageWithNode(data, imageForBuild); - await createDockerfile(data, image); + const { baseImage, baseBuildImage } = data; + await buildCacheImageWithNode(data, baseBuildImage); + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/nextjs.ts b/src/lib/buildPacks/nextjs.ts index 0f58b3b84..77f5bc5f8 100644 --- a/src/lib/buildPacks/nextjs.ts +++ b/src/lib/buildPacks/nextjs.ts @@ -4,6 +4,7 @@ import { checkPnpm } from './common'; const createDockerfile = async (data, image): Promise => { const { + buildId, workdir, port, installCommand, @@ -17,7 +18,7 @@ const createDockerfile = async (data, image): Promise => { const isPnpm = checkPnpm(installCommand, buildCommand, startCommand); Dockerfile.push(`FROM ${image}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); if (secrets.length > 0) { secrets.forEach((secret) => { if (secret.isBuildSecret) { @@ -50,8 +51,8 @@ const createDockerfile = async (data, image): Promise => { export default async function (data) { try { - const image = 'node:lts'; - await createDockerfile(data, image); + const { baseImage, baseBuildImage } = data; + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/node.ts b/src/lib/buildPacks/node.ts index 869e28b5d..9d8e643ef 100644 --- a/src/lib/buildPacks/node.ts +++ b/src/lib/buildPacks/node.ts @@ -11,14 +11,15 @@ const createDockerfile = async (data, image): Promise => { startCommand, baseDirectory, secrets, - pullmergeRequestId + pullmergeRequestId, + buildId } = data; const Dockerfile: Array = []; const isPnpm = checkPnpm(installCommand, buildCommand, startCommand); Dockerfile.push(`FROM ${image}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); if (secrets.length > 0) { secrets.forEach((secret) => { if (secret.isBuildSecret) { 
@@ -50,8 +51,8 @@ const createDockerfile = async (data, image): Promise => { export default async function (data) { try { - const image = 'node:lts'; - await createDockerfile(data, image); + const { baseImage, baseBuildImage } = data; + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/nuxtjs.ts b/src/lib/buildPacks/nuxtjs.ts index bfa48bf73..9cd7e9674 100644 --- a/src/lib/buildPacks/nuxtjs.ts +++ b/src/lib/buildPacks/nuxtjs.ts @@ -11,13 +11,14 @@ const createDockerfile = async (data, image): Promise => { startCommand, baseDirectory, secrets, - pullmergeRequestId + pullmergeRequestId, + buildId } = data; const Dockerfile: Array = []; const isPnpm = checkPnpm(installCommand, buildCommand, startCommand); Dockerfile.push(`FROM ${image}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); if (secrets.length > 0) { secrets.forEach((secret) => { if (secret.isBuildSecret) { @@ -49,8 +50,8 @@ const createDockerfile = async (data, image): Promise => { export default async function (data) { try { - const image = 'node:lts'; - await createDockerfile(data, image); + const { baseImage, baseBuildImage } = data; + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/php.ts b/src/lib/buildPacks/php.ts index cfb39d20a..b3c9651ce 100644 --- a/src/lib/buildPacks/php.ts +++ b/src/lib/buildPacks/php.ts @@ -2,7 +2,7 @@ import { buildImage } from '$lib/docker'; import { promises as fs } from 'fs'; const createDockerfile = async (data, image, htaccessFound): Promise => { - const { workdir, baseDirectory } = data; + const { workdir, baseDirectory, buildId } = data; const Dockerfile: Array = []; let composerFound = false; try { @@ -11,7 +11,7 @@ const createDockerfile = async (data, image, htaccessFound): Promise => { } catch (error) {} Dockerfile.push(`FROM ${image}`); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push('WORKDIR /app'); Dockerfile.push(`COPY .${baseDirectory || ''} /app`); if (htaccessFound) { @@ -27,7 +27,7 @@ const createDockerfile = async (data, image, htaccessFound): Promise => { }; export default async function (data) { - const { workdir, baseDirectory } = data; + const { workdir, baseDirectory, baseImage } = data; try { let htaccessFound = false; try { @@ -36,10 +36,7 @@ export default async function (data) { } catch (e) { // } - const image = htaccessFound - ? 
'webdevops/php-apache:8.0-alpine' - : 'webdevops/php-nginx:8.0-alpine'; - await createDockerfile(data, image, htaccessFound); + await createDockerfile(data, baseImage, htaccessFound); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/python.ts b/src/lib/buildPacks/python.ts index 1c6bdf6bf..fc5e8738b 100644 --- a/src/lib/buildPacks/python.ts +++ b/src/lib/buildPacks/python.ts @@ -10,12 +10,13 @@ const createDockerfile = async (data, image): Promise => { pullmergeRequestId, pythonWSGI, pythonModule, - pythonVariable + pythonVariable, + buildId } = data; const Dockerfile: Array = []; Dockerfile.push(`FROM ${image}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); if (secrets.length > 0) { secrets.forEach((secret) => { if (secret.isBuildSecret) { @@ -62,8 +63,8 @@ const createDockerfile = async (data, image): Promise => { export default async function (data) { try { - const image = 'python:3-alpine'; - await createDockerfile(data, image); + const { baseImage, baseBuildImage } = data; + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/react.ts b/src/lib/buildPacks/react.ts index 719f782eb..3b55bfa23 100644 --- a/src/lib/buildPacks/react.ts +++ b/src/lib/buildPacks/react.ts @@ -2,24 +2,25 @@ import { buildCacheImageWithNode, buildImage } from '$lib/docker'; import { promises as fs } from 'fs'; const createDockerfile = async (data, image): Promise => { - const { applicationId, tag, workdir, publishDirectory } = data; + const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data; const Dockerfile: Array = []; Dockerfile.push(`FROM ${image}`); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push('WORKDIR /app'); Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`); - Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`); + if (baseImage.includes('nginx')) { + Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`); + } Dockerfile.push(`EXPOSE 80`); await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n')); }; export default async function (data) { try { - const image = 'webdevops/nginx:alpine'; - const imageForBuild = 'node:lts'; - await buildCacheImageWithNode(data, imageForBuild); - await createDockerfile(data, image); + const { baseImage, baseBuildImage } = data; + await buildCacheImageWithNode(data, baseBuildImage); + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/rust.ts b/src/lib/buildPacks/rust.ts index 586d63140..72f0c6273 100644 --- a/src/lib/buildPacks/rust.ts +++ b/src/lib/buildPacks/rust.ts @@ -4,11 +4,11 @@ import { promises as fs } from 'fs'; import TOML from '@iarna/toml'; const createDockerfile = async (data, image, name): Promise => { - const { workdir, port, applicationId, tag } = data; + const { workdir, port, applicationId, tag, buildId } = data; const Dockerfile: Array = []; Dockerfile.push(`FROM ${image}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target target`); Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /usr/local/cargo /usr/local/cargo`); Dockerfile.push(`COPY . 
.`); @@ -27,14 +27,12 @@ const createDockerfile = async (data, image, name): Promise => { export default async function (data) { try { - const { workdir } = data; - const image = 'rust:latest'; - const imageForBuild = 'rust:latest'; + const { workdir, baseImage, baseBuildImage } = data; const { stdout: cargoToml } = await asyncExecShell(`cat ${workdir}/Cargo.toml`); const parsedToml: any = TOML.parse(cargoToml); const name = parsedToml.package.name; - await buildCacheImageWithCargo(data, imageForBuild); - await createDockerfile(data, image, name); + await buildCacheImageWithCargo(data, baseBuildImage); + await createDockerfile(data, baseImage, name); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/static.ts b/src/lib/buildPacks/static.ts index e9e7179d5..8f3c2c7d4 100644 --- a/src/lib/buildPacks/static.ts +++ b/src/lib/buildPacks/static.ts @@ -10,13 +10,15 @@ const createDockerfile = async (data, image): Promise => { baseDirectory, publishDirectory, secrets, - pullmergeRequestId + pullmergeRequestId, + baseImage, + buildId } = data; const Dockerfile: Array = []; Dockerfile.push(`FROM ${image}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); if (secrets.length > 0) { secrets.forEach((secret) => { if (secret.isBuildSecret) { @@ -37,17 +39,18 @@ const createDockerfile = async (data, image): Promise => { } else { Dockerfile.push(`COPY .${baseDirectory || ''} ./`); } - Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`); + if (baseImage.includes('nginx')) { + Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`); + } Dockerfile.push(`EXPOSE 80`); await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n')); }; export default async function (data) { try { - const image = 'webdevops/nginx:alpine'; - const imageForBuild = 'node:lts'; - if (data.buildCommand) await buildCacheImageWithNode(data, imageForBuild); - await createDockerfile(data, image); + const { baseImage, baseBuildImage } = data; + if (data.buildCommand) await buildCacheImageWithNode(data, baseBuildImage); + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/svelte.ts b/src/lib/buildPacks/svelte.ts index fbcf36abb..5604e7ed6 100644 --- a/src/lib/buildPacks/svelte.ts +++ b/src/lib/buildPacks/svelte.ts @@ -2,25 +2,25 @@ import { buildCacheImageWithNode, buildImage } from '$lib/docker'; import { promises as fs } from 'fs'; const createDockerfile = async (data, image): Promise => { - const { applicationId, tag, workdir, publishDirectory } = data; + const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data; const Dockerfile: Array = []; Dockerfile.push(`FROM ${image}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`); - Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`); + if (baseImage.includes('nginx')) { + Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`); + } Dockerfile.push(`EXPOSE 80`); await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n')); }; export default async function (data) { try { - const image = 'webdevops/nginx:alpine'; - const imageForBuild = 'node:lts'; - - await buildCacheImageWithNode(data, imageForBuild); - await createDockerfile(data, image); + const { baseImage, baseBuildImage 
} = data; + await buildCacheImageWithNode(data, baseBuildImage); + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/buildPacks/vuejs.ts b/src/lib/buildPacks/vuejs.ts index fa80ac435..5604e7ed6 100644 --- a/src/lib/buildPacks/vuejs.ts +++ b/src/lib/buildPacks/vuejs.ts @@ -2,24 +2,25 @@ import { buildCacheImageWithNode, buildImage } from '$lib/docker'; import { promises as fs } from 'fs'; const createDockerfile = async (data, image): Promise => { - const { applicationId, tag, workdir, publishDirectory } = data; + const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data; const Dockerfile: Array = []; Dockerfile.push(`FROM ${image}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`); - Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`); + if (baseImage.includes('nginx')) { + Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`); + } Dockerfile.push(`EXPOSE 80`); await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n')); }; export default async function (data) { try { - const image = 'webdevops/nginx:alpine'; - const imageForBuild = 'node:lts'; - await buildCacheImageWithNode(data, imageForBuild); - await createDockerfile(data, image); + const { baseImage, baseBuildImage } = data; + await buildCacheImageWithNode(data, baseBuildImage); + await createDockerfile(data, baseImage); await buildImage(data); } catch (error) { throw error; diff --git a/src/lib/components/ServiceLinks.svelte b/src/lib/components/ServiceLinks.svelte index 980505119..b8bafc369 100644 --- a/src/lib/components/ServiceLinks.svelte +++ b/src/lib/components/ServiceLinks.svelte @@ -1,6 +1,7 @@ {#if service.type === 'plausibleanalytics'} @@ -57,4 +59,12 @@ +{:else if service.type === 'hasura'} + + + +{:else if service.type === 'fider'} + + + {/if} diff --git a/src/lib/components/common.ts b/src/lib/components/common.ts index 760653cfa..d6ff2f8ea 100644 --- a/src/lib/components/common.ts +++ b/src/lib/components/common.ts @@ -19,7 +19,7 @@ export const staticDeployments = [ 'astro', 'eleventy' ]; -export const notNodeDeployments = ['php', 'docker', 'rust', 'python', 'deno']; +export const notNodeDeployments = ['php', 'docker', 'rust', 'python', 'deno', 'laravel']; export function getDomain(domain) { return domain?.replace('https://', '').replace('http://', ''); @@ -191,5 +191,27 @@ export const supportedServiceTypesAndVersions = [ ports: { main: 3000 } + }, + { + name: 'hasura', + fancyName: 'Hasura', + baseImage: 'hasura/graphql-engine', + images: ['postgres:12-alpine'], + versions: ['latest', 'v2.5.1'], + recommendedVersion: 'v2.5.1', + ports: { + main: 8080 + } + }, + { + name: 'fider', + fancyName: 'Fider', + baseImage: 'getfider/fider', + images: ['postgres:12-alpine'], + versions: ['stable'], + recommendedVersion: 'stable', + ports: { + main: 3000 + } } ]; diff --git a/src/lib/components/svg/applications/Laravel.svelte b/src/lib/components/svg/applications/Laravel.svelte new file mode 100644 index 000000000..ab544a596 --- /dev/null +++ b/src/lib/components/svg/applications/Laravel.svelte @@ -0,0 +1,10 @@ +Logomark diff --git a/src/lib/components/svg/databases/Redis.svelte b/src/lib/components/svg/databases/Redis.svelte index 2571e601d..24a7dc797 100644 --- a/src/lib/components/svg/databases/Redis.svelte +++ 
b/src/lib/components/svg/databases/Redis.svelte @@ -4,9 +4,7 @@ + export let isAbsolute = false; + + + diff --git a/src/lib/components/svg/services/Hasura.svelte b/src/lib/components/svg/services/Hasura.svelte new file mode 100644 index 000000000..aaac145ca --- /dev/null +++ b/src/lib/components/svg/services/Hasura.svelte @@ -0,0 +1,26 @@ + + + + + + + + + + + + + diff --git a/src/lib/components/svg/services/Umami.svelte b/src/lib/components/svg/services/Umami.svelte index ac0df85af..e1c9b1e67 100644 --- a/src/lib/components/svg/services/Umami.svelte +++ b/src/lib/components/svg/services/Umami.svelte @@ -5,8 +5,6 @@ { if (teamId === '0') { @@ -195,8 +196,18 @@ export async function getApplication({ id, teamId }: { id: string; teamId: strin return s; }); } + const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage( + body.buildPack + ); - return { ...body }; + // Set default build images + if (!body.baseImage) { + body.baseImage = baseImage; + } + if (!body.baseBuildImage) { + body.baseBuildImage = baseBuildImage; + } + return { ...body, baseBuildImages, baseImages }; } export async function configureGitRepository({ @@ -266,7 +277,9 @@ export async function configureApplication({ pythonVariable, dockerFileLocation, denoMainFile, - denoOptions + denoOptions, + baseImage, + baseBuildImage }: { id: string; buildPack: string; @@ -284,6 +297,8 @@ export async function configureApplication({ dockerFileLocation: string; denoMainFile: string; denoOptions: string; + baseImage: string; + baseBuildImage: string; }): Promise { return await prisma.application.update({ where: { id }, @@ -302,7 +317,9 @@ export async function configureApplication({ pythonVariable, dockerFileLocation, denoMainFile, - denoOptions + denoOptions, + baseImage, + baseBuildImage } }); } diff --git a/src/lib/database/common.ts b/src/lib/database/common.ts index 968208f55..314f77b08 100644 --- a/src/lib/database/common.ts +++ b/src/lib/database/common.ts @@ -11,11 +11,12 @@ import generator from 'generate-password'; import forge from 'node-forge'; import getPort, { portNumbers } from 'get-port'; -export function generatePassword(length = 24): string { +export function generatePassword(length = 24, symbols = false): string { return generator.generate({ length, numbers: true, - strict: true + strict: true, + symbols }); } diff --git a/src/lib/database/services.ts b/src/lib/database/services.ts index 4cb2e8b22..df760cf76 100644 --- a/src/lib/database/services.ts +++ b/src/lib/database/services.ts @@ -14,7 +14,9 @@ const include: Prisma.ServiceInclude = { wordpress: true, ghost: true, meiliSearch: true, - umami: true + umami: true, + hasura: true, + fider: true }; export async function listServicesWithIncludes() { return await prisma.service.findMany({ @@ -97,6 +99,17 @@ export async function getService({ id, teamId }: { id: string; teamId: string }) body.umami.umamiAdminPassword = decrypt(body.umami.umamiAdminPassword); if (body.umami?.hashSalt) body.umami.hashSalt = decrypt(body.umami.hashSalt); + if (body.hasura?.postgresqlPassword) + body.hasura.postgresqlPassword = decrypt(body.hasura.postgresqlPassword); + if (body.hasura?.graphQLAdminPassword) + body.hasura.graphQLAdminPassword = decrypt(body.hasura.graphQLAdminPassword); + + if (body.fider?.postgresqlPassword) + body.fider.postgresqlPassword = decrypt(body.fider.postgresqlPassword); + if (body.fider?.jwtSecret) body.fider.jwtSecret = decrypt(body.fider.jwtSecret); + if (body.fider?.emailSmtpPassword) + body.fider.emailSmtpPassword = 
decrypt(body.fider.emailSmtpPassword); + const settings = await prisma.setting.findFirst(); return { ...body, settings }; @@ -243,6 +256,44 @@ export async function configureServiceType({ } } }); + } else if (type === 'hasura') { + const postgresqlUser = cuid(); + const postgresqlPassword = encrypt(generatePassword()); + const postgresqlDatabase = 'hasura'; + const graphQLAdminPassword = encrypt(generatePassword()); + await prisma.service.update({ + where: { id }, + data: { + type, + hasura: { + create: { + postgresqlDatabase, + postgresqlPassword, + postgresqlUser, + graphQLAdminPassword + } + } + } + }); + } else if (type === 'fider') { + const postgresqlUser = cuid(); + const postgresqlPassword = encrypt(generatePassword()); + const postgresqlDatabase = 'fider'; + const jwtSecret = encrypt(generatePassword(64, true)); + await prisma.service.update({ + where: { id }, + data: { + type, + fider: { + create: { + postgresqlDatabase, + postgresqlPassword, + postgresqlUser, + jwtSecret + } + } + } + }); } } @@ -301,52 +352,53 @@ export async function updateService({ return await prisma.service.update({ where: { id }, data: { fqdn, name } }); } -export async function updateLanguageToolService({ +export async function updateFiderService({ id, fqdn, - name + name, + emailNoreply, + emailMailgunApiKey, + emailMailgunDomain, + emailMailgunRegion, + emailSmtpHost, + emailSmtpPort, + emailSmtpUser, + emailSmtpPassword, + emailSmtpEnableStartTls }: { id: string; fqdn: string; name: string; + emailNoreply: string; + emailMailgunApiKey: string; + emailMailgunDomain: string; + emailMailgunRegion: string; + emailSmtpHost: string; + emailSmtpPort: number; + emailSmtpUser: string; + emailSmtpPassword: string; + emailSmtpEnableStartTls: boolean; }): Promise { - return await prisma.service.update({ where: { id }, data: { fqdn, name } }); -} - -export async function updateMeiliSearchService({ - id, - fqdn, - name -}: { - id: string; - fqdn: string; - name: string; -}): Promise { - return await prisma.service.update({ where: { id }, data: { fqdn, name } }); -} - -export async function updateVaultWardenService({ - id, - fqdn, - name -}: { - id: string; - fqdn: string; - name: string; -}): Promise { - return await prisma.service.update({ where: { id }, data: { fqdn, name } }); -} - -export async function updateVsCodeServer({ - id, - fqdn, - name -}: { - id: string; - fqdn: string; - name: string; -}): Promise { - return await prisma.service.update({ where: { id }, data: { fqdn, name } }); + return await prisma.service.update({ + where: { id }, + data: { + fqdn, + name, + fider: { + update: { + emailNoreply, + emailMailgunApiKey, + emailMailgunDomain, + emailMailgunRegion, + emailSmtpHost, + emailSmtpPort, + emailSmtpUser, + emailSmtpPassword, + emailSmtpEnableStartTls + } + } + } + }); } export async function updateWordpress({ @@ -398,8 +450,10 @@ export async function updateGhostService({ export async function removeService({ id }: { id: string }): Promise { await prisma.servicePersistentStorage.deleteMany({ where: { serviceId: id } }); await prisma.meiliSearch.deleteMany({ where: { serviceId: id } }); + await prisma.fider.deleteMany({ where: { serviceId: id } }); await prisma.ghost.deleteMany({ where: { serviceId: id } }); await prisma.umami.deleteMany({ where: { serviceId: id } }); + await prisma.hasura.deleteMany({ where: { serviceId: id } }); await prisma.plausibleAnalytics.deleteMany({ where: { serviceId: id } }); await prisma.minio.deleteMany({ where: { serviceId: id } }); await 
prisma.vscodeserver.deleteMany({ where: { serviceId: id } }); diff --git a/src/lib/docker.ts b/src/lib/docker.ts index d3c040c6a..0b60a4ed9 100644 --- a/src/lib/docker.ts +++ b/src/lib/docker.ts @@ -3,6 +3,34 @@ import { promises as fs } from 'fs'; import { checkPnpm } from './buildPacks/common'; import { saveBuildLog } from './common'; +export async function buildCacheImageForLaravel(data, imageForBuild) { + const { applicationId, tag, workdir, docker, buildId, debug, secrets, pullmergeRequestId } = data; + const Dockerfile: Array = []; + Dockerfile.push(`FROM ${imageForBuild}`); + Dockerfile.push('WORKDIR /app'); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); + if (secrets.length > 0) { + secrets.forEach((secret) => { + if (secret.isBuildSecret) { + if (pullmergeRequestId) { + if (secret.isPRMRSecret) { + Dockerfile.push(`ARG ${secret.name}=${secret.value}`); + } + } else { + if (!secret.isPRMRSecret) { + Dockerfile.push(`ARG ${secret.name}=${secret.value}`); + } + } + } + }); + } + Dockerfile.push(`COPY *.json *.mix.js /app/`); + Dockerfile.push(`COPY resources /app/resources`); + Dockerfile.push(`RUN yarn install && yarn production`); + await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n')); + await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug }); +} + export async function buildCacheImageWithNode(data, imageForBuild) { const { applicationId, @@ -21,7 +49,7 @@ export async function buildCacheImageWithNode(data, imageForBuild) { const Dockerfile: Array = []; Dockerfile.push(`FROM ${imageForBuild}`); Dockerfile.push('WORKDIR /app'); - Dockerfile.push(`LABEL coolify.image=true`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); if (secrets.length > 0) { secrets.forEach((secret) => { if (secret.isBuildSecret) { @@ -41,10 +69,11 @@ export async function buildCacheImageWithNode(data, imageForBuild) { Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm'); Dockerfile.push('RUN pnpm add -g pnpm'); } - Dockerfile.push(`COPY .${baseDirectory || ''} ./`); if (installCommand) { + Dockerfile.push(`COPY .${baseDirectory || ''}/package.json ./`); Dockerfile.push(`RUN ${installCommand}`); } + Dockerfile.push(`COPY .${baseDirectory || ''} ./`); Dockerfile.push(`RUN ${buildCommand}`); await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n')); await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug }); @@ -65,11 +94,13 @@ export async function buildCacheImageWithCargo(data, imageForBuild) { } = data; const Dockerfile: Array = []; Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push('WORKDIR /app'); Dockerfile.push('RUN cargo install cargo-chef'); Dockerfile.push('COPY . 
.'); Dockerfile.push('RUN cargo chef prepare --recipe-path recipe.json'); Dockerfile.push(`FROM ${imageForBuild}`); + Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push('WORKDIR /app'); Dockerfile.push('RUN cargo install cargo-chef'); Dockerfile.push(`COPY --from=planner-${applicationId} /app/recipe.json recipe.json`); diff --git a/src/lib/haproxy/configuration.ts b/src/lib/haproxy/configuration.ts index f2f9f500b..745e8f4af 100644 --- a/src/lib/haproxy/configuration.ts +++ b/src/lib/haproxy/configuration.ts @@ -95,6 +95,8 @@ backend {{domain}} {{/isHttps}} http-request add-header X-Forwarded-Host %[req.hdr(host),lower] server {{id}} {{id}}:{{port}} + compression algo gzip + compression type text/html text/css text/plain text/xml text/x-component text/javascript application/x-javascript application/javascript application/json application/manifest+json application/vnd.api+json application/xml application/xhtml+xml application/rss+xml application/atom+xml application/vnd.ms-fontobject application/x-font-ttf application/x-font-opentype application/x-font-truetype image/svg+xml image/x-icon image/vnd.microsoft.icon font/ttf font/eot font/otf font/opentype {{/isRunning}} {{/applications}} @@ -111,6 +113,8 @@ backend {{domain}} {{/isHttps}} http-request add-header X-Forwarded-Host %[req.hdr(host),lower] server {{id}} {{id}}:{{port}} + compression algo gzip + compression type text/html text/css text/plain text/xml text/x-component text/javascript application/x-javascript application/javascript application/json application/manifest+json application/vnd.api+json application/xml application/xhtml+xml application/rss+xml application/atom+xml application/vnd.ms-fontobject application/x-font-ttf application/x-font-opentype application/x-font-truetype image/svg+xml image/x-icon image/vnd.microsoft.icon font/ttf font/eot font/otf font/opentype {{/isRunning}} {{/services}} @@ -126,6 +130,8 @@ backend {{domain}} {{/isHttps}} http-request add-header X-Forwarded-Host %[req.hdr(host),lower] server {{id}} {{id}}:{{port}} check fall 10 + compression algo gzip + compression type text/html text/css text/plain text/xml text/x-component text/javascript application/x-javascript application/javascript application/json application/manifest+json application/vnd.api+json application/xml application/xhtml+xml application/rss+xml application/atom+xml application/vnd.ms-fontobject application/x-font-ttf application/x-font-opentype application/x-font-truetype image/svg+xml image/x-icon image/vnd.microsoft.icon font/ttf font/eot font/otf font/opentype {{/coolify}} `; diff --git a/src/lib/letsencrypt/index.ts b/src/lib/letsencrypt/index.ts index 7682ecb27..f443625ae 100644 --- a/src/lib/letsencrypt/index.ts +++ b/src/lib/letsencrypt/index.ts @@ -109,6 +109,7 @@ export async function generateSSLCerts(): Promise { include: { destinationDocker: true, settings: true }, orderBy: { createdAt: 'desc' } }); + const { fqdn, isDNSCheckEnabled } = await db.prisma.setting.findFirst(); for (const application of applications) { try { if (application.fqdn && application.destinationDockerId) { @@ -147,7 +148,6 @@ export async function generateSSLCerts(): Promise { } } const services = await listServicesWithIncludes(); - for (const service of services) { try { if (service.fqdn && service.destinationDockerId) { @@ -171,7 +171,6 @@ export async function generateSSLCerts(): Promise { console.log(`Error during generateSSLCerts with ${service.fqdn}: ${error}`); } } - const { fqdn } = await db.prisma.setting.findFirst(); if (fqdn) 
{ const domain = getDomain(fqdn); const isHttps = fqdn.startsWith('https://'); @@ -193,73 +192,99 @@ export async function generateSSLCerts(): Promise { file.endsWith('.pem') && certificates.push(file.replace(/\.pem$/, '')); } } - const resolver = new dns.Resolver({ timeout: 2000 }); - resolver.setServers(['8.8.8.8', '1.1.1.1']); - let ipv4, ipv6; - try { - ipv4 = await (await asyncExecShell(`curl -4s https://ifconfig.io`)).stdout; - } catch (error) {} - try { - ipv6 = await (await asyncExecShell(`curl -6s https://ifconfig.io`)).stdout; - } catch (error) {} - for (const ssl of ssls) { - if (!dev) { - if ( - certificates.includes(ssl.domain) || - certificates.includes(ssl.domain.replace('www.', '')) - ) { - // console.log(`Certificate for ${ssl.domain} already exists`); - } else { - // Checking DNS entry before generating certificate - if (ipv4 || ipv6) { - let domains4 = []; - let domains6 = []; - try { - domains4 = await resolver.resolve4(ssl.domain); - } catch (error) {} - try { - domains6 = await resolver.resolve6(ssl.domain); - } catch (error) {} - if (domains4.length > 0 || domains6.length > 0) { - if ( - (ipv4 && domains4.includes(ipv4.replace('\n', ''))) || - (ipv6 && domains6.includes(ipv6.replace('\n', ''))) - ) { - console.log('Generating SSL for', ssl.domain); - return await letsEncrypt(ssl.domain, ssl.id, ssl.isCoolify); + if (isDNSCheckEnabled) { + const resolver = new dns.Resolver({ timeout: 2000 }); + resolver.setServers(['8.8.8.8', '1.1.1.1']); + let ipv4, ipv6; + try { + ipv4 = await (await asyncExecShell(`curl -4s https://ifconfig.io`)).stdout; + } catch (error) {} + try { + ipv6 = await (await asyncExecShell(`curl -6s https://ifconfig.io`)).stdout; + } catch (error) {} + for (const ssl of ssls) { + if (!dev) { + if ( + certificates.includes(ssl.domain) || + certificates.includes(ssl.domain.replace('www.', '')) + ) { + // console.log(`Certificate for ${ssl.domain} already exists`); + } else { + // Checking DNS entry before generating certificate + if (ipv4 || ipv6) { + let domains4 = []; + let domains6 = []; + try { + domains4 = await resolver.resolve4(ssl.domain); + } catch (error) {} + try { + domains6 = await resolver.resolve6(ssl.domain); + } catch (error) {} + if (domains4.length > 0 || domains6.length > 0) { + if ( + (ipv4 && domains4.includes(ipv4.replace('\n', ''))) || + (ipv6 && domains6.includes(ipv6.replace('\n', ''))) + ) { + console.log('Generating SSL for', ssl.domain); + return await letsEncrypt(ssl.domain, ssl.id, ssl.isCoolify); + } } } + console.log('DNS settings is incorrect for', ssl.domain, 'skipping.'); + } + } else { + if ( + certificates.includes(ssl.domain) || + certificates.includes(ssl.domain.replace('www.', '')) + ) { + console.log(`Certificate for ${ssl.domain} already exists`); + } else { + // Checking DNS entry before generating certificate + if (ipv4 || ipv6) { + let domains4 = []; + let domains6 = []; + try { + domains4 = await resolver.resolve4(ssl.domain); + } catch (error) {} + try { + domains6 = await resolver.resolve6(ssl.domain); + } catch (error) {} + if (domains4.length > 0 || domains6.length > 0) { + if ( + (ipv4 && domains4.includes(ipv4.replace('\n', ''))) || + (ipv6 && domains6.includes(ipv6.replace('\n', ''))) + ) { + console.log('Generating SSL for', ssl.domain); + return; + } + } + } + console.log('DNS settings is incorrect for', ssl.domain, 'skipping.'); + } + } + } + } else { + if (!dev) { + for (const ssl of ssls) { + if ( + certificates.includes(ssl.domain) || + certificates.includes(ssl.domain.replace('www.', '')) + ) { 
+ } else { + console.log('Generating SSL for', ssl.domain); + return await letsEncrypt(ssl.domain, ssl.id, ssl.isCoolify); } - console.log('DNS settings is incorrect for', ssl.domain, 'skipping.'); } } else { - if ( - certificates.includes(ssl.domain) || - certificates.includes(ssl.domain.replace('www.', '')) - ) { - console.log(`Certificate for ${ssl.domain} already exists`); - } else { - // Checking DNS entry before generating certificate - if (ipv4 || ipv6) { - let domains4 = []; - let domains6 = []; - try { - domains4 = await resolver.resolve4(ssl.domain); - } catch (error) {} - try { - domains6 = await resolver.resolve6(ssl.domain); - } catch (error) {} - if (domains4.length > 0 || domains6.length > 0) { - if ( - (ipv4 && domains4.includes(ipv4.replace('\n', ''))) || - (ipv6 && domains6.includes(ipv6.replace('\n', ''))) - ) { - console.log('Generating SSL for', ssl.domain); - return; - } - } + for (const ssl of ssls) { + if ( + certificates.includes(ssl.domain) || + certificates.includes(ssl.domain.replace('www.', '')) + ) { + console.log(`Certificate for ${ssl.domain} already exists`); + } else { + console.log('Generating SSL for', ssl.domain); } - console.log('DNS settings is incorrect for', ssl.domain, 'skipping.'); } } } diff --git a/src/lib/locales/en.json index d77d2a1c1..a7adfcef0 100644 --- a/src/lib/locales/en.json +++ b/src/lib/locales/en.json @@ -124,7 +124,7 @@ "no_branches_found": "No branches found", "configure_build_pack": "Configure Build Pack", "scanning_repository_suggest_build_pack": "Scanning repository to suggest a build pack for you...", - "found_lock_file": "Found lock file for {{packageManager}}. Using it for predefined commands commands.", + "found_lock_file": "Found lock file for {{packageManager}}. Using it for predefined commands.", "configure_destination": "Configure Destination", "no_configurable_destination": "No configurable Destination found", "select_a_repository_project": "Select a Repository / Project", @@ -184,6 +184,10 @@ "git_source": "Git Source", "git_repository": "Git Repository", "build_pack": "Build Pack", + "base_image": "Deployment Image", + "base_image_explainer": "Image that will be used for the deployment.", + "base_build_image": "Build Image", + "base_build_image_explainer": "Image that will be used during the build process.", "destination": "Destination", "application": "Application", "url_fqdn": "URL (FQDN)", @@ -226,7 +230,8 @@ "permission_denied_start_database": "You do not have permission to start the database.", "delete_database": "Delete Database", "permission_denied_delete_database": "You do not have permission to delete a Database", - "no_databases_found": "No databases found" + "no_databases_found": "No databases found", + "logs": "Database Logs" }, "destination": { "delete_destination": "Delete Destination", @@ -291,20 +296,24 @@ "permission_denied_start_service": "You do not have permission to start the service.", "delete_service": "Delete Service", "permission_denied_delete_service": "You do not have permission to delete a service.", - "no_service": "No services found" + "no_service": "No services found", + "logs": "Service Logs" }, "setting": { "change_language": "Change Language", "permission_denied": "You do not have permission to do this. \\nAsk an admin to modify your permissions.", "domain_removed": "Domain removed", - "ssl_explainer": "If you specify https, Coolify will be accessible only over https. SSL certificate will be generated for you.
If you specify www, Coolify will be redirected (302) from non-www and vice versa.", + "ssl_explainer": "If you specify https, Coolify will be accessible only over https. An SSL certificate will be generated for you.
If you specify www, Coolify will redirect (302) requests from non-www to www and vice versa.

WARNING: If you change an already set domain, it will break webhooks and other integrations! You need to update them manually.", "must_remove_domain_before_changing": "Must remove the domain before you can change this setting.", "registration_allowed": "Registration allowed?", "registration_allowed_explainer": "Allow further registrations to the application.
It's turned off after the first registration.", "coolify_proxy_settings": "Coolify Proxy Settings", "credential_stat_explainer": "Credentials for stats page.", "auto_update_enabled": "Auto update enabled?", - "auto_update_enabled_explainer": "Enable automatic updates for Coolify. It will be done automatically behind the scenes, if there is no build process running." + "auto_update_enabled_explainer": "Enable automatic updates for Coolify. It will be done automatically behind the scenes, if there is no build process running.", + "generate_www_non_www_ssl": "It will generate certificates for both www and non-www.
You need to have both DNS entries set in advance.

Service needs to be restarted.", + "is_dns_check_enabled": "DNS check enabled?", + "is_dns_check_enabled_explainer": "You can disable the DNS check before creating SSL certificates.

Turning it off is useful when Coolify is behind a reverse proxy or tunnel." }, "team": { "pending_invitations": "Pending invitations", diff --git a/src/lib/queues/builder.ts b/src/lib/queues/builder.ts index 8e1650106..3a74320ef 100644 --- a/src/lib/queues/builder.ts +++ b/src/lib/queues/builder.ts @@ -47,7 +47,9 @@ export default async function (job: Job): Promise): Promise): Promise { const destinationDockers = await prisma.destinationDocker.findMany(); const engines = [...new Set(destinationDockers.map(({ engine }) => engine))]; for (const engine of engines) { + let lowDiskSpace = false; const host = getEngine(engine); - // Cleanup old coolify images try { - let { stdout: images } = await asyncExecShell( - `DOCKER_HOST=${host} docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs ` + const { stdout } = await asyncExecShell( + `DOCKER_HOST=${host} docker exec coolify sh -c 'df -kPT /'` ); - images = images.trim(); - if (images) { - await asyncExecShell(`DOCKER_HOST=${host} docker rmi -f ${images}`); + let lines = stdout.trim().split('\n'); + let header = lines[0]; + let regex = + /^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g; + const boundaries = []; + let match; + + while ((match = regex.exec(header))) { + boundaries.push(match[0].length); + } + + boundaries[boundaries.length - 1] = -1; + const data = lines.slice(1).map((line) => { + const cl = boundaries.map((boundary) => { + const column = boundary > 0 ? line.slice(0, boundary) : line; + line = line.slice(boundary); + return column.trim(); + }); + return { + capacity: Number.parseInt(cl[5], 10) / 100 + }; + }); + if (data.length > 0) { + const { capacity } = data[0]; + if (capacity > 0.8) { + lowDiskSpace = true; + } } } catch (error) { - //console.log(error); + console.log(error); } - try { - await asyncExecShell(`DOCKER_HOST=${host} docker container prune -f`); - } catch (error) { - //console.log(error); - } - try { - await asyncExecShell(`DOCKER_HOST=${host} docker image prune -f --filter "until=2h"`); - } catch (error) { - //console.log(error); - } - // Cleanup old images older than a day - try { - await asyncExecShell(`DOCKER_HOST=${host} docker image prune --filter "until=72h" -a -f`); - } catch (error) { - //console.log(error); + console.log(`Is LowDiskSpace detected? 
${lowDiskSpace}`); + if (lowDiskSpace) { + // Cleanup old coolify images + try { + let { stdout: images } = await asyncExecShell( + `DOCKER_HOST=${host} docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs ` + ); + images = images.trim(); + if (images) { + await asyncExecShell(`DOCKER_HOST=${host} docker rmi -f ${images}`); + } + } catch (error) { + //console.log(error); + } + try { + await asyncExecShell(`DOCKER_HOST=${host} docker container prune -f`); + } catch (error) { + //console.log(error); + } + try { + await asyncExecShell(`DOCKER_HOST=${host} docker image prune -f --filter "until=2h"`); + } catch (error) { + //console.log(error); + } + // Cleanup old images older than a day + try { + await asyncExecShell(`DOCKER_HOST=${host} docker image prune --filter "until=72h" -a -f`); + } catch (error) { + //console.log(error); + } } } } diff --git a/src/lib/store.ts b/src/lib/store.ts index 1473d93b2..684922bfb 100644 --- a/src/lib/store.ts +++ b/src/lib/store.ts @@ -1,4 +1,5 @@ -import { writable, type Writable } from 'svelte/store'; +import { browser } from '$app/env'; +import { writable, type Writable, type Readable, readable } from 'svelte/store'; export const gitTokens: Writable<{ githubToken: string | null; gitlabToken: string | null }> = writable({ @@ -6,3 +7,8 @@ export const gitTokens: Writable<{ githubToken: string | null; gitlabToken: stri gitlabToken: null }); export const disabledButton: Writable = writable(false); + +export const features: Readable<{ latestVersion: string; beta: boolean }> = readable({ + beta: browser && window.localStorage.getItem('beta') === 'true', + latestVersion: browser && window.localStorage.getItem('latestVersion') +}); diff --git a/src/lib/types/builderJob.ts b/src/lib/types/builderJob.ts index 8b7cc3b9f..145f7cc46 100644 --- a/src/lib/types/builderJob.ts +++ b/src/lib/types/builderJob.ts @@ -34,6 +34,8 @@ export type BuilderJob = { persistentStorage: { path: string }[]; pullmergeRequestId?: unknown; sourceBranch?: string; + baseImage: string; + baseBuildImage: string; }; // TODO: Add the other build types diff --git a/src/routes/__layout.svelte b/src/routes/__layout.svelte index 87d55c9a3..5952073e0 100644 --- a/src/routes/__layout.svelte +++ b/src/routes/__layout.svelte @@ -25,7 +25,6 @@ if (res.ok) { return { props: { - selectedTeamId: session.teamId, ...(await res.json()) } }; @@ -35,9 +34,6 @@
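The new cleanup logic in `src/lib/queues/cleanup.ts` above only prunes old Coolify images and stopped containers when the root filesystem inside the `coolify` container is more than 80% full; it detects this by running `df -kPT /` and deriving column boundaries from the header line. Because `-P` guarantees one data row per filesystem, the same value can also be read by splitting the row on whitespace. The sketch below is illustrative only: the `isLowOnDiskSpace` helper and the sample output are not part of the codebase, while the 80% threshold and the `df -kPT /` invocation come from the diff.

```ts
// Hypothetical helper (not in the Coolify codebase): reads the Capacity column
// of POSIX `df -kPT /` output and reports whether usage crosses a threshold.
function isLowOnDiskSpace(dfOutput: string, threshold = 0.8): boolean {
	const lines = dfOutput.trim().split('\n');
	// With -P each mount is one line: Filesystem Type 1024-blocks Used Available Capacity Mounted on
	const columns = lines[1]?.split(/\s+/) ?? [];
	const capacity = Number.parseInt(columns[5], 10); // e.g. "86%" -> 86
	return Number.isFinite(capacity) && capacity / 100 > threshold;
}

// Sample output as `df -kPT /` might print it inside the coolify container (values invented).
const sample = `Filesystem     Type    1024-blocks     Used Available Capacity Mounted on
overlay        overlay    61255492 52428800   8826692      86% /`;
console.log(isLowOnDiskSpace(sample)); // true -> the queue would prune images and containers
```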
@@ -310,6 +323,49 @@ />
+
+ +
+ +
+ {#if application.buildPack === 'laravel'} + + {:else} + + {/if} +
+ {/if}
{$t('application.application')}
@@ -476,21 +532,23 @@ />
{/if} -
-
- - + {#if application.buildPack !== 'laravel'} +
+
+ + +
+
- -
+ {/if} {#if !notNodeDeployments.includes(application.buildPack)}
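The application configuration page above gains two related changes: a pair of selects for the new `baseImage` / `baseBuildImage` columns (with a Laravel-specific branch, since Laravel apps get a fixed image), and the exposed-port field is now rendered only when `application.buildPack !== 'laravel'`. The sketch below only illustrates the shape of that per-build-pack image pair; the helper name and the concrete image tags are assumptions, not the lists Coolify actually ships.

```ts
// Illustrative sketch, not Coolify's real image catalogue.
type BaseImages = { baseImage: string; baseBuildImage: string };

// Hypothetical defaults keyed by build pack; real values are defined in the app code.
const defaultBaseImages: Record<string, BaseImages> = {
	node: { baseImage: 'node:16-alpine', baseBuildImage: 'node:16-alpine' },
	static: { baseImage: 'nginx:stable-alpine', baseBuildImage: 'node:16-alpine' },
	laravel: { baseImage: 'webdevops/php-nginx:8.1', baseBuildImage: 'webdevops/php-nginx:8.1' }
};

// The builder job type above now carries both values, so a build could resolve them like this.
function withBaseImages(buildPack: string, overrides: Partial<BaseImages> = {}): BaseImages {
	const defaults = defaultBaseImages[buildPack] ?? defaultBaseImages.node;
	return { ...defaults, ...overrides };
}
```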
diff --git a/src/routes/applications/[id]/logs/build/_BuildLog.svelte b/src/routes/applications/[id]/logs/build/_BuildLog.svelte index 9a8dea4cc..378e58c2c 100644 --- a/src/routes/applications/[id]/logs/build/_BuildLog.svelte +++ b/src/routes/applications/[id]/logs/build/_BuildLog.svelte @@ -8,7 +8,7 @@ import Loading from '$lib/components/Loading.svelte'; import LoadingLogs from '../_Loading.svelte'; - import { get } from '$lib/api'; + import { get, post } from '$lib/api'; import { errorNotification } from '$lib/form'; import { t } from '$lib/translations'; @@ -20,6 +20,8 @@ let followingInterval; let logsEl; + let cancelInprogress = false; + const { id } = $page.params; const cleanAnsiCodes = (str: string) => str.replace(/\x1B\[(\d+)m/g, ''); @@ -67,6 +69,19 @@ return errorNotification(error); } } + async function cancelBuild() { + if (cancelInprogress) return; + try { + cancelInprogress = true; + await post(`/applications/${id}/cancel.json`, { + buildId, + applicationId: id + }); + } catch (error) { + console.log(error); + return errorNotification(error); + } + } onDestroy(() => { clearInterval(streamInterval); clearInterval(followingInterval); @@ -90,7 +105,7 @@
+ {#if currentStatus === 'running'} + + {/if}
+
-
+
Application Logs
- {application.name} + {application.name}
{#if application.fqdn} diff --git a/src/routes/applications/index.svelte b/src/routes/applications/index.svelte index 3295b8eeb..07e4564c5 100644 --- a/src/routes/applications/index.svelte +++ b/src/routes/applications/index.svelte @@ -22,6 +22,7 @@ import Astro from '$lib/components/svg/applications/Astro.svelte'; import Eleventy from '$lib/components/svg/applications/Eleventy.svelte'; import Deno from '$lib/components/svg/applications/Deno.svelte'; + import Laravel from '$lib/components/svg/applications/Laravel.svelte'; async function newApplication() { const { id } = await post('/applications/new', {}); @@ -104,6 +105,8 @@ {:else if application.buildPack.toLowerCase() === 'deno'} + {:else if application.buildPack.toLowerCase() === 'laravel'} + {/if} {/if} @@ -162,6 +165,8 @@ {:else if application.buildPack.toLowerCase() === 'deno'} + {:else if application.buildPack.toLowerCase() === 'laravel'} + {/if} {/if} diff --git a/src/routes/databases/[id]/__layout.svelte b/src/routes/databases/[id]/__layout.svelte index 6b62c2cd1..2696910f7 100644 --- a/src/routes/databases/[id]/__layout.svelte +++ b/src/routes/databases/[id]/__layout.svelte @@ -57,7 +57,7 @@ + + + +
+
+
+ Database Logs +
+ {database.name} +
+ + {#if database.fqdn} + + + + + + + {/if} +
+
+ {#if logs.length === 0} +
{$t('application.build.waiting_logs')}
+ {:else} +
+
+ {#if loadLogsInterval} + + {/if} +
+ +
+
+
+ {#each logs as log} + {log + '\n'} + {/each} +
+
+
+ {/if} +
diff --git a/src/routes/iam/index.svelte b/src/routes/iam/index.svelte index 67b104baa..d7f214aca 100644 --- a/src/routes/iam/index.svelte +++ b/src/routes/iam/index.svelte @@ -25,7 +25,7 @@
@@ -175,20 +187,39 @@
{#each ownTeams as team} -
-
- {team.name} + diff --git a/src/routes/logs.json.ts b/src/routes/logs.json.ts deleted file mode 100644 index 452d455e1..000000000 --- a/src/routes/logs.json.ts +++ /dev/null @@ -1,18 +0,0 @@ -import type { RequestHandler } from '@sveltejs/kit'; -import * as db from '$lib/database'; - -export const post: RequestHandler = async (event) => { - const data = await event.request.json(); - for (const d of data) { - if (d.container_name) { - const { log, container_name: containerId, source } = d; - console.log(log); - // await db.prisma.applicationLogs.create({ data: { log, containerId: containerId.substr(1), source } }); - } - } - - return { - status: 200, - body: {} - }; -}; diff --git a/src/routes/services/[id]/_Services/_Fider.svelte b/src/routes/services/[id]/_Services/_Fider.svelte new file mode 100644 index 000000000..180235d57 --- /dev/null +++ b/src/routes/services/[id]/_Services/_Fider.svelte @@ -0,0 +1,184 @@ + + +
+
Fider
+
+ +
+ + +
+ +
+ + +
+
+
Email
+
+
+ + +
+ +
+ + +
+
+ +
+ +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+
PostgreSQL
+
+ +
+ + +
+
+ + +
+
+ + +
diff --git a/src/routes/services/[id]/_Services/_Hasura.svelte b/src/routes/services/[id]/_Services/_Hasura.svelte new file mode 100644 index 000000000..5930e2ded --- /dev/null +++ b/src/routes/services/[id]/_Services/_Hasura.svelte @@ -0,0 +1,58 @@ + + +
+
Hasura
+
+ +
+ + +
+ +
+
PostgreSQL
+
+ +
+ + +
+
+ + +
+
+ + +
diff --git a/src/routes/services/[id]/_Services/_Services.svelte b/src/routes/services/[id]/_Services/_Services.svelte index 85c1c5abf..a2a2effac 100644 --- a/src/routes/services/[id]/_Services/_Services.svelte +++ b/src/routes/services/[id]/_Services/_Services.svelte @@ -12,7 +12,9 @@ import { errorNotification } from '$lib/form'; import { t } from '$lib/translations'; import { toast } from '@zerodevx/svelte-toast'; + import Fider from './_Fider.svelte'; import Ghost from './_Ghost.svelte'; + import Hasura from './_Hasura.svelte'; import MeiliSearch from './_MeiliSearch.svelte'; import MinIo from './_MinIO.svelte'; import PlausibleAnalytics from './_PlausibleAnalytics.svelte'; @@ -172,6 +174,10 @@ {:else if service.type === 'umami'} + {:else if service.type === 'hasura'} + + {:else if service.type === 'fider'} + {/if}
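Registering Hasura and Fider as one-click services follows the same pattern as the existing services: a Prisma model and migration for the service-specific settings, a settings component wired into `_Services.svelte` above, and `index.json.ts` / `start.json.ts` / `stop.json.ts` routes added further down in this diff. The record below merely summarizes that wiring for the two new types; Coolify itself uses explicit `{#if}` branches and separate route files rather than a registry like this.

```ts
// Summary sketch only: these route paths exist in the diff, but the registry itself does not.
type OneClickServiceRoutes = {
	save: string; // POST: persist service-specific settings (index.json.ts)
	start: string; // POST: render docker-compose.yaml and run `docker compose up -d` (start.json.ts)
	stop: string; // POST: stop and remove the service containers (stop.json.ts)
};

const serviceRoutes: Record<'hasura' | 'fider', OneClickServiceRoutes> = {
	hasura: {
		save: '/services/[id]/hasura/index.json',
		start: '/services/[id]/hasura/start.json',
		stop: '/services/[id]/hasura/stop.json'
	},
	fider: {
		save: '/services/[id]/fider/index.json',
		start: '/services/[id]/fider/start.json',
		stop: '/services/[id]/fider/stop.json'
	}
};
```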
diff --git a/src/routes/services/[id]/__layout.svelte b/src/routes/services/[id]/__layout.svelte index 82fbd19c7..aa7026fd4 100644 --- a/src/routes/services/[id]/__layout.svelte +++ b/src/routes/services/[id]/__layout.svelte @@ -270,6 +270,38 @@
+ + {/if} diff --git a/src/routes/services/[id]/fider/index.json.ts b/src/routes/services/[id]/fider/index.json.ts new file mode 100644 index 000000000..b561e3392 --- /dev/null +++ b/src/routes/services/[id]/fider/index.json.ts @@ -0,0 +1,57 @@ +import { getUserDetails } from '$lib/common'; +import { encrypt } from '$lib/crypto'; +import * as db from '$lib/database'; +import { ErrorHandler } from '$lib/database'; +import type { RequestHandler } from '@sveltejs/kit'; + +export const post: RequestHandler = async (event) => { + const { status, body } = await getUserDetails(event); + if (status === 401) return { status, body }; + + const { id } = event.params; + + let { + name, + fqdn, + fider: { + emailNoreply, + emailMailgunApiKey, + emailMailgunDomain, + emailMailgunRegion, + emailSmtpHost, + emailSmtpPort, + emailSmtpUser, + emailSmtpPassword, + emailSmtpEnableStartTls + } + } = await event.request.json(); + + if (fqdn) fqdn = fqdn.toLowerCase(); + if (emailNoreply) emailNoreply = emailNoreply.toLowerCase(); + if (emailSmtpHost) emailSmtpHost = emailSmtpHost.toLowerCase(); + if (emailSmtpPassword) { + emailSmtpPassword = encrypt(emailSmtpPassword); + } + if (emailSmtpPort) emailSmtpPort = Number(emailSmtpPort); + if (emailSmtpEnableStartTls) emailSmtpEnableStartTls = Boolean(emailSmtpEnableStartTls); + + try { + await db.updateFiderService({ + id, + fqdn, + name, + emailNoreply, + emailMailgunApiKey, + emailMailgunDomain, + emailMailgunRegion, + emailSmtpHost, + emailSmtpPort, + emailSmtpUser, + emailSmtpPassword, + emailSmtpEnableStartTls + }); + return { status: 201 }; + } catch (error) { + return ErrorHandler(error); + } +}; diff --git a/src/routes/services/[id]/fider/start.json.ts b/src/routes/services/[id]/fider/start.json.ts new file mode 100644 index 000000000..812497620 --- /dev/null +++ b/src/routes/services/[id]/fider/start.json.ts @@ -0,0 +1,154 @@ +import { + asyncExecShell, + createDirectories, + getDomain, + getEngine, + getUserDetails +} from '$lib/common'; +import * as db from '$lib/database'; +import { promises as fs } from 'fs'; +import yaml from 'js-yaml'; +import type { RequestHandler } from '@sveltejs/kit'; +import { ErrorHandler, getServiceImage } from '$lib/database'; +import { makeLabelForServices } from '$lib/buildPacks/common'; +import type { ComposeFile } from '$lib/types/composeFile'; +import type { Service, DestinationDocker, Prisma } from '@prisma/client'; + +export const post: RequestHandler = async (event) => { + const { teamId, status, body } = await getUserDetails(event); + if (status === 401) return { status, body }; + + const { id } = event.params; + + try { + const service: Service & Prisma.ServiceInclude & { destinationDocker: DestinationDocker } = + await db.getService({ id, teamId }); + const { + type, + version, + fqdn, + destinationDockerId, + destinationDocker, + serviceSecret, + fider: { + postgresqlUser, + postgresqlPassword, + postgresqlDatabase, + jwtSecret, + emailNoreply, + emailMailgunApiKey, + emailMailgunDomain, + emailMailgunRegion, + emailSmtpHost, + emailSmtpPort, + emailSmtpUser, + emailSmtpPassword, + emailSmtpEnableStartTls + } + } = service; + const network = destinationDockerId && destinationDocker.network; + const host = getEngine(destinationDocker.engine); + + const { workdir } = await createDirectories({ repository: type, buildId: id }); + const image = getServiceImage(type); + const domain = getDomain(fqdn); + const config = { + fider: { + image: `${image}:${version}`, + environmentVariables: { + HOST_DOMAIN: domain, + 
DATABASE_URL: `postgresql://${postgresqlUser}:${postgresqlPassword}@${id}-postgresql:5432/${postgresqlDatabase}?sslmode=disable`, + JWT_SECRET: `${jwtSecret.replace(/\$/g, '$$$')}`, + EMAIL_NOREPLY: emailNoreply, + EMAIL_MAILGUN_API: emailMailgunApiKey, + EMAIL_MAILGUN_REGION: emailMailgunRegion, + EMAIL_MAILGUN_DOMAIN: emailMailgunDomain, + EMAIL_SMTP_HOST: emailSmtpHost, + EMAIL_SMTP_PORT: emailSmtpPort, + EMAIL_SMTP_USER: emailSmtpUser, + EMAIL_SMTP_PASSWORD: emailSmtpPassword, + EMAIL_SMTP_ENABLE_STARTTLS: emailSmtpEnableStartTls + } + }, + postgresql: { + image: 'postgres:12-alpine', + volume: `${id}-postgresql-data:/var/lib/postgresql/data`, + environmentVariables: { + POSTGRES_USER: postgresqlUser, + POSTGRES_PASSWORD: postgresqlPassword, + POSTGRES_DB: postgresqlDatabase + } + } + }; + if (serviceSecret.length > 0) { + serviceSecret.forEach((secret) => { + config.fider.environmentVariables[secret.name] = secret.value; + }); + } + + const composeFile: ComposeFile = { + version: '3.8', + services: { + [id]: { + container_name: id, + image: config.fider.image, + environment: config.fider.environmentVariables, + networks: [network], + volumes: [], + restart: 'always', + labels: makeLabelForServices('fider'), + deploy: { + restart_policy: { + condition: 'on-failure', + delay: '5s', + max_attempts: 3, + window: '120s' + } + }, + depends_on: [`${id}-postgresql`] + }, + [`${id}-postgresql`]: { + image: config.postgresql.image, + container_name: `${id}-postgresql`, + environment: config.postgresql.environmentVariables, + networks: [network], + volumes: [config.postgresql.volume], + restart: 'always', + deploy: { + restart_policy: { + condition: 'on-failure', + delay: '5s', + max_attempts: 3, + window: '120s' + } + } + } + }, + networks: { + [network]: { + external: true + } + }, + volumes: { + [config.postgresql.volume.split(':')[0]]: { + name: config.postgresql.volume.split(':')[0] + } + } + }; + const composeFileDestination = `${workdir}/docker-compose.yaml`; + await fs.writeFile(composeFileDestination, yaml.dump(composeFile)); + + try { + await asyncExecShell(`DOCKER_HOST=${host} docker compose -f ${composeFileDestination} pull`); + await asyncExecShell(`DOCKER_HOST=${host} docker compose -f ${composeFileDestination} up -d`); + return { + status: 200 + }; + } catch (error) { + console.log(error); + return ErrorHandler(error); + } + } catch (error) { + return ErrorHandler(error); + } +}; diff --git a/src/routes/services/[id]/fider/stop.json.ts b/src/routes/services/[id]/fider/stop.json.ts new file mode 100644 index 000000000..67dd96d04 --- /dev/null +++ b/src/routes/services/[id]/fider/stop.json.ts @@ -0,0 +1,42 @@ +import { getUserDetails, removeDestinationDocker } from '$lib/common'; +import * as db from '$lib/database'; +import { ErrorHandler } from '$lib/database'; +import { checkContainer, stopTcpHttpProxy } from '$lib/haproxy'; +import type { RequestHandler } from '@sveltejs/kit'; + +export const post: RequestHandler = async (event) => { + const { teamId, status, body } = await getUserDetails(event); + if (status === 401) return { status, body }; + + const { id } = event.params; + + try { + const service = await db.getService({ id, teamId }); + const { destinationDockerId, destinationDocker } = service; + if (destinationDockerId) { + const engine = destinationDocker.engine; + + try { + const found = await checkContainer(engine, id); + if (found) { + await removeDestinationDocker({ id, engine }); + } + } catch (error) { + console.error(error); + } + try { + const found = await 
checkContainer(engine, `${id}-postgresql`); + if (found) { + await removeDestinationDocker({ id: `${id}-postgresql`, engine }); + } + } catch (error) { + console.error(error); + } + } + return { + status: 200 + }; + } catch (error) { + return ErrorHandler(error); + } +}; diff --git a/src/routes/services/[id]/hasura/index.json.ts b/src/routes/services/[id]/hasura/index.json.ts new file mode 100644 index 000000000..d717502c5 --- /dev/null +++ b/src/routes/services/[id]/hasura/index.json.ts @@ -0,0 +1,21 @@ +import { getUserDetails } from '$lib/common'; +import * as db from '$lib/database'; +import { ErrorHandler } from '$lib/database'; +import type { RequestHandler } from '@sveltejs/kit'; + +export const post: RequestHandler = async (event) => { + const { status, body } = await getUserDetails(event); + if (status === 401) return { status, body }; + + const { id } = event.params; + + let { name, fqdn } = await event.request.json(); + if (fqdn) fqdn = fqdn.toLowerCase(); + + try { + await db.updateService({ id, fqdn, name }); + return { status: 201 }; + } catch (error) { + return ErrorHandler(error); + } +}; diff --git a/src/routes/services/[id]/hasura/start.json.ts b/src/routes/services/[id]/hasura/start.json.ts new file mode 100644 index 000000000..325d6e33c --- /dev/null +++ b/src/routes/services/[id]/hasura/start.json.ts @@ -0,0 +1,122 @@ +import { asyncExecShell, createDirectories, getEngine, getUserDetails } from '$lib/common'; +import * as db from '$lib/database'; +import { promises as fs } from 'fs'; +import yaml from 'js-yaml'; +import type { RequestHandler } from '@sveltejs/kit'; +import { ErrorHandler, getServiceImage } from '$lib/database'; +import { makeLabelForServices } from '$lib/buildPacks/common'; +import type { ComposeFile } from '$lib/types/composeFile'; +import type { Service, DestinationDocker, Prisma } from '@prisma/client'; + +export const post: RequestHandler = async (event) => { + const { teamId, status, body } = await getUserDetails(event); + if (status === 401) return { status, body }; + + const { id } = event.params; + + try { + const service: Service & Prisma.ServiceInclude & { destinationDocker: DestinationDocker } = + await db.getService({ id, teamId }); + const { + type, + version, + destinationDockerId, + destinationDocker, + serviceSecret, + hasura: { postgresqlUser, postgresqlPassword, postgresqlDatabase } + } = service; + const network = destinationDockerId && destinationDocker.network; + const host = getEngine(destinationDocker.engine); + + const { workdir } = await createDirectories({ repository: type, buildId: id }); + const image = getServiceImage(type); + + const config = { + hasura: { + image: `${image}:${version}`, + environmentVariables: { + HASURA_GRAPHQL_METADATA_DATABASE_URL: `postgresql://${postgresqlUser}:${postgresqlPassword}@${id}-postgresql:5432/${postgresqlDatabase}` + } + }, + postgresql: { + image: 'postgres:12-alpine', + volume: `${id}-postgresql-data:/var/lib/postgresql/data`, + environmentVariables: { + POSTGRES_USER: postgresqlUser, + POSTGRES_PASSWORD: postgresqlPassword, + POSTGRES_DB: postgresqlDatabase + } + } + }; + if (serviceSecret.length > 0) { + serviceSecret.forEach((secret) => { + config.hasura.environmentVariables[secret.name] = secret.value; + }); + } + + const composeFile: ComposeFile = { + version: '3.8', + services: { + [id]: { + container_name: id, + image: config.hasura.image, + environment: config.hasura.environmentVariables, + networks: [network], + volumes: [], + restart: 'always', + labels: 
makeLabelForServices('hasura'), + deploy: { + restart_policy: { + condition: 'on-failure', + delay: '5s', + max_attempts: 3, + window: '120s' + } + }, + depends_on: [`${id}-postgresql`] + }, + [`${id}-postgresql`]: { + image: config.postgresql.image, + container_name: `${id}-postgresql`, + environment: config.postgresql.environmentVariables, + networks: [network], + volumes: [config.postgresql.volume], + restart: 'always', + deploy: { + restart_policy: { + condition: 'on-failure', + delay: '5s', + max_attempts: 3, + window: '120s' + } + } + } + }, + networks: { + [network]: { + external: true + } + }, + volumes: { + [config.postgresql.volume.split(':')[0]]: { + name: config.postgresql.volume.split(':')[0] + } + } + }; + const composeFileDestination = `${workdir}/docker-compose.yaml`; + await fs.writeFile(composeFileDestination, yaml.dump(composeFile)); + + try { + await asyncExecShell(`DOCKER_HOST=${host} docker compose -f ${composeFileDestination} pull`); + await asyncExecShell(`DOCKER_HOST=${host} docker compose -f ${composeFileDestination} up -d`); + return { + status: 200 + }; + } catch (error) { + console.log(error); + return ErrorHandler(error); + } + } catch (error) { + return ErrorHandler(error); + } +}; diff --git a/src/routes/services/[id]/hasura/stop.json.ts b/src/routes/services/[id]/hasura/stop.json.ts new file mode 100644 index 000000000..67dd96d04 --- /dev/null +++ b/src/routes/services/[id]/hasura/stop.json.ts @@ -0,0 +1,42 @@ +import { getUserDetails, removeDestinationDocker } from '$lib/common'; +import * as db from '$lib/database'; +import { ErrorHandler } from '$lib/database'; +import { checkContainer, stopTcpHttpProxy } from '$lib/haproxy'; +import type { RequestHandler } from '@sveltejs/kit'; + +export const post: RequestHandler = async (event) => { + const { teamId, status, body } = await getUserDetails(event); + if (status === 401) return { status, body }; + + const { id } = event.params; + + try { + const service = await db.getService({ id, teamId }); + const { destinationDockerId, destinationDocker } = service; + if (destinationDockerId) { + const engine = destinationDocker.engine; + + try { + const found = await checkContainer(engine, id); + if (found) { + await removeDestinationDocker({ id, engine }); + } + } catch (error) { + console.error(error); + } + try { + const found = await checkContainer(engine, `${id}-postgresql`); + if (found) { + await removeDestinationDocker({ id: `${id}-postgresql`, engine }); + } + } catch (error) { + console.error(error); + } + } + return { + status: 200 + }; + } catch (error) { + return ErrorHandler(error); + } +}; diff --git a/src/routes/services/[id]/logs/_Loading.svelte b/src/routes/services/[id]/logs/_Loading.svelte new file mode 100644 index 000000000..73fbe709e --- /dev/null +++ b/src/routes/services/[id]/logs/_Loading.svelte @@ -0,0 +1,41 @@ +
+
+
+
+ + diff --git a/src/routes/services/[id]/logs/index.json.ts b/src/routes/services/[id]/logs/index.json.ts new file mode 100644 index 000000000..fe94db9ab --- /dev/null +++ b/src/routes/services/[id]/logs/index.json.ts @@ -0,0 +1,66 @@ +import { getUserDetails } from '$lib/common'; +import * as db from '$lib/database'; +import { ErrorHandler } from '$lib/database'; +import { dayjs } from '$lib/dayjs'; +import { dockerInstance } from '$lib/docker'; +import type { RequestHandler } from '@sveltejs/kit'; + +export const get: RequestHandler = async (event) => { + const { status, body } = await getUserDetails(event); + if (status === 401) return { status, body }; + + const { id } = event.params; + let since = event.url.searchParams.get('since') || 0; + if (since !== 0) { + since = dayjs(since).unix(); + } + try { + const { destinationDockerId, destinationDocker } = await db.prisma.service.findUnique({ + where: { id }, + include: { destinationDocker: true } + }); + if (destinationDockerId) { + const docker = dockerInstance({ destinationDocker }); + try { + const container = await docker.engine.getContainer(id); + if (container) { + const logs = ( + await container.logs({ + stdout: true, + stderr: true, + timestamps: true, + since, + tail: 5000 + }) + ) + .toString() + .split('\n') + .map((l) => l.slice(8)) + .filter((a) => a); + return { + body: { + logs + } + }; + } + } catch (error) { + const { statusCode } = error; + if (statusCode === 404) { + return { + body: { + logs: [] + } + }; + } + } + } + return { + status: 200, + body: { + message: 'No logs found.' + } + }; + } catch (error) { + return ErrorHandler(error); + } +}; diff --git a/src/routes/services/[id]/logs/index.svelte b/src/routes/services/[id]/logs/index.svelte new file mode 100644 index 000000000..ffbd109de --- /dev/null +++ b/src/routes/services/[id]/logs/index.svelte @@ -0,0 +1,179 @@ + + + + +
+
+
+ Service Logs +
+ {service.name} +
+ + {#if service.fqdn} + + + + + + + {/if} +
+
+ {#if logs.length === 0} +
{$t('application.build.waiting_logs')}
+ {:else} +
+
+ {#if loadLogsInterval} + + {/if} +
+ +
+
+
+ {#each logs as log} + {log + '\n'} + {/each} +
+
+
+ {/if} +
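The new log pages for databases and services both poll a `logs/index.json.ts` endpoint that fetches container output through dockerode and then strips the first 8 characters of every line (`.slice(8)`). That offset corresponds to Docker's stream-multiplexing header: when a container runs without a TTY, each log frame starts with one byte identifying the stream (1 = stdout, 2 = stderr), three zero bytes, and a 4-byte big-endian payload length. The helper below is a hypothetical illustration of that framing, not code from this PR; the endpoint's simpler per-line `slice(8)` achieves much the same result for line-oriented logs.

```ts
// Hypothetical demultiplexer for Docker's log stream framing (Node Buffer API).
function demuxFrame(buf: Buffer): { stream: 'stdout' | 'stderr'; payload: string; rest: Buffer } {
	const stream = buf[0] === 2 ? 'stderr' : 'stdout'; // byte 0: stream type
	const length = buf.readUInt32BE(4); // bytes 4-7: payload length, big-endian
	const payload = buf.subarray(8, 8 + length).toString('utf8');
	return { stream, payload, rest: buf.subarray(8 + length) };
}

// Example: one stdout frame carrying "hello\n".
const frame = Buffer.concat([Buffer.from([1, 0, 0, 0, 0, 0, 0, 6]), Buffer.from('hello\n')]);
console.log(demuxFrame(frame)); // { stream: 'stdout', payload: 'hello\n', rest: <empty Buffer> }
```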
diff --git a/src/routes/services/index.svelte b/src/routes/services/index.svelte index fd784d049..bab3a4322 100644 --- a/src/routes/services/index.svelte +++ b/src/routes/services/index.svelte @@ -16,6 +16,8 @@ import { session } from '$app/stores'; import { getDomain } from '$lib/components/common'; import Umami from '$lib/components/svg/services/Umami.svelte'; + import Hasura from '$lib/components/svg/services/Hasura.svelte'; + import Fider from '$lib/components/svg/services/Fider.svelte'; export let services; async function newService() { @@ -89,6 +91,10 @@ {:else if service.type === 'umami'} + {:else if service.type === 'hasura'} + + {:else if service.type === 'fider'} + {/if}
{service.name} @@ -138,6 +144,10 @@ {:else if service.type === 'umami'} + {:else if service.type === 'hasura'} + + {:else if service.type === 'fider'} + {/if}
{service.name} diff --git a/src/routes/settings/index.json.ts b/src/routes/settings/index.json.ts index 210f12782..5546d782f 100644 --- a/src/routes/settings/index.json.ts +++ b/src/routes/settings/index.json.ts @@ -64,13 +64,20 @@ export const post: RequestHandler = async (event) => { }; if (status === 401) return { status, body }; - const { fqdn, isRegistrationEnabled, dualCerts, minPort, maxPort, isAutoUpdateEnabled } = - await event.request.json(); + const { + fqdn, + isRegistrationEnabled, + dualCerts, + minPort, + maxPort, + isAutoUpdateEnabled, + isDNSCheckEnabled + } = await event.request.json(); try { const { id } = await db.listSettings(); await db.prisma.setting.update({ where: { id }, - data: { isRegistrationEnabled, dualCerts, isAutoUpdateEnabled } + data: { isRegistrationEnabled, dualCerts, isAutoUpdateEnabled, isDNSCheckEnabled } }); if (fqdn) { await db.prisma.setting.update({ where: { id }, data: { fqdn } }); diff --git a/src/routes/settings/index.svelte b/src/routes/settings/index.svelte index dce1362d7..6a9b6ba5a 100644 --- a/src/routes/settings/index.svelte +++ b/src/routes/settings/index.svelte @@ -28,8 +28,6 @@ import { session } from '$app/stores'; export let settings; - import Cookies from 'js-cookie'; - import langs from '$lib/lang.json'; import Setting from '$lib/components/Setting.svelte'; import Explainer from '$lib/components/Explainer.svelte'; import { errorNotification } from '$lib/form'; @@ -39,10 +37,12 @@ import { getDomain } from '$lib/components/common'; import { toast } from '@zerodevx/svelte-toast'; import { t } from '$lib/translations'; + import { features } from '$lib/store'; let isRegistrationEnabled = settings.isRegistrationEnabled; let dualCerts = settings.dualCerts; let isAutoUpdateEnabled = settings.isAutoUpdateEnabled; + let isDNSCheckEnabled = settings.isDNSCheckEnabled; let minPort = settings.minPort; let maxPort = settings.maxPort; @@ -78,7 +78,15 @@ if (name === 'isAutoUpdateEnabled') { isAutoUpdateEnabled = !isAutoUpdateEnabled; } - await post(`/settings.json`, { isRegistrationEnabled, dualCerts, isAutoUpdateEnabled }); + if (name === 'isDNSCheckEnabled') { + isDNSCheckEnabled = !isDNSCheckEnabled; + } + await post(`/settings.json`, { + isRegistrationEnabled, + dualCerts, + isAutoUpdateEnabled, + isDNSCheckEnabled + }); return toast.push(t.get('application.settings_saved')); } catch ({ error }) { return errorNotification(error); @@ -176,13 +184,21 @@ />
+
+ changeSettings('isDNSCheckEnabled')} + /> +
!isFqdnSet && changeSettings('dualCerts')} />
@@ -194,7 +210,7 @@ on:click={() => changeSettings('isRegistrationEnabled')} />
- {#if browser && (window.location.hostname === 'staging.coolify.io' || window.location.hostname === 'localhost')} + {#if browser && $features.beta}
{ type: 'webhook_commit' } }); - await buildQueue.add(buildId, { - build_id: buildId, - type: 'webhook_commit', - ...applicationFound - }); + await buildQueue.add( + buildId, + { + build_id: buildId, + type: 'webhook_commit', + ...applicationFound + }, + { jobId: buildId } + ); return { status: 200, body: { @@ -160,13 +164,17 @@ export const post: RequestHandler = async (event) => { type: 'webhook_pr' } }); - await buildQueue.add(buildId, { - build_id: buildId, - type: 'webhook_pr', - ...applicationFound, - sourceBranch, - pullmergeRequestId - }); + await buildQueue.add( + buildId, + { + build_id: buildId, + type: 'webhook_pr', + ...applicationFound, + sourceBranch, + pullmergeRequestId + }, + { jobId: buildId } + ); return { status: 200, body: { diff --git a/src/routes/webhooks/gitlab/events.ts b/src/routes/webhooks/gitlab/events.ts index f8bb54383..d84646088 100644 --- a/src/routes/webhooks/gitlab/events.ts +++ b/src/routes/webhooks/gitlab/events.ts @@ -73,11 +73,15 @@ export const post: RequestHandler = async (event) => { type: 'webhook_commit' } }); - await buildQueue.add(buildId, { - build_id: buildId, - type: 'webhook_commit', - ...applicationFound - }); + await buildQueue.add( + buildId, + { + build_id: buildId, + type: 'webhook_commit', + ...applicationFound + }, + { jobId: buildId } + ); return { status: 200, body: { @@ -156,13 +160,17 @@ export const post: RequestHandler = async (event) => { type: 'webhook_mr' } }); - await buildQueue.add(buildId, { - build_id: buildId, - type: 'webhook_mr', - ...applicationFound, - sourceBranch, - pullmergeRequestId - }); + await buildQueue.add( + buildId, + { + build_id: buildId, + type: 'webhook_mr', + ...applicationFound, + sourceBranch, + pullmergeRequestId + }, + { jobId: buildId } + ); return { status: 200, body: { diff --git a/src/tailwind.css b/src/tailwind.css index 1e48bb81b..529c91cb9 100644 --- a/src/tailwind.css +++ b/src/tailwind.css @@ -46,14 +46,14 @@ #svelte .custom-select-wrapper .selectContainer input { } #svelte .custom-select-wrapper .selectContainer { - @apply h-12 w-96 rounded border-none bg-coolgray-200 p-2 text-xs font-bold tracking-tight outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 md:text-sm; + @apply h-12 w-96 rounded border-none bg-coolgray-200 p-2 px-0 text-xs tracking-tight outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 md:text-sm; } #svelte .listContainer { @apply bg-coolgray-400 text-white scrollbar-w-2 scrollbar-thumb-green-500 scrollbar-track-coolgray-200; } #svelte .selectedItem { - @apply pl-3; + @apply pl-2; } #svelte .item.hover { @@ -64,7 +64,7 @@ #svelte .item.active { } select { - @apply h-12 w-96 rounded bg-coolgray-200 p-2 text-xs font-bold tracking-tight text-white placeholder-stone-600 outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 disabled:text-stone-600 md:text-sm; + @apply h-12 w-96 rounded bg-coolgray-200 p-2 text-xs font-bold tracking-tight text-white placeholder-stone-600 outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 disabled:text-stone-600 md:text-sm; } .svelte-select { --background: rgb(32 32 32);
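The GitHub and GitLab webhook handlers above now pass a third argument, `{ jobId: buildId }`, to `buildQueue.add`. Pinning the queue job id to the build id makes the job addressable later, which is what the new cancel button in `_BuildLog.svelte` (posting `buildId` to `/applications/[id]/cancel.json`) depends on. The sketch below assumes a BullMQ-backed queue, which matches the `add(name, data, { jobId })` call shape in the diff; the queue setup and helper names are illustrative, not Coolify's actual wiring.

```ts
// Illustrative only: shows why a fixed jobId matters for cancellation.
import { Queue } from 'bullmq';

const buildQueue = new Queue('build'); // assumes a local Redis instance

async function enqueueBuild(buildId: string, payload: Record<string, unknown>) {
	// Using buildId as the jobId makes the queued job addressable by the same id the UI knows.
	await buildQueue.add(buildId, { build_id: buildId, ...payload }, { jobId: buildId });
}

async function cancelBuild(buildId: string) {
	// A cancel request can then look up exactly that job and remove it
	// (remove() only succeeds while the job is not locked by a worker).
	const job = await buildQueue.getJob(buildId);
	if (job) await job.remove();
}
```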