backups... backups everywhere

commit f12d453b5f (parent 8a00b711be)
.gitignore (vendored, 5 changes)
@@ -10,8 +10,11 @@ package
dist
client
apps/api/db/*.db
local-serve
apps/api/db/migration.db-journal
apps/api/core*
apps/backup/backups/*
!apps/backup/backups/.gitkeep
logs
others/certificates
backups/*
!backups/.gitkeep
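(The `*` plus `!.gitkeep` pairs ignore everything inside the two backup directories while keeping the otherwise-empty directories themselves in the repository, since git cannot track empty directories.)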
@@ -467,7 +467,6 @@ async function plausibleAnalytics(service: any, template: any) {
	// Disconnect old service data
	// await prisma.service.update({ where: { id: service.id }, data: { plausibleAnalytics: { disconnect: true } } })
}

async function migrateSettings(settings: any[], service: any, template: any) {
	for (const setting of settings) {
		try {
@@ -528,4 +527,4 @@ async function createVolumes(service: any, template: any) {
			// console.log('Creating volume', volumeName, path, containerId, 'for service', service.id, ', service name:', service.name)
			await prisma.servicePersistentStorage.findFirst({ where: { volumeName, serviceId: service.id } }) || await prisma.servicePersistentStorage.create({ data: { volumeName, path, containerId, predefined: true, service: { connect: { id: service.id } } } })
		}
	}
}
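(The `findFirst(...) || create(...)` line above is a find-or-create guard: the persistent-storage record is only inserted when no row with the same volumeName and serviceId exists yet.)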
@@ -14,6 +14,7 @@ import {
	uniqueName,
	version,
	sentryDSN,
	executeDockerCmd,
} from "../../../lib/common";
import { scheduler } from "../../../lib/scheduler";
import type { FastifyReply, FastifyRequest } from "fastify";
@@ -25,6 +26,35 @@ export async function hashPassword(password: string): Promise<string> {
	return bcrypt.hash(password, saltRounds);
}

export async function backup(request: FastifyRequest) {
	try {
		const { backupData } = request.params;
		let std = null;
		const [id, backupType, type, zipped, storage] = backupData.split(':')
		console.log(id, backupType, type, zipped, storage)
		const database = await prisma.database.findUnique({ where: { id } })
		if (database) {
			// await executeDockerCmd({
			//     dockerId: database.destinationDockerId,
			//     command: `docker pull coollabsio/backup:latest`,
			// })
			std = await executeDockerCmd({
				dockerId: database.destinationDockerId,
				command: `docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v coolify-local-backup:/app/backups -e CONTAINERS_TO_BACKUP="${backupData}" coollabsio/backup`
			})

		}
		// std stays null when no database matches the id; guard with optional chaining
		if (std?.stdout) {
			return std.stdout;
		}
		if (std?.stderr) {
			return std.stderr;
		}
		return 'nope';
	} catch ({ status, message }) {
		return errorHandler({ status, message });
	}
}
export async function cleanupManually(request: FastifyRequest) {
	try {
		const { serverId } = request.body;
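`backupData`, consumed by the `backup()` handler above, is a colon-separated descriptor. A minimal sketch of what its destructuring yields, with sample values mirroring the dev constants in apps/backup/src/index.mjs (the comments are illustrative, not part of the commit):

const backupData = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:local';
const [id, backupType, type, zipped, storage] = backupData.split(':');
// id         -> 'clb6c9ue4000a8lputdd5g1cl'  (Prisma database id)
// backupType -> 'database'
// type       -> 'mysql' or 'postgresql'
// zipped     -> 'raw' or 'gzip'
// storage    -> 'local', or a pipe-delimited S3 target such as
//               'minio|http|<host>|<bucket>|<access_key>|<secret_key>'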
@@ -1,5 +1,5 @@
import { FastifyPluginAsync } from 'fastify';
-import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify } from './handlers';
+import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify, backup } from './handlers';
import { GetCurrentUser } from './types';

export interface Update {
@@ -52,6 +52,10 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
	fastify.post('/internal/cleanup', {
		onRequest: [fastify.authenticate]
	}, async (request) => await cleanupManually(request));

	fastify.get('/internal/backup/:backupData', {
		onRequest: [fastify.authenticate]
	}, async (request) => await backup(request));
};

export default root;
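A sketch of how a client might call the new route; the `/api/v1` prefix and bearer-token auth are assumptions inferred from the file's location and the `fastify.authenticate` guard, not shown in this diff. `encodeURIComponent` matters because S3-backed descriptors contain `|` and extra `:` characters:

const token = process.env.COOLIFY_TOKEN; // placeholder credential
const backupData = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:local';
const res = await fetch(`/api/v1/internal/backup/${encodeURIComponent(backupData)}`, {
	headers: { Authorization: `Bearer ${token}` }
});
console.log(await res.text()); // stdout/stderr of the backup container, or 'nope'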
apps/backup/.dockerignore (new file, 2 lines)
@@ -0,0 +1,2 @@
node_modules
backup/*
apps/backup/Dockerfile (new file, 27 lines)
@@ -0,0 +1,27 @@
ARG PNPM_VERSION=7.17.1

FROM node:18-slim as build
WORKDIR /app
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}

COPY ./package*.json .
RUN pnpm install -p
COPY . .

# Production build
FROM node:18-slim
ARG DOCKER_VERSION=20.10.18
ARG TARGETPLATFORM
ENV NODE_ENV production

WORKDIR /app

RUN apt update && apt -y install curl
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-$DOCKER_VERSION -o /usr/bin/docker
RUN chmod +x /usr/bin/docker
COPY --from=minio/mc:latest /usr/bin/mc /usr/bin/mc
COPY --from=build /app/ .

ENV CHECKPOINT_DISABLE=1
CMD node /app/src/index.mjs
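(Note on the ARGs: `PNPM_VERSION` is declared before the first FROM, and an ARG declared there is only usable in FROM lines, not inside build stages, so both `pnpm@${PNPM_VERSION}` expansions come out empty and the version pin is silently lost; redeclaring `ARG PNPM_VERSION` after each FROM would fix it. `TARGETPLATFORM` is populated automatically by BuildKit.)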
apps/backup/backups/.gitkeep (new, empty file)
apps/backup/package.json (new file, 24 lines)
@@ -0,0 +1,24 @@
{
  "name": "backup",
  "version": "0.0.1",
  "description": "",
  "author": "Andras Bacsai",
  "license": "Apache-2.0",
  "main": "index.mjs",
  "type": "module",
  "scripts": {
    "start": "NODE_ENV=production node src/index.mjs",
    "dev": "pnpm cleanup && NODE_ENV=development node src/index.mjs",
    "build": "docker build -t backup .",
    "test": "pnpm build && docker run -ti --rm -v /var/run/docker.sock:/var/run/docker.sock -v /root/devel/coolify/apps/backup/backups:/app/backups -e CONTAINERS_TO_BACKUP='clatmhc6000008lvb5a5tnvsk:database:mysql:local' backup",
    "cleanup": "rm -rf backups/*"
  },
  "keywords": [],
  "dependencies": {
    "@aws-sdk/client-s3": "^3.222.0",
    "@aws-sdk/lib-storage": "^3.222.0",
    "cuid": "2.1.8",
    "dotenv": "16.0.3",
    "zx": "7.1.1"
  }
}
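(Two details worth noting: the `test` script mounts the host's Docker socket so the containerized tool can `docker exec` into sibling containers, and its sample CONTAINERS_TO_BACKUP value is still the older four-field descriptor without the raw/gzip segment; against the five-field parsing in src/index.mjs below, `storage` would come out undefined.)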
apps/backup/src/index.mjs (new file, 126 lines)
@@ -0,0 +1,126 @@
import * as dotenv from 'dotenv';
dotenv.config()

import 'zx/globals';
import cuid from 'cuid';
import { S3, PutObjectCommand } from "@aws-sdk/client-s3";
import fs from 'fs';

const isDev = process.env.NODE_ENV === 'development'
$.verbose = !!isDev

if (!process.env.CONTAINERS_TO_BACKUP && !isDev) {
	console.log(chalk.red(`No containers to backup!`))
	process.exit(1)
}
const mysqlGzipLocal = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:local';
const mysqlRawLocal = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:raw:local';
const postgresqlGzipLocal = 'clb6c15yi00008lpuezop7cy0:database:postgresql:gzip:local';
const postgresqlRawLocal = 'clb6c15yi00008lpuezop7cy0:database:postgresql:raw:local';

const minio = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:minio|http|min.arm.coolify.io|backups|<access_key>|<secret_key>';
const digitalOcean = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:do|https|fra1.digitaloceanspaces.com|backups|<access_key>|<secret_key>';

const devContainers = [mysqlGzipLocal, mysqlRawLocal, postgresqlGzipLocal, postgresqlRawLocal]

const containers = isDev
	? devContainers
	: process.env.CONTAINERS_TO_BACKUP.split(',')

const backup = async (container) => {
	const id = cuid()
	const [name, backupType, type, zipped, storage] = container.split(':')
	const directory = `backups`;
	const filename = zipped === 'raw'
		? `${name}-${type}-${backupType}-${new Date().getTime()}.sql`
		: `${name}-${type}-${backupType}-${new Date().getTime()}.tgz`
	const backup = `${directory}/${filename}`;

	try {
		await $`docker inspect ${name.split(' ')[0]}`.quiet()
		if (backupType === 'database') {
			if (type === 'mysql') {
				console.log(chalk.blue(`Backing up ${name}:${type}...`))
				const { stdout: rootPassword } = await $`docker exec ${name} printenv MYSQL_ROOT_PASSWORD`.quiet()
				if (zipped === 'raw') {
					await $`docker exec ${name} sh -c "exec mysqldump --all-databases -uroot -p${rootPassword.trim()}" > ${backup}`
				} else if (zipped === 'gzip') {
					await $`docker exec ${name} sh -c "exec mysqldump --all-databases -uroot -p${rootPassword.trim()}" | gzip > ${backup}`
				}
			}
			if (type === 'postgresql') {
				console.log(chalk.blue(`Backing up ${name}:${type}...`))
				const { stdout: userPassword } = await $`docker exec ${name} printenv POSTGRES_PASSWORD`
				const { stdout: user } = await $`docker exec ${name} printenv POSTGRES_USER`
				if (zipped === 'raw') {
					// pass the password via PGPASSWORD; the original trailing -W flag sat outside
					// the quoted sh -c string and would have forced an interactive prompt
					await $`docker exec ${name} sh -c "PGPASSWORD=${userPassword.trim()} exec pg_dumpall -c -U ${user.trim()}" > ${backup}`
				} else if (zipped === 'gzip') {
					await $`docker exec ${name} sh -c "PGPASSWORD=${userPassword.trim()} exec pg_dumpall -c -U ${user.trim()}" | gzip > ${backup}`
				}
			}
			const [storageType, ...storageArgs] = storage.split('|')
			if (storageType !== 'local') {
				let s3Protocol, s3Url, s3Bucket, s3Key, s3Secret = null
				if (storageArgs.length > 0) {
					[s3Protocol, s3Url, s3Bucket, s3Key, s3Secret] = storageArgs
				}
				if (storageType === 'minio') {
					if (!s3Protocol || !s3Url || !s3Bucket || !s3Key || !s3Secret) {
						console.log(chalk.red(`Invalid storage arguments for ${name}:${type}!`))
						return
					}
					await $`mc alias set ${id} ${s3Protocol}://${s3Url} ${s3Key} ${s3Secret}`
					await $`mc stat ${id}`
					await $`mc cp ${backup} ${id}/${s3Bucket}`
					await $`rm ${backup}`
					await $`mc alias rm ${id}`
				} else if (storageType === 'do') {
					if (!s3Protocol || !s3Url || !s3Bucket || !s3Key || !s3Secret) {
						console.log(chalk.red(`Invalid storage arguments for ${name}:${type}!`))
						return
					}
					console.log({ s3Protocol, s3Url, s3Bucket, s3Key, s3Secret })
					console.log(chalk.blue(`Uploading ${name}:${type} to DigitalOcean Spaces...`))
					const readstream = fs.createReadStream(backup)
					const bucketParams = {
						Bucket: s3Bucket,
						Key: filename,
						Body: readstream
					};
					const s3Client = new S3({
						forcePathStyle: false,
						endpoint: `${s3Protocol}://${s3Url}`,
						region: "us-east-1",
						credentials: {
							accessKeyId: s3Key,
							secretAccessKey: s3Secret
						},
					});
					try {
						const data = await s3Client.send(new PutObjectCommand(bucketParams));
						console.log(chalk.green("Successfully uploaded backup: " + bucketParams.Bucket + "/" + bucketParams.Key));
						return data;
					} catch (err) {
						console.log("Error", err);
					}
				}
			}
		}

		console.log(chalk.green(`Backup of ${name}:${type} complete!`))
	} catch (error) {
		console.log(chalk.red(`Backup of ${name}:${type} failed!`))
		console.log(chalk.red(error))
	}
}
const promises = []
for (const container of containers) {
	// await backup(container);
	promises.push(backup(container))
}
await Promise.all(promises)
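(Each `backup()` call catches its own errors, so `Promise.all` always resolves: a failed container backup is logged in red but neither aborts the other backups nor changes the process exit code.)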
backups/.gitkeep (new, empty file)
@@ -11,6 +11,7 @@ services:
        protocol: tcp
        mode: host
    volumes:
      - 'coolify-local-backup:/app/backups'
      - 'coolify-db:/app/db'
      - 'coolify-logs:/app/logs'
      - 'coolify-ssl-certs:/app/ssl'
@@ -34,6 +35,8 @@ networks:
    name: coolify-infra

volumes:
  coolify-local-backup:
    name: coolify-local-backup
  coolify-logs:
    name: coolify-logs
  coolify-db:
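(The new `coolify-local-backup` named volume is the hand-off point between the two containers: the Coolify container mounts it at /app/backups here, and the `backup()` handler starts coollabsio/backup with the same `-v coolify-local-backup:/app/backups` mount, so locally stored dumps end up where Coolify can see them.)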
pnpm-lock.yaml (1218 changes): diff suppressed because it is too large.