v1.0.7 (#32)
New features:
- Automatic error reporting (enabled by default)
- Decrease build times by leveraging Docker build caches

Fixes:
- Fix error handling
- Fix Vue autodetection
- A custom Dockerfile is no longer the default

Others:
- Clean up the `logs-servers` collection, because old errors are not standardized
- New Traefik proxy version
- Standardized directory configurations
parent bad84289c4
commit 142b83cc13
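For context on the automatic error reporting added in this release: the reworked `saveServerLog` helper further down in this diff stores each unique error once and, when the new `sendErrors` setting is enabled and the app runs in production, forwards it to the central endpoint. A condensed sketch of that flow (require paths taken from the diff, everything else illustrative):

```js
const axios = require('axios')
const ServerLog = require('../models/Logs/Server')
const Settings = require('../models/Settings')
const { version } = require('../../package.json')

async function saveServerLog (error) {
  const settings = await Settings.findOne({ applicationName: 'coolify' })
  const payload = { message: error.message, stack: error.stack, type: error.type || 'spaghetticode', version }

  // Store the error only once (deduplicate on the whole payload).
  const found = await ServerLog.find(payload)
  if (found.length === 0 && error.message) await new ServerLog(payload).save()

  // Report it upstream only when `sendErrors` is left on and we run in production.
  if (settings && settings.sendErrors && process.env.NODE_ENV === 'production') {
    await axios.post('https://errors.coollabs.io/api/error', payload)
  }
}
```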
1 .gitignore (vendored)
@@ -8,5 +8,4 @@ dist-ssr
yarn-error.log
api/development/console.log
.pnpm-debug.log
yarn.lock
.pnpm-store
@@ -1,7 +1,7 @@
module.exports = async function (fastify, opts) {
  // Private routes
  fastify.register(async function (server) {
    if (process.env.NODE_ENV === 'production') server.register(require('./plugins/authentication'))
    server.register(require('./plugins/authentication'))
    server.register(require('./routes/v1/upgrade'), { prefix: '/upgrade' })
    server.register(require('./routes/v1/settings'), { prefix: '/settings' })
    server.register(require('./routes/v1/dashboard'), { prefix: '/dashboard' })
@@ -2,18 +2,14 @@ const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')

module.exports = async function (configuration) {
  try {
    const path = `${configuration.general.workdir}/${configuration.build.directory ? configuration.build.directory : ''}`
    if (fs.stat(`${path}/Dockerfile`)) {
      const stream = await docker.engine.buildImage(
        { src: ['.'], context: path },
        { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
      )
      await streamEvents(stream, configuration)
    } else {
      throw { error: 'No custom dockerfile found.', type: 'app' }
    }
  } catch (error) {
    throw { error, type: 'server' }
  const path = `${configuration.general.workdir}/${configuration.build.directory ? configuration.build.directory : ''}`
  if (fs.stat(`${path}/Dockerfile`)) {
    const stream = await docker.engine.buildImage(
      { src: ['.'], context: path },
      { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
    )
    await streamEvents(stream, configuration)
  } else {
    throw new Error('No custom dockerfile found.')
  }
}
@ -4,22 +4,19 @@ const buildImageNodeDocker = (configuration) => {
|
||||
return [
|
||||
'FROM node:lts',
|
||||
'WORKDIR /usr/src/app',
|
||||
`COPY ${configuration.build.directory} ./`,
|
||||
`COPY ${configuration.build.directory}/package*.json .`,
|
||||
configuration.build.command.installation && `RUN ${configuration.build.command.installation}`,
|
||||
`COPY ./${configuration.build.directory} .`,
|
||||
`RUN ${configuration.build.command.build}`
|
||||
].join('\n')
|
||||
}
|
||||
async function buildImage (configuration) {
|
||||
try {
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, buildImageNodeDocker(configuration))
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, buildImageNodeDocker(configuration))
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
|
@ -7,24 +7,22 @@ const publishNodejsDocker = (configuration) => {
|
||||
'FROM node:lts',
|
||||
'WORKDIR /usr/src/app',
|
||||
configuration.build.command.build
|
||||
? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} ./`
|
||||
: `COPY ${configuration.build.directory} ./`,
|
||||
configuration.build.command.installation && `RUN ${configuration.build.command.installation}`,
|
||||
? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} .`
|
||||
: `
|
||||
COPY ${configuration.build.directory}/package*.json .
|
||||
RUN ${configuration.build.command.installation}
|
||||
COPY ./${configuration.build.directory} .`,
|
||||
`EXPOSE ${configuration.publish.port}`,
|
||||
'CMD [ "yarn", "start" ]'
|
||||
].join('\n')
|
||||
}
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
try {
|
||||
if (configuration.build.command.build) await buildImage(configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishNodejsDocker(configuration))
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
if (configuration.build.command.build) await buildImage(configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishNodejsDocker(configuration))
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
|
@ -6,21 +6,17 @@ const publishPHPDocker = (configuration) => {
|
||||
'FROM php:apache',
|
||||
'RUN a2enmod rewrite',
|
||||
'WORKDIR /usr/src/app',
|
||||
`COPY .${configuration.build.directory} /var/www/html`,
|
||||
`COPY ./${configuration.build.directory} /var/www/html`,
|
||||
'EXPOSE 80',
|
||||
' CMD ["apache2-foreground"]'
|
||||
].join('\n')
|
||||
}
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
try {
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishPHPDocker(configuration))
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishPHPDocker(configuration))
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
|
@ -37,28 +37,24 @@ const cacheRustDocker = (configuration, custom) => {
|
||||
}
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
try {
|
||||
const cargoToml = await execShellAsync(`cat ${configuration.general.workdir}/Cargo.toml`)
|
||||
const parsedToml = TOML.parse(cargoToml)
|
||||
const custom = {
|
||||
name: parsedToml.package.name
|
||||
}
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, cacheRustDocker(configuration, custom))
|
||||
|
||||
let stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:cache` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishRustDocker(configuration, custom))
|
||||
|
||||
stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
const cargoToml = await execShellAsync(`cat ${configuration.general.workdir}/Cargo.toml`)
|
||||
const parsedToml = TOML.parse(cargoToml)
|
||||
const custom = {
|
||||
name: parsedToml.package.name
|
||||
}
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, cacheRustDocker(configuration, custom))
|
||||
|
||||
let stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:cache` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishRustDocker(configuration, custom))
|
||||
|
||||
stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
|
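The build-cache improvement mentioned in the commit message shows up in the Rust buildpack above: a dedicated `<name>:cache` image is built first (from `cacheRustDocker`) and the final image is then built on top of it via `publishRustDocker`. A minimal sketch of that two-step flow, assuming the same helpers used elsewhere in this diff (the wrapper name `buildRust` is illustrative):

```js
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
// cacheRustDocker and publishRustDocker are defined in the same buildpack file above.

async function buildRust (configuration, custom) {
  const workdir = configuration.general.workdir

  // 1. Cache stage: build the dependency layer and tag it `<name>:cache`.
  await fs.writeFile(`${workdir}/Dockerfile`, cacheRustDocker(configuration, custom))
  let stream = await docker.engine.buildImage(
    { src: ['.'], context: workdir },
    { t: `${configuration.build.container.name}:cache` }
  )
  await streamEvents(stream, configuration)

  // 2. Publish stage: build the application image on top of the cached layers.
  await fs.writeFile(`${workdir}/Dockerfile`, publishRustDocker(configuration, custom))
  stream = await docker.engine.buildImage(
    { src: ['.'], context: workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
```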
@ -9,24 +9,20 @@ const publishStaticDocker = (configuration) => {
|
||||
'COPY nginx.conf /etc/nginx/nginx.conf',
|
||||
'WORKDIR /usr/share/nginx/html',
|
||||
configuration.build.command.build
|
||||
? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} ./`
|
||||
: `COPY ${configuration.build.directory} ./`,
|
||||
? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} .`
|
||||
: `COPY ./${configuration.build.directory} .`,
|
||||
'EXPOSE 80',
|
||||
'CMD ["nginx", "-g", "daemon off;"]'
|
||||
].join('\n')
|
||||
}
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
try {
|
||||
if (configuration.build.command.build) await buildImage(configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishStaticDocker(configuration))
|
||||
if (configuration.build.command.build) await buildImage(configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishStaticDocker(configuration))
|
||||
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
|
@ -9,22 +9,12 @@ module.exports = async function (configuration) {
|
||||
|
||||
const execute = packs[configuration.build.pack]
|
||||
if (execute) {
|
||||
try {
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'inprogress' })
|
||||
await saveAppLog('### Building application.', configuration)
|
||||
|
||||
await execute(configuration)
|
||||
|
||||
await saveAppLog('### Building done.', configuration)
|
||||
} catch (error) {
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
|
||||
if (error.stack) throw { error: error.stack, type: 'server' }
|
||||
throw { error, type: 'app' }
|
||||
}
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'inprogress' })
|
||||
await saveAppLog('### Building application.', configuration)
|
||||
await execute(configuration)
|
||||
await saveAppLog('### Building done.', configuration)
|
||||
} else {
|
||||
try {
|
||||
await Deployment.findOneAndUpdate(
|
||||
@ -33,7 +23,6 @@ module.exports = async function (configuration) {
|
||||
} catch (error) {
|
||||
// Hmm.
|
||||
}
|
||||
|
||||
throw { error: 'No buildpack found.', type: 'app' }
|
||||
throw new Error('No buildpack found.')
|
||||
}
|
||||
}
|
||||
|
@ -3,39 +3,25 @@ const { execShellAsync } = require('../../common')
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
|
||||
async function purgeImagesContainers () {
|
||||
try {
|
||||
await execShellAsync('docker container prune -f')
|
||||
await execShellAsync('docker image prune -f --filter=label!=coolify-reserve=true')
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
await execShellAsync('docker container prune -f')
|
||||
await execShellAsync('docker image prune -f --filter=label!=coolify-reserve=true')
|
||||
}
|
||||
|
||||
async function cleanupStuckedDeploymentsInDB (configuration) {
|
||||
const { id } = configuration.repository
|
||||
const deployId = configuration.general.deployId
|
||||
try {
|
||||
// Cleanup stucked deployments.
|
||||
const deployments = await Deployment.find({ repoId: id, deployId: { $ne: deployId }, progress: { $in: ['queued', 'inprogress'] } })
|
||||
for (const deployment of deployments) {
|
||||
await Deployment.findByIdAndUpdate(deployment._id, { $set: { progress: 'failed' } })
|
||||
}
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
async function cleanupStuckedDeploymentsInDB () {
|
||||
// Cleanup stucked deployments.
|
||||
await Deployment.updateMany(
|
||||
{ progress: { $in: ['queued', 'inprogress'] } },
|
||||
{ progress: 'failed' }
|
||||
)
|
||||
}
|
||||
|
||||
async function deleteSameDeployments (configuration) {
|
||||
try {
|
||||
await (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application').map(async s => {
|
||||
const running = JSON.parse(s.Spec.Labels.configuration)
|
||||
if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
|
||||
await execShellAsync(`docker stack rm ${s.Spec.Labels['com.docker.stack.namespace']}`)
|
||||
}
|
||||
})
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
await (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application').map(async s => {
|
||||
const running = JSON.parse(s.Spec.Labels.configuration)
|
||||
if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
|
||||
await execShellAsync(`docker stack rm ${s.Spec.Labels['com.docker.stack.namespace']}`)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = { cleanupStuckedDeploymentsInDB, deleteSameDeployments, purgeImagesContainers }
|
||||
|
@ -9,69 +9,64 @@ function getUniq () {
|
||||
}
|
||||
|
||||
function setDefaultConfiguration (configuration) {
|
||||
try {
|
||||
const nickname = getUniq()
|
||||
const deployId = cuid()
|
||||
const nickname = getUniq()
|
||||
const deployId = cuid()
|
||||
|
||||
const shaBase = JSON.stringify({ repository: configuration.repository })
|
||||
const sha256 = crypto.createHash('sha256').update(shaBase).digest('hex')
|
||||
const shaBase = JSON.stringify({ repository: configuration.repository })
|
||||
const sha256 = crypto.createHash('sha256').update(shaBase).digest('hex')
|
||||
|
||||
const baseServiceConfiguration = {
|
||||
replicas: 1,
|
||||
restart_policy: {
|
||||
condition: 'any',
|
||||
max_attempts: 3
|
||||
},
|
||||
update_config: {
|
||||
parallelism: 1,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
},
|
||||
rollback_config: {
|
||||
parallelism: 1,
|
||||
delay: '10s',
|
||||
order: 'start-first',
|
||||
failure_action: 'rollback'
|
||||
}
|
||||
const baseServiceConfiguration = {
|
||||
replicas: 1,
|
||||
restart_policy: {
|
||||
condition: 'any',
|
||||
max_attempts: 3
|
||||
},
|
||||
update_config: {
|
||||
parallelism: 1,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
},
|
||||
rollback_config: {
|
||||
parallelism: 1,
|
||||
delay: '10s',
|
||||
order: 'start-first',
|
||||
failure_action: 'rollback'
|
||||
}
|
||||
|
||||
configuration.build.container.name = sha256.slice(0, 15)
|
||||
|
||||
configuration.general.nickname = nickname
|
||||
configuration.general.deployId = deployId
|
||||
configuration.general.workdir = `/tmp/${deployId}`
|
||||
|
||||
if (!configuration.publish.path) configuration.publish.path = '/'
|
||||
if (!configuration.publish.port) {
|
||||
if (configuration.build.pack === 'php') {
|
||||
configuration.publish.port = 80
|
||||
} else if (configuration.build.pack === 'static') {
|
||||
configuration.publish.port = 80
|
||||
} else if (configuration.build.pack === 'nodejs') {
|
||||
configuration.publish.port = 3000
|
||||
} else if (configuration.build.pack === 'rust') {
|
||||
configuration.publish.port = 3000
|
||||
}
|
||||
}
|
||||
|
||||
if (!configuration.build.directory) {
|
||||
configuration.build.directory = '/'
|
||||
}
|
||||
if (!configuration.publish.directory) {
|
||||
configuration.publish.directory = '/'
|
||||
}
|
||||
|
||||
if (configuration.build.pack === 'static' || configuration.build.pack === 'nodejs') {
|
||||
if (!configuration.build.command.installation) configuration.build.command.installation = 'yarn install'
|
||||
}
|
||||
|
||||
configuration.build.container.baseSHA = crypto.createHash('sha256').update(JSON.stringify(baseServiceConfiguration)).digest('hex')
|
||||
configuration.baseServiceConfiguration = baseServiceConfiguration
|
||||
|
||||
return configuration
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
|
||||
configuration.build.container.name = sha256.slice(0, 15)
|
||||
|
||||
configuration.general.nickname = nickname
|
||||
configuration.general.deployId = deployId
|
||||
configuration.general.workdir = `/tmp/${deployId}`
|
||||
|
||||
if (!configuration.publish.path) configuration.publish.path = '/'
|
||||
if (!configuration.publish.port) {
|
||||
if (configuration.build.pack === 'php') {
|
||||
configuration.publish.port = 80
|
||||
} else if (configuration.build.pack === 'static') {
|
||||
configuration.publish.port = 80
|
||||
} else if (configuration.build.pack === 'nodejs') {
|
||||
configuration.publish.port = 3000
|
||||
} else if (configuration.build.pack === 'rust') {
|
||||
configuration.publish.port = 3000
|
||||
}
|
||||
}
|
||||
|
||||
if (!configuration.build.directory) configuration.build.directory = ''
|
||||
if (configuration.build.directory.startsWith('/')) configuration.build.directory = configuration.build.directory.replace('/', '')
|
||||
|
||||
if (!configuration.publish.directory) configuration.publish.directory = ''
|
||||
if (configuration.publish.directory.startsWith('/')) configuration.publish.directory = configuration.publish.directory.replace('/', '')
|
||||
|
||||
if (configuration.build.pack === 'static' || configuration.build.pack === 'nodejs') {
|
||||
if (!configuration.build.command.installation) configuration.build.command.installation = 'yarn install'
|
||||
}
|
||||
|
||||
configuration.build.container.baseSHA = crypto.createHash('sha256').update(JSON.stringify(baseServiceConfiguration)).digest('hex')
|
||||
configuration.baseServiceConfiguration = baseServiceConfiguration
|
||||
|
||||
return configuration
|
||||
}
|
||||
|
||||
async function updateServiceLabels (configuration) {
|
||||
@ -86,12 +81,8 @@ async function updateServiceLabels (configuration) {
|
||||
})
|
||||
if (found) {
|
||||
const { ID } = found
|
||||
try {
|
||||
const Labels = { ...JSON.parse(found.Spec.Labels.configuration), ...configuration }
|
||||
await execShellAsync(`docker service update --label-add configuration='${JSON.stringify(Labels)}' --label-add com.docker.stack.image='${configuration.build.container.name}:${configuration.build.container.tag}' ${ID}`)
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
}
|
||||
const Labels = { ...JSON.parse(found.Spec.Labels.configuration), ...configuration }
|
||||
await execShellAsync(`docker service update --label-add configuration='${JSON.stringify(Labels)}' --label-add com.docker.stack.image='${configuration.build.container.name}:${configuration.build.container.tag}' ${ID}`)
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -59,6 +59,6 @@ module.exports = async function (configuration) {
      )
    }
  } catch (error) {
    throw { error, type: 'server' }
    throw new Error(error)
  }
}
@ -6,77 +6,71 @@ const { saveAppLog } = require('../../logging')
|
||||
const { deleteSameDeployments } = require('../cleanup')
|
||||
|
||||
module.exports = async function (configuration, imageChanged) {
|
||||
try {
|
||||
const generateEnvs = {}
|
||||
for (const secret of configuration.publish.secrets) {
|
||||
generateEnvs[secret.name] = secret.value
|
||||
}
|
||||
const containerName = configuration.build.container.name
|
||||
const generateEnvs = {}
|
||||
for (const secret of configuration.publish.secrets) {
|
||||
generateEnvs[secret.name] = secret.value
|
||||
}
|
||||
const containerName = configuration.build.container.name
|
||||
|
||||
// Only save SHA256 of it in the configuration label
|
||||
const baseServiceConfiguration = configuration.baseServiceConfiguration
|
||||
delete configuration.baseServiceConfiguration
|
||||
// Only save SHA256 of it in the configuration label
|
||||
const baseServiceConfiguration = configuration.baseServiceConfiguration
|
||||
delete configuration.baseServiceConfiguration
|
||||
|
||||
const stack = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[containerName]: {
|
||||
image: `${configuration.build.container.name}:${configuration.build.container.tag}`,
|
||||
networks: [`${docker.network}`],
|
||||
environment: generateEnvs,
|
||||
deploy: {
|
||||
...baseServiceConfiguration,
|
||||
labels: [
|
||||
'managedBy=coolify',
|
||||
'type=application',
|
||||
'configuration=' + JSON.stringify(configuration),
|
||||
'traefik.enable=true',
|
||||
'traefik.http.services.' +
|
||||
const stack = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[containerName]: {
|
||||
image: `${configuration.build.container.name}:${configuration.build.container.tag}`,
|
||||
networks: [`${docker.network}`],
|
||||
environment: generateEnvs,
|
||||
deploy: {
|
||||
...baseServiceConfiguration,
|
||||
labels: [
|
||||
'managedBy=coolify',
|
||||
'type=application',
|
||||
'configuration=' + JSON.stringify(configuration),
|
||||
'traefik.enable=true',
|
||||
'traefik.http.services.' +
|
||||
configuration.build.container.name +
|
||||
`.loadbalancer.server.port=${configuration.publish.port}`,
|
||||
'traefik.http.routers.' +
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.entrypoints=websecure',
|
||||
'traefik.http.routers.' +
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.rule=Host(`' +
|
||||
configuration.publish.domain +
|
||||
'`) && PathPrefix(`' +
|
||||
configuration.publish.path +
|
||||
'`)',
|
||||
'traefik.http.routers.' +
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.tls.certresolver=letsencrypt',
|
||||
'traefik.http.routers.' +
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.middlewares=global-compress'
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[`${docker.network}`]: {
|
||||
external: true
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[`${docker.network}`]: {
|
||||
external: true
|
||||
}
|
||||
}
|
||||
await saveAppLog('### Publishing.', configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
|
||||
if (imageChanged) {
|
||||
// console.log('image changed')
|
||||
await execShellAsync(`docker service update --image ${configuration.build.container.name}:${configuration.build.container.tag} ${configuration.build.container.name}_${configuration.build.container.name}`)
|
||||
} else {
|
||||
// console.log('new deployment or force deployment or config changed')
|
||||
await deleteSameDeployments(configuration)
|
||||
await execShellAsync(
|
||||
`cat ${configuration.general.workdir}/stack.yml | docker stack deploy --prune -c - ${containerName}`
|
||||
)
|
||||
}
|
||||
|
||||
await saveAppLog('### Published done!', configuration)
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
await saveAppLog(`Error occured during deployment: ${error.message}`, configuration)
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
await saveAppLog('### Publishing.', configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
|
||||
if (imageChanged) {
|
||||
// console.log('image changed')
|
||||
await execShellAsync(`docker service update --image ${configuration.build.container.name}:${configuration.build.container.tag} ${configuration.build.container.name}_${configuration.build.container.name}`)
|
||||
} else {
|
||||
// console.log('new deployment or force deployment or config changed')
|
||||
await deleteSameDeployments(configuration)
|
||||
await execShellAsync(
|
||||
`cat ${configuration.general.workdir}/stack.yml | docker stack deploy --prune -c - ${containerName}`
|
||||
)
|
||||
}
|
||||
|
||||
await saveAppLog('### Published done!', configuration)
|
||||
}
|
||||
|
@ -15,30 +15,24 @@ module.exports = async function (configuration) {
|
||||
iss: parseInt(github.app.id)
|
||||
}
|
||||
|
||||
try {
|
||||
const jwtToken = jwt.sign(payload, githubPrivateKey, {
|
||||
algorithm: 'RS256'
|
||||
})
|
||||
const accessToken = await axios({
|
||||
method: 'POST',
|
||||
url: `https://api.github.com/app/installations/${github.installation.id}/access_tokens`,
|
||||
data: {},
|
||||
headers: {
|
||||
Authorization: 'Bearer ' + jwtToken,
|
||||
Accept: 'application/vnd.github.machine-man-preview+json'
|
||||
}
|
||||
})
|
||||
await execShellAsync(
|
||||
const jwtToken = jwt.sign(payload, githubPrivateKey, {
|
||||
algorithm: 'RS256'
|
||||
})
|
||||
const accessToken = await axios({
|
||||
method: 'POST',
|
||||
url: `https://api.github.com/app/installations/${github.installation.id}/access_tokens`,
|
||||
data: {},
|
||||
headers: {
|
||||
Authorization: 'Bearer ' + jwtToken,
|
||||
Accept: 'application/vnd.github.machine-man-preview+json'
|
||||
}
|
||||
})
|
||||
await execShellAsync(
|
||||
`mkdir -p ${workdir} && git clone -q -b ${branch} https://x-access-token:${accessToken.data.token}@github.com/${organization}/${name}.git ${workdir}/`
|
||||
)
|
||||
configuration.build.container.tag = (
|
||||
await execShellAsync(`cd ${configuration.general.workdir}/ && git rev-parse HEAD`)
|
||||
)
|
||||
.replace('\n', '')
|
||||
.slice(0, 7)
|
||||
} catch (error) {
|
||||
cleanupTmp(workdir)
|
||||
if (error.stack) console.log(error.stack)
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
)
|
||||
configuration.build.container.tag = (
|
||||
await execShellAsync(`cd ${configuration.general.workdir}/ && git rev-parse HEAD`)
|
||||
)
|
||||
.replace('\n', '')
|
||||
.slice(0, 7)
|
||||
}
|
||||
|
@@ -1,6 +1,5 @@
const dayjs = require('dayjs')

const { saveServerLog } = require('../logging')
const { cleanupTmp } = require('../common')

const { saveAppLog } = require('../logging')
@ -8,37 +7,26 @@ const copyFiles = require('./deploy/copyFiles')
|
||||
const buildContainer = require('./build/container')
|
||||
const deploy = require('./deploy/deploy')
|
||||
const Deployment = require('../../models/Deployment')
|
||||
const { cleanupStuckedDeploymentsInDB, purgeImagesContainers } = require('./cleanup')
|
||||
const { purgeImagesContainers } = require('./cleanup')
|
||||
const { updateServiceLabels } = require('./configuration')
|
||||
|
||||
async function queueAndBuild (configuration, imageChanged) {
|
||||
const { id, organization, name, branch } = configuration.repository
|
||||
const { domain } = configuration.publish
|
||||
const { deployId, nickname, workdir } = configuration.general
|
||||
try {
|
||||
await new Deployment({
|
||||
repoId: id, branch, deployId, domain, organization, name, nickname
|
||||
}).save()
|
||||
await saveAppLog(`${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} Queued.`, configuration)
|
||||
await copyFiles(configuration)
|
||||
await buildContainer(configuration)
|
||||
await deploy(configuration, imageChanged)
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'done' })
|
||||
await updateServiceLabels(configuration)
|
||||
cleanupTmp(workdir)
|
||||
await purgeImagesContainers()
|
||||
} catch (error) {
|
||||
await cleanupStuckedDeploymentsInDB(configuration)
|
||||
cleanupTmp(workdir)
|
||||
const { type } = error.error
|
||||
if (type === 'app') {
|
||||
await saveAppLog(error.error, configuration, true)
|
||||
} else {
|
||||
await saveServerLog({ event: error.error, configuration })
|
||||
}
|
||||
}
|
||||
await new Deployment({
|
||||
repoId: id, branch, deployId, domain, organization, name, nickname
|
||||
}).save()
|
||||
await saveAppLog(`${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} Queued.`, configuration)
|
||||
await copyFiles(configuration)
|
||||
await buildContainer(configuration)
|
||||
await deploy(configuration, imageChanged)
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'done' })
|
||||
await updateServiceLabels(configuration)
|
||||
cleanupTmp(workdir)
|
||||
await purgeImagesContainers()
|
||||
}
|
||||
|
||||
module.exports = { queueAndBuild }
|
||||
|
@ -8,24 +8,21 @@ const docker = {
|
||||
network: process.env.DOCKER_NETWORK
|
||||
}
|
||||
async function streamEvents (stream, configuration) {
|
||||
try {
|
||||
await new Promise((resolve, reject) => {
|
||||
docker.engine.modem.followProgress(stream, onFinished, onProgress)
|
||||
function onFinished (err, res) {
|
||||
if (err) reject(err)
|
||||
resolve(res)
|
||||
}
|
||||
function onProgress (event) {
|
||||
if (event.error) {
|
||||
reject(event.error)
|
||||
return
|
||||
}
|
||||
await new Promise((resolve, reject) => {
|
||||
docker.engine.modem.followProgress(stream, onFinished, onProgress)
|
||||
function onFinished (err, res) {
|
||||
if (err) reject(err)
|
||||
resolve(res)
|
||||
}
|
||||
function onProgress (event) {
|
||||
if (event.error) {
|
||||
saveAppLog(event.error, configuration, true)
|
||||
reject(event.error)
|
||||
} else if (event.stream) {
|
||||
saveAppLog(event.stream, configuration)
|
||||
}
|
||||
})
|
||||
} catch (error) {
|
||||
throw { error, type: 'app' }
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = { streamEvents, docker }
|
||||
|
75 api/libs/http-error/handlers.js (new file)
@ -0,0 +1,75 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.handleErrors = exports.handleValidationError = exports.handleNotFoundError = void 0;
|
||||
const http_errors_enhanced_1 = require("http-errors-enhanced");
|
||||
const interfaces_1 = require("./interfaces");
|
||||
const utils_1 = require("./utils");
|
||||
const validation_1 = require("./validation");
|
||||
function handleNotFoundError(request, reply) {
|
||||
handleErrors(new http_errors_enhanced_1.NotFoundError('Not found.'), request, reply);
|
||||
}
|
||||
exports.handleNotFoundError = handleNotFoundError;
|
||||
function handleValidationError(error, request) {
|
||||
/*
|
||||
As seen in https://github.com/fastify/fastify/blob/master/lib/validation.js
|
||||
the error.message will always start with the relative section (params, querystring, headers, body)
|
||||
and fastify throws on first failing section.
|
||||
*/
|
||||
const section = error.message.match(/^\w+/)[0];
|
||||
return new http_errors_enhanced_1.BadRequestError('One or more validations failed trying to process your request.', {
|
||||
failedValidations: validation_1.convertValidationErrors(section, Reflect.get(request, section), error.validation)
|
||||
});
|
||||
}
|
||||
exports.handleValidationError = handleValidationError;
|
||||
function handleErrors(error, request, reply) {
|
||||
var _a, _b;
|
||||
// It is a generic error, handle it
|
||||
const code = error.code;
|
||||
if (!('statusCode' in error)) {
|
||||
if ('validation' in error && ((_a = request[interfaces_1.kHttpErrorsEnhancedConfiguration]) === null || _a === void 0 ? void 0 : _a.convertValidationErrors)) {
|
||||
// If it is a validation error, convert errors to human friendly format
|
||||
error = handleValidationError(error, request);
|
||||
}
|
||||
else if ((_b = request[interfaces_1.kHttpErrorsEnhancedConfiguration]) === null || _b === void 0 ? void 0 : _b.hideUnhandledErrors) {
|
||||
// It is requested to hide the error, just log it and then create a generic one
|
||||
request.log.error({ error: http_errors_enhanced_1.serializeError(error) });
|
||||
error = new http_errors_enhanced_1.InternalServerError('An error occurred trying to process your request.');
|
||||
}
|
||||
else {
|
||||
// Wrap in a HttpError, making the stack explicitily available
|
||||
error = new http_errors_enhanced_1.InternalServerError(http_errors_enhanced_1.serializeError(error));
|
||||
Object.defineProperty(error, 'stack', { enumerable: true });
|
||||
}
|
||||
}
|
||||
else if (code === 'INVALID_CONTENT_TYPE' || code === 'FST_ERR_CTP_INVALID_MEDIA_TYPE') {
|
||||
error = new http_errors_enhanced_1.UnsupportedMediaTypeError(utils_1.upperFirst(validation_1.validationMessagesFormatters.contentType()));
|
||||
}
|
||||
else if (code === 'FST_ERR_CTP_EMPTY_JSON_BODY') {
|
||||
error = new http_errors_enhanced_1.BadRequestError(utils_1.upperFirst(validation_1.validationMessagesFormatters.jsonEmpty()));
|
||||
}
|
||||
else if (code === 'MALFORMED_JSON' || error.message === 'Invalid JSON' || error.stack.includes('at JSON.parse')) {
|
||||
error = new http_errors_enhanced_1.BadRequestError(utils_1.upperFirst(validation_1.validationMessagesFormatters.json()));
|
||||
}
|
||||
// Get the status code
|
||||
let { statusCode, headers } = error;
|
||||
// Code outside HTTP range
|
||||
if (statusCode < 100 || statusCode > 599) {
|
||||
statusCode = http_errors_enhanced_1.INTERNAL_SERVER_ERROR;
|
||||
}
|
||||
// Create the body
|
||||
const body = {
|
||||
statusCode,
|
||||
error: http_errors_enhanced_1.messagesByCodes[statusCode],
|
||||
message: error.message
|
||||
};
|
||||
http_errors_enhanced_1.addAdditionalProperties(body, error);
|
||||
// Send the error back
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
reply
|
||||
.code(statusCode)
|
||||
.headers(headers !== null && headers !== void 0 ? headers : {})
|
||||
.type('application/json')
|
||||
.send(body);
|
||||
}
|
||||
exports.handleErrors = handleErrors;
|
58 api/libs/http-error/index.js (new file)
@ -0,0 +1,58 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.plugin = exports.validationMessagesFormatters = exports.niceJoin = exports.convertValidationErrors = void 0;
|
||||
const fastify_plugin_1 = __importDefault(require("fastify-plugin"));
|
||||
const handlers_1 = require("./handlers");
|
||||
const interfaces_1 = require("./interfaces");
|
||||
const validation_1 = require("./validation");
|
||||
__exportStar(require("./handlers"), exports);
|
||||
__exportStar(require("./interfaces"), exports);
|
||||
var validation_2 = require("./validation");
|
||||
Object.defineProperty(exports, "convertValidationErrors", { enumerable: true, get: function () { return validation_2.convertValidationErrors; } });
|
||||
Object.defineProperty(exports, "niceJoin", { enumerable: true, get: function () { return validation_2.niceJoin; } });
|
||||
Object.defineProperty(exports, "validationMessagesFormatters", { enumerable: true, get: function () { return validation_2.validationMessagesFormatters; } });
|
||||
exports.plugin = fastify_plugin_1.default(function (instance, options, done) {
|
||||
var _a, _b, _c, _d;
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
const convertResponsesValidationErrors = (_a = options.convertResponsesValidationErrors) !== null && _a !== void 0 ? _a : !isProduction;
|
||||
const configuration = {
|
||||
hideUnhandledErrors: (_b = options.hideUnhandledErrors) !== null && _b !== void 0 ? _b : isProduction,
|
||||
convertValidationErrors: (_c = options.convertValidationErrors) !== null && _c !== void 0 ? _c : true,
|
||||
responseValidatorCustomizer: options.responseValidatorCustomizer,
|
||||
allowUndeclaredResponses: (_d = options.allowUndeclaredResponses) !== null && _d !== void 0 ? _d : false
|
||||
};
|
||||
instance.decorate(interfaces_1.kHttpErrorsEnhancedConfiguration, null);
|
||||
instance.decorateRequest(interfaces_1.kHttpErrorsEnhancedConfiguration, null);
|
||||
instance.addHook('onRequest', async (request) => {
|
||||
request[interfaces_1.kHttpErrorsEnhancedConfiguration] = configuration;
|
||||
});
|
||||
instance.setErrorHandler(handlers_1.handleErrors);
|
||||
// instance.setNotFoundHandler(handlers_1.handleNotFoundError);
|
||||
if (convertResponsesValidationErrors) {
|
||||
instance.decorate(interfaces_1.kHttpErrorsEnhancedResponseValidations, []);
|
||||
instance.addHook('onRoute', validation_1.addResponseValidation);
|
||||
instance.addHook('onReady', validation_1.compileResponseValidationSchema.bind(instance, configuration));
|
||||
}
|
||||
done();
|
||||
}, { name: 'fastify-http-errors-enhanced' });
|
||||
exports.default = exports.plugin;
|
||||
// Fix CommonJS exporting
|
||||
/* istanbul ignore else */
|
||||
if (typeof module !== 'undefined') {
|
||||
module.exports = exports.plugin;
|
||||
Object.assign(module.exports, exports);
|
||||
}
|
6 api/libs/http-error/interfaces.js (new file)
@ -0,0 +1,6 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.kHttpErrorsEnhancedResponseValidations = exports.kHttpErrorsEnhancedConfiguration = void 0;
|
||||
exports.kHttpErrorsEnhancedConfiguration = Symbol('fastify-http-errors-enhanced-configuration');
|
||||
exports.kHttpErrorsEnhancedResponseValidations = Symbol('fastify-http-errors-enhanced-response-validation');
|
31 api/libs/http-error/utils.js (new file)
@ -0,0 +1,31 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.get = exports.upperFirst = void 0;
|
||||
function upperFirst(source) {
|
||||
if (typeof source !== 'string' || !source.length) {
|
||||
return source;
|
||||
}
|
||||
return source[0].toUpperCase() + source.substring(1);
|
||||
}
|
||||
exports.upperFirst = upperFirst;
|
||||
function get(target, path) {
|
||||
var _a;
|
||||
const tokens = path.split('.').map((t) => t.trim());
|
||||
for (const token of tokens) {
|
||||
if (typeof target === 'undefined' || target === null) {
|
||||
// We're supposed to be still iterating, but the chain is over - Return undefined
|
||||
target = undefined;
|
||||
break;
|
||||
}
|
||||
const index = token.match(/^(\d+)|(?:\[(\d+)\])$/);
|
||||
if (index) {
|
||||
target = target[parseInt((_a = index[1]) !== null && _a !== void 0 ? _a : index[2], 10)];
|
||||
}
|
||||
else {
|
||||
target = target[token];
|
||||
}
|
||||
}
|
||||
return target;
|
||||
}
|
||||
exports.get = get;
|
239 api/libs/http-error/validation.js (new file)
@ -0,0 +1,239 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.compileResponseValidationSchema = exports.addResponseValidation = exports.convertValidationErrors = exports.validationMessagesFormatters = exports.niceJoin = void 0;
|
||||
const ajv_1 = __importDefault(require("ajv"));
|
||||
const http_errors_enhanced_1 = require("http-errors-enhanced");
|
||||
const interfaces_1 = require("./interfaces");
|
||||
const utils_1 = require("./utils");
|
||||
function niceJoin(array, lastSeparator = ' and ', separator = ', ') {
|
||||
switch (array.length) {
|
||||
case 0:
|
||||
return '';
|
||||
case 1:
|
||||
return array[0];
|
||||
case 2:
|
||||
return array.join(lastSeparator);
|
||||
default:
|
||||
return array.slice(0, array.length - 1).join(separator) + lastSeparator + array[array.length - 1];
|
||||
}
|
||||
}
|
||||
exports.niceJoin = niceJoin;
|
||||
exports.validationMessagesFormatters = {
|
||||
contentType: () => 'only JSON payloads are accepted. Please set the "Content-Type" header to start with "application/json"',
|
||||
json: () => 'the body payload is not a valid JSON',
|
||||
jsonEmpty: () => 'the JSON body payload cannot be empty if the "Content-Type" header is set',
|
||||
missing: () => 'must be present',
|
||||
unknown: () => 'is not a valid property',
|
||||
uuid: () => 'must be a valid GUID (UUID v4)',
|
||||
timestamp: () => 'must be a valid ISO 8601 / RFC 3339 timestamp (example: 2018-07-06T12:34:56Z)',
|
||||
date: () => 'must be a valid ISO 8601 / RFC 3339 date (example: 2018-07-06)',
|
||||
time: () => 'must be a valid ISO 8601 / RFC 3339 time (example: 12:34:56)',
|
||||
uri: () => 'must be a valid URI',
|
||||
hostname: () => 'must be a valid hostname',
|
||||
ipv4: () => 'must be a valid IPv4',
|
||||
ipv6: () => 'must be a valid IPv6',
|
||||
paramType: (type) => {
|
||||
switch (type) {
|
||||
case 'integer':
|
||||
return 'must be a valid integer number';
|
||||
case 'number':
|
||||
return 'must be a valid number';
|
||||
case 'boolean':
|
||||
return 'must be a valid boolean (true or false)';
|
||||
case 'object':
|
||||
return 'must be a object';
|
||||
case 'array':
|
||||
return 'must be an array';
|
||||
default:
|
||||
return 'must be a string';
|
||||
}
|
||||
},
|
||||
presentString: () => 'must be a non empty string',
|
||||
minimum: (min) => `must be a number greater than or equal to ${min}`,
|
||||
maximum: (max) => `must be a number less than or equal to ${max}`,
|
||||
minimumProperties(min) {
|
||||
return min === 1 ? 'cannot be a empty object' : `must be a object with at least ${min} properties`;
|
||||
},
|
||||
maximumProperties(max) {
|
||||
return max === 0 ? 'must be a empty object' : `must be a object with at most ${max} properties`;
|
||||
},
|
||||
minimumItems(min) {
|
||||
return min === 1 ? 'cannot be a empty array' : `must be an array with at least ${min} items`;
|
||||
},
|
||||
maximumItems(max) {
|
||||
return max === 0 ? 'must be a empty array' : `must be an array with at most ${max} items`;
|
||||
},
|
||||
enum: (values) => `must be one of the following values: ${niceJoin(values.map((f) => `"${f}"`), ' or ')}`,
|
||||
pattern: (pattern) => `must match pattern "${pattern.replace(/\(\?:/g, '(')}"`,
|
||||
invalidResponseCode: (code) => `This endpoint cannot respond with HTTP status ${code}.`,
|
||||
invalidResponse: (code) => `The response returned from the endpoint violates its specification for the HTTP status ${code}.`,
|
||||
invalidFormat: (format) => `must match format "${format}" (format)`
|
||||
};
|
||||
function convertValidationErrors(section, data, validationErrors) {
|
||||
const errors = {};
|
||||
if (section === 'querystring') {
|
||||
section = 'query';
|
||||
}
|
||||
// For each error
|
||||
for (const e of validationErrors) {
|
||||
let message = '';
|
||||
let pattern;
|
||||
let value;
|
||||
let reason;
|
||||
// Normalize the key
|
||||
let key = e.dataPath;
|
||||
if (key.startsWith('.')) {
|
||||
key = key.substring(1);
|
||||
}
|
||||
// Remove useless quotes
|
||||
/* istanbul ignore next */
|
||||
if (key.startsWith('[') && key.endsWith(']')) {
|
||||
key = key.substring(1, key.length - 1);
|
||||
}
|
||||
// Depending on the type
|
||||
switch (e.keyword) {
|
||||
case 'required':
|
||||
case 'dependencies':
|
||||
key = e.params.missingProperty;
|
||||
message = exports.validationMessagesFormatters.missing();
|
||||
break;
|
||||
case 'additionalProperties':
|
||||
key = e.params.additionalProperty;
|
||||
message = exports.validationMessagesFormatters.unknown();
|
||||
break;
|
||||
case 'type':
|
||||
message = exports.validationMessagesFormatters.paramType(e.params.type);
|
||||
break;
|
||||
case 'minProperties':
|
||||
message = exports.validationMessagesFormatters.minimumProperties(e.params.limit);
|
||||
break;
|
||||
case 'maxProperties':
|
||||
message = exports.validationMessagesFormatters.maximumProperties(e.params.limit);
|
||||
break;
|
||||
case 'minItems':
|
||||
message = exports.validationMessagesFormatters.minimumItems(e.params.limit);
|
||||
break;
|
||||
case 'maxItems':
|
||||
message = exports.validationMessagesFormatters.maximumItems(e.params.limit);
|
||||
break;
|
||||
case 'minimum':
|
||||
message = exports.validationMessagesFormatters.minimum(e.params.limit);
|
||||
break;
|
||||
case 'maximum':
|
||||
message = exports.validationMessagesFormatters.maximum(e.params.limit);
|
||||
break;
|
||||
case 'enum':
|
||||
message = exports.validationMessagesFormatters.enum(e.params.allowedValues);
|
||||
break;
|
||||
case 'pattern':
|
||||
pattern = e.params.pattern;
|
||||
value = utils_1.get(data, key);
|
||||
if (pattern === '.+' && !value) {
|
||||
message = exports.validationMessagesFormatters.presentString();
|
||||
}
|
||||
else {
|
||||
message = exports.validationMessagesFormatters.pattern(e.params.pattern);
|
||||
}
|
||||
break;
|
||||
case 'format':
|
||||
reason = e.params.format;
|
||||
// Normalize the key
|
||||
if (reason === 'date-time') {
|
||||
reason = 'timestamp';
|
||||
}
|
||||
message = (exports.validationMessagesFormatters[reason] || exports.validationMessagesFormatters.invalidFormat)(reason);
|
||||
break;
|
||||
}
|
||||
// No custom message was found, default to input one replacing the starting verb and adding some path info
|
||||
if (!message.length) {
|
||||
message = `${e.message.replace(/^should/, 'must')} (${e.keyword})`;
|
||||
}
|
||||
// Remove useless quotes
|
||||
/* istanbul ignore next */
|
||||
if (key.match(/(?:^['"])(?:[^.]+)(?:['"]$)/)) {
|
||||
key = key.substring(1, key.length - 1);
|
||||
}
|
||||
// Fix empty properties
|
||||
if (!key) {
|
||||
key = '$root';
|
||||
}
|
||||
key = key.replace(/^\//, '');
|
||||
errors[key] = message;
|
||||
}
|
||||
return { [section]: errors };
|
||||
}
|
||||
exports.convertValidationErrors = convertValidationErrors;
|
||||
function addResponseValidation(route) {
|
||||
var _a;
|
||||
if (!((_a = route.schema) === null || _a === void 0 ? void 0 : _a.response)) {
|
||||
return;
|
||||
}
|
||||
const validators = {};
|
||||
/*
|
||||
Add these validators to the list of the one to compile once the server is started.
|
||||
This makes possible to handle shared schemas.
|
||||
*/
|
||||
this[interfaces_1.kHttpErrorsEnhancedResponseValidations].push([
|
||||
this,
|
||||
validators,
|
||||
Object.entries(route.schema.response)
|
||||
]);
|
||||
// Note that this hook is not called for non JSON payloads therefore validation is not possible in such cases
|
||||
route.preSerialization = async function (request, reply, payload) {
|
||||
const statusCode = reply.raw.statusCode;
|
||||
// Never validate error 500
|
||||
if (statusCode === http_errors_enhanced_1.INTERNAL_SERVER_ERROR) {
|
||||
return payload;
|
||||
}
|
||||
// No validator, it means the HTTP status is not allowed
|
||||
const validator = validators[statusCode];
|
||||
if (!validator) {
|
||||
if (request[interfaces_1.kHttpErrorsEnhancedConfiguration].allowUndeclaredResponses) {
|
||||
return payload;
|
||||
}
|
||||
throw new http_errors_enhanced_1.InternalServerError(exports.validationMessagesFormatters.invalidResponseCode(statusCode));
|
||||
}
|
||||
// Now validate the payload
|
||||
const valid = validator(payload);
|
||||
if (!valid) {
|
||||
throw new http_errors_enhanced_1.InternalServerError(exports.validationMessagesFormatters.invalidResponse(statusCode), {
|
||||
failedValidations: convertValidationErrors('response', payload, validator.errors)
|
||||
});
|
||||
}
|
||||
return payload;
|
||||
};
|
||||
}
|
||||
exports.addResponseValidation = addResponseValidation;
|
||||
function compileResponseValidationSchema(configuration) {
|
||||
// Fix CJS/ESM interoperability
|
||||
// @ts-expect-error
|
||||
let AjvConstructor = ajv_1.default;
|
||||
/* istanbul ignore next */
|
||||
if (AjvConstructor.default) {
|
||||
AjvConstructor = AjvConstructor.default;
|
||||
}
|
||||
const hasCustomizer = typeof configuration.responseValidatorCustomizer === 'function';
|
||||
for (const [instance, validators, schemas] of this[interfaces_1.kHttpErrorsEnhancedResponseValidations]) {
|
||||
// @ts-expect-error
|
||||
const compiler = new AjvConstructor({
|
||||
// The fastify defaults, with the exception of removeAdditional and coerceTypes, which have been reversed
|
||||
removeAdditional: false,
|
||||
useDefaults: true,
|
||||
coerceTypes: false,
|
||||
allErrors: true
|
||||
});
|
||||
compiler.addSchema(Object.values(instance.getSchemas()));
|
||||
compiler.addKeyword('example');
|
||||
if (hasCustomizer) {
|
||||
configuration.responseValidatorCustomizer(compiler);
|
||||
}
|
||||
for (const [code, schema] of schemas) {
|
||||
validators[code] = compiler.compile(schema);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.compileResponseValidationSchema = compileResponseValidationSchema;
|
@ -1,10 +1,18 @@
|
||||
const dayjs = require('dayjs')
|
||||
const axios = require('axios')
|
||||
|
||||
const ApplicationLog = require('../models/Logs/Application')
|
||||
const ServerLog = require('../models/Logs/Server')
|
||||
const dayjs = require('dayjs')
|
||||
const Settings = require('../models/Settings')
|
||||
const { version } = require('../../package.json')
|
||||
|
||||
function generateTimestamp () {
|
||||
return `${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} `
|
||||
}
|
||||
const patterns = [
|
||||
'[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
|
||||
'(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
|
||||
].join('|')
|
||||
|
||||
async function saveAppLog (event, configuration, isError) {
|
||||
try {
|
||||
@ -12,25 +20,12 @@ async function saveAppLog (event, configuration, isError) {
|
||||
const repoId = configuration.repository.id
|
||||
const branch = configuration.repository.branch
|
||||
if (isError) {
|
||||
// console.log(event, config, isError)
|
||||
let clearedEvent = null
|
||||
|
||||
if (event.error) clearedEvent = '[ERROR] ' + generateTimestamp() + event.error.replace(/(\r\n|\n|\r)/gm, '')
|
||||
else if (event) clearedEvent = '[ERROR] ' + generateTimestamp() + event.replace(/(\r\n|\n|\r)/gm, '')
|
||||
|
||||
try {
|
||||
await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
}
|
||||
const clearedEvent = '[ERROR 😱] ' + generateTimestamp() + event.replace(new RegExp(patterns, 'g'), '').replace(/(\r\n|\n|\r)/gm, '')
|
||||
await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
|
||||
} else {
|
||||
if (event && event !== '\n') {
|
||||
const clearedEvent = '[INFO] ' + generateTimestamp() + event.replace(/(\r\n|\n|\r)/gm, '')
|
||||
try {
|
||||
await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
}
|
||||
const clearedEvent = '[INFO] ' + generateTimestamp() + event.replace(new RegExp(patterns, 'g'), '').replace(/(\r\n|\n|\r)/gm, '')
|
||||
await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
@ -39,20 +34,14 @@ async function saveAppLog (event, configuration, isError) {
|
||||
}
|
||||
}
|
||||
|
||||
async function saveServerLog ({ event, configuration, type }) {
|
||||
try {
|
||||
if (configuration) {
|
||||
const deployId = configuration.general.deployId
|
||||
const repoId = configuration.repository.id
|
||||
const branch = configuration.repository.branch
|
||||
await new ApplicationLog({ repoId, branch, deployId, event: `[SERVER ERROR 😖]: ${event}` }).save()
|
||||
}
|
||||
await new ServerLog({ event, type }).save()
|
||||
} catch (error) {
|
||||
// Hmm.
|
||||
}
|
||||
}
|
||||
async function saveServerLog (error) {
|
||||
const settings = await Settings.findOne({ applicationName: 'coolify' })
|
||||
const payload = { message: error.message, stack: error.stack, type: error.type || 'spaghetticode', version }
|
||||
|
||||
const found = await ServerLog.find(payload)
|
||||
if (found.length === 0 && error.message) await new ServerLog(payload).save()
|
||||
if (settings && settings.sendErrors && process.env.NODE_ENV === 'production') await axios.post('https://errors.coollabs.io/api/error', payload)
|
||||
}
|
||||
module.exports = {
|
||||
saveAppLog,
|
||||
saveServerLog
|
||||
|
@@ -2,10 +2,11 @@ const mongoose = require('mongoose')
const { version } = require('../../../package.json')
const logSchema = mongoose.Schema(
  {
    version: { type: String, required: true, default: version },
    type: { type: String, required: true, enum: ['API', 'UPGRADE-P-1', 'UPGRADE-P-2'], default: 'API' },
    event: { type: String, required: true },
    seen: { type: Boolean, required: true, default: false }
    version: { type: String, default: version },
    type: { type: String, required: true },
    message: { type: String, required: true },
    stack: { type: String },
    seen: { type: Boolean, default: false }
  },
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)
@@ -3,7 +3,8 @@ const mongoose = require('mongoose')
const settingsSchema = mongoose.Schema(
  {
    applicationName: { type: String, required: true, default: 'coolify' },
    allowRegistration: { type: Boolean, required: true, default: false }
    allowRegistration: { type: Boolean, required: true, default: false },
    sendErrors: { type: Boolean, required: true, default: true }
  },
  { timestamps: true }
)
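The new `sendErrors` flag above defaults to true, which is what makes error reporting opt-out. A hypothetical one-off script for disabling it on an instance, using the same Settings model (the require path is illustrative):

```js
const Settings = require('../models/Settings')

// Hypothetical helper: opt this Coolify instance out of automatic error reporting.
async function disableErrorReporting () {
  await Settings.findOneAndUpdate(
    { applicationName: 'coolify' },
    { sendErrors: false }
  )
}
```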
@ -1,15 +1,11 @@
|
||||
|
||||
const { verifyUserId } = require('../../../libs/common')
|
||||
const { setDefaultConfiguration } = require('../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.post('/', async (request, reply) => {
|
||||
try {
|
||||
if (!await verifyUserId(request.headers.authorization)) {
|
||||
reply.code(500).send({ error: 'Invalid request' })
|
||||
return
|
||||
}
|
||||
const configuration = setDefaultConfiguration(request.body)
|
||||
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
@ -34,7 +30,8 @@ module.exports = async function (fastify) {
|
||||
}
|
||||
return { message: 'OK' }
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -1,37 +1,16 @@
|
||||
|
||||
const { verifyUserId, cleanupTmp } = require('../../../../libs/common')
|
||||
const Deployment = require('../../../../models/Deployment')
|
||||
const ApplicationLog = require('../../../../models/Logs/Application')
|
||||
const { verifyUserId, cleanupTmp } = require('../../../../libs/common')
|
||||
const { queueAndBuild } = require('../../../../libs/applications')
|
||||
const { setDefaultConfiguration, precheckDeployment } = require('../../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../../libs/docker')
|
||||
const { saveServerLog } = require('../../../../libs/logging')
|
||||
const cloneRepository = require('../../../../libs/applications/github/cloneRepository')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
// const postSchema = {
|
||||
// body: {
|
||||
// type: "object",
|
||||
// properties: {
|
||||
// ref: { type: "string" },
|
||||
// repository: {
|
||||
// type: "object",
|
||||
// properties: {
|
||||
// id: { type: "number" },
|
||||
// full_name: { type: "string" },
|
||||
// },
|
||||
// required: ["id", "full_name"],
|
||||
// },
|
||||
// installation: {
|
||||
// type: "object",
|
||||
// properties: {
|
||||
// id: { type: "number" },
|
||||
// },
|
||||
// required: ["id"],
|
||||
// },
|
||||
// },
|
||||
// required: ["ref", "repository", "installation"],
|
||||
// },
|
||||
// };
|
||||
fastify.post('/', async (request, reply) => {
|
||||
let configuration
|
||||
try {
|
||||
await verifyUserId(request.headers.authorization)
|
||||
} catch (error) {
|
||||
@ -40,7 +19,10 @@ module.exports = async function (fastify) {
|
||||
}
|
||||
try {
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
const configuration = setDefaultConfiguration(request.body)
|
||||
configuration = setDefaultConfiguration(request.body)
|
||||
if (!configuration) {
|
||||
throw new Error('Whaat?')
|
||||
}
|
||||
await cloneRepository(configuration)
|
||||
const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({ services, configuration })
|
||||
|
||||
@ -64,11 +46,21 @@ module.exports = async function (fastify) {
|
||||
return
|
||||
}
|
||||
|
||||
queueAndBuild(configuration, imageChanged)
|
||||
|
||||
reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name })
|
||||
reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name, deployId: configuration.general.deployId })
|
||||
await queueAndBuild(configuration, imageChanged)
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
const { id, organization, name, branch } = configuration.repository
|
||||
const { domain } = configuration.publish
|
||||
const { deployId } = configuration.general
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
if (error.name) {
|
||||
if (error.message && error.stack) await saveServerLog(error)
|
||||
if (reply.sent) await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
|
||||
}
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -39,7 +39,7 @@ module.exports = async function (fastify) {
|
||||
})
|
||||
return finalLogs
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
|
||||
|
@ -1,4 +1,5 @@
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
@ -8,7 +9,8 @@ module.exports = async function (fastify) {
|
||||
const logs = (await service.logs({ stdout: true, stderr: true, timestamps: true })).toString().split('\n').map(l => l.slice(8)).filter((a) => a)
|
||||
return { logs }
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
const ServerLog = require('../../../models/Logs/Server')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
@ -21,10 +22,8 @@ module.exports = async function (fastify) {
|
||||
}
|
||||
}
|
||||
])
|
||||
|
||||
const serverLogs = await ServerLog.find()
|
||||
const services = await docker.engine.listServices()
|
||||
|
||||
let applications = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application' && r.Spec.Labels.configuration)
|
||||
let databases = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && r.Spec.Labels.configuration)
|
||||
applications = applications.map(r => {
|
||||
@ -56,7 +55,8 @@ module.exports = async function (fastify) {
|
||||
if (error.code === 'ENOENT' && error.errno === -2) {
|
||||
throw new Error(`Docker service unavailable at ${error.address}.`)
|
||||
} else {
|
||||
throw { error, type: 'server' }
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
@ -3,6 +3,7 @@ const fs = require('fs').promises
|
||||
const cuid = require('cuid')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { execShellAsync } = require('../../../libs/common')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
|
||||
const generator = require('generate-password')
|
||||
@ -165,7 +166,8 @@ module.exports = async function (fastify) {
|
||||
`cat ${configuration.general.workdir}/stack.yml | docker stack deploy -c - ${configuration.general.deployId}`
|
||||
)
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
|
||||
|
@ -4,6 +4,8 @@ const Settings = require('../../../models/Settings')
|
||||
const cuid = require('cuid')
|
||||
const mongoose = require('mongoose')
|
||||
const jwt = require('jsonwebtoken')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
const githubCodeSchema = {
|
||||
schema: {
|
||||
@ -59,8 +61,12 @@ module.exports = async function (fastify) {
|
||||
avatar: avatar_url,
|
||||
uid
|
||||
})
|
||||
const defaultSettings = new Settings({
|
||||
_id: new mongoose.Types.ObjectId()
|
||||
})
|
||||
try {
|
||||
await newUser.save()
|
||||
await defaultSettings.save()
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
reply.code(500).send({ success: false, error: e })
|
||||
@ -111,8 +117,8 @@ module.exports = async function (fastify) {
|
||||
return
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
reply.code(500).send({ success: false, error: error.message })
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
fastify.get('/success', async (request, reply) => {
|
||||
|
@ -8,7 +8,7 @@ module.exports = async function (fastify) {
|
||||
serverLogs
|
||||
}
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -1,13 +1,16 @@
|
||||
const Settings = require('../../../models/Settings')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
const applicationName = 'coolify'
|
||||
const postSchema = {
|
||||
body: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
allowRegistration: { type: 'boolean' }
|
||||
allowRegistration: { type: 'boolean' },
|
||||
sendErrors: { type: 'boolean' }
|
||||
},
|
||||
required: ['allowRegistration']
|
||||
required: []
|
||||
}
|
||||
}
|
||||
|
||||
@ -25,7 +28,8 @@ module.exports = async function (fastify) {
|
||||
settings
|
||||
}
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
|
||||
@ -38,7 +42,8 @@ module.exports = async function (fastify) {
|
||||
).select('-_id -__v')
|
||||
reply.code(201).send({ settings })
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -4,9 +4,9 @@ const { saveServerLog } = require('../../../libs/logging')
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
const upgradeP1 = await execShellAsync('bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p1.sh)"')
|
||||
await saveServerLog({ event: upgradeP1, type: 'UPGRADE-P-1' })
|
||||
await saveServerLog({ message: upgradeP1, type: 'UPGRADE-P-1' })
|
||||
reply.code(200).send('I\'m trying, okay?')
|
||||
const upgradeP2 = await execShellAsync('docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -u root coolify bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p2.sh)"')
|
||||
await saveServerLog({ event: upgradeP2, type: 'UPGRADE-P-2' })
|
||||
await saveServerLog({ message: upgradeP2, type: 'UPGRADE-P-2' })
|
||||
})
|
||||
}
|
||||
|
@ -1,6 +1,10 @@
|
||||
const crypto = require('crypto')
|
||||
const { cleanupTmp } = require('../../../libs/common')
|
||||
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
const ApplicationLog = require('../../../models/Logs/Application')
|
||||
const ServerLog = require('../../../models/Logs/Server')
|
||||
|
||||
const { queueAndBuild } = require('../../../libs/applications')
|
||||
const { setDefaultConfiguration, precheckDeployment } = require('../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
@ -33,6 +37,7 @@ module.exports = async function (fastify) {
|
||||
}
|
||||
}
|
||||
fastify.post('/', { schema: postSchema }, async (request, reply) => {
|
||||
let configuration
|
||||
const hmac = crypto.createHmac('sha256', fastify.config.GITHUP_APP_WEBHOOK_SECRET)
|
||||
const digest = Buffer.from('sha256=' + hmac.update(JSON.stringify(request.body)).digest('hex'), 'utf8')
|
||||
const checksum = Buffer.from(request.headers['x-hub-signature-256'], 'utf8')
|
||||
@ -48,7 +53,7 @@ module.exports = async function (fastify) {
|
||||
try {
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
|
||||
let configuration = services.find(r => {
|
||||
configuration = services.find(r => {
|
||||
if (request.body.ref.startsWith('refs')) {
|
||||
const branch = request.body.ref.split('/')[2]
|
||||
if (
|
||||
@ -88,12 +93,27 @@ module.exports = async function (fastify) {
|
||||
reply.code(200).send({ message: 'Already in the queue.' })
|
||||
return
|
||||
}
|
||||
|
||||
queueAndBuild(configuration, imageChanged)
|
||||
|
||||
reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name })
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
const { id, organization, name, branch } = configuration.repository
|
||||
const { domain } = configuration.publish
|
||||
const { deployId } = configuration.general
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
if (error.name === 'Error') {
|
||||
// Error during runtime
|
||||
await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
|
||||
} else {
|
||||
// Error in my code
|
||||
const payload = { message: error.message, stack: error.stack, type: 'spaghetticode' }
|
||||
if (error.message && error.stack) await new ServerLog(payload).save()
|
||||
if (reply.sent) await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
|
||||
}
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -1,21 +1,26 @@
|
||||
require('dotenv').config()
|
||||
const fs = require('fs')
|
||||
const util = require('util')
|
||||
const axios = require('axios')
|
||||
const mongoose = require('mongoose')
|
||||
const path = require('path')
|
||||
const { saveServerLog } = require('./libs/logging')
|
||||
const { execShellAsync } = require('./libs/common')
|
||||
const { purgeImagesContainers, cleanupStuckedDeploymentsInDB } = require('./libs/applications/cleanup')
|
||||
const Deployment = require('./models/Deployment')
|
||||
const fastify = require('fastify')({
|
||||
logger: { level: 'error' }
|
||||
trustProxy: true,
|
||||
logger: {
|
||||
level: 'error'
|
||||
}
|
||||
})
|
||||
const mongoose = require('mongoose')
|
||||
const path = require('path')
|
||||
fastify.register(require('../api/libs/http-error'))
|
||||
|
||||
const { schema } = require('./schema')
|
||||
|
||||
process.on('unhandledRejection', (reason, p) => {
|
||||
console.log(reason)
|
||||
console.log(p)
|
||||
process.on('unhandledRejection', async (reason, p) => {
|
||||
await saveServerLog({ message: reason.message, type: 'unhandledRejection' })
|
||||
})
|
||||
|
||||
fastify.register(require('fastify-env'), {
|
||||
schema,
|
||||
dotenv: true
|
||||
@ -36,18 +41,6 @@ if (process.env.NODE_ENV === 'production') {
|
||||
}
|
||||
|
||||
fastify.register(require('./app'), { prefix: '/api/v1' })
|
||||
fastify.setErrorHandler(async (error, request, reply) => {
|
||||
if (error.statusCode) {
|
||||
reply.status(error.statusCode).send({ message: error.message } || { message: 'Something is NOT okay. Are you okay?' })
|
||||
} else {
|
||||
reply.status(500).send({ message: error.message } || { message: 'Something is NOT okay. Are you okay?' })
|
||||
}
|
||||
try {
|
||||
await saveServerLog({ event: error })
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
})
|
||||
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
mongoose.connect(
|
||||
@ -91,6 +84,14 @@ mongoose.connection.once('open', async function () {
|
||||
fastify.listen(3001)
|
||||
console.log('Coolify API is up and running in development.')
|
||||
}
|
||||
try {
|
||||
const { main } = (await axios.get('https://get.coollabs.io/version.json')).data.coolify
|
||||
if (main.clearServerLogs) {
|
||||
await mongoose.connection.db.dropCollection('logs-servers')
|
||||
}
|
||||
} catch (error) {
|
||||
// Could not cleanup logs-servers collection
|
||||
}
|
||||
// On start cleanup inprogress/queued deployments.
|
||||
try {
|
||||
await cleanupStuckedDeploymentsInDB()
|
||||
@ -98,8 +99,8 @@ mongoose.connection.once('open', async function () {
|
||||
// Could not cleanup DB 🤔
|
||||
}
|
||||
try {
|
||||
// Doing because I do not want to prune these images. Prune skip coolify-reserve labeled images.
|
||||
const basicImages = ['nginx:stable-alpine', 'node:lts', 'ubuntu:20.04']
|
||||
// Doing because I do not want to prune these images. Prune skips coolify-reserve labeled images.
|
||||
const basicImages = ['nginx:stable-alpine', 'node:lts', 'ubuntu:20.04', 'php:apache', 'rust:latest']
|
||||
for (const image of basicImages) {
|
||||
await execShellAsync(`echo "FROM ${image}" | docker build --label coolify-reserve=true -t ${image} -`)
|
||||
}
|
||||
|
@ -1,24 +1,15 @@
|
||||
FROM ubuntu:20.04 as binaries
|
||||
FROM node:lts
|
||||
LABEL coolify-preserve=true
|
||||
RUN apt update && apt install -y curl gnupg2 ca-certificates
|
||||
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
|
||||
RUN echo 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' >> /etc/apt/sources.list
|
||||
WORKDIR /usr/src/app
|
||||
RUN curl -fsSL https://download.docker.com/linux/static/stable/x86_64/docker-20.10.6.tgz | tar -xzvf - docker/docker -C . --strip-components 1
|
||||
RUN mv /usr/src/app/docker /usr/bin/docker
|
||||
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
|
||||
RUN curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -o /usr/bin/jq
|
||||
RUN chmod +x /usr/bin/envsubst /usr/bin/jq
|
||||
RUN apt update && apt install -y docker-ce-cli && apt clean all
|
||||
|
||||
FROM node:lts
|
||||
WORKDIR /usr/src/app
|
||||
LABEL coolify-preserve=true
|
||||
COPY --from=binaries /usr/bin/docker /usr/bin/docker
|
||||
COPY --from=binaries /usr/bin/envsubst /usr/bin/envsubst
|
||||
COPY --from=binaries /usr/bin/jq /usr/bin/jq
|
||||
COPY . .
|
||||
RUN curl -f https://get.pnpm.io/v6.js | node - add --global pnpm@6
|
||||
RUN chmod +x /usr/bin/envsubst /usr/bin/jq /usr/bin/docker
|
||||
RUN curl -f https://get.pnpm.io/v6.js | node - add --global pnpm
|
||||
COPY ./*package.json .
|
||||
RUN pnpm install
|
||||
COPY . .
|
||||
RUN pnpm build
|
||||
RUN rm -fr node_modules .pnpm-store
|
||||
RUN pnpm install -P
|
||||
CMD ["pnpm", "start"]
|
||||
EXPOSE 3000
|
73
install/coolify-template-dev.yml
Normal file
@ -0,0 +1,73 @@
version: '3.8'

services:
proxy:
image: traefik:v2.4
hostname: coollabs-proxy
ports:
- target: 80
published: 80
protocol: tcp
mode: host
- target: 443
published: 443
protocol: tcp
mode: host
- target: 8080
published: 8080
protocol: tcp
mode: host
command:
- --api.insecure=true
- --api.dashboard=true
- --api.debug=true
- --log.level=ERROR
- --providers.docker=true
- --providers.docker.swarmMode=true
- --providers.docker.exposedbydefault=false
- --providers.docker.network=${DOCKER_NETWORK}
- --providers.docker.swarmModeRefreshSeconds=1s
- --entrypoints.web.address=:80
- --entrypoints.websecure.address=:443
volumes:
- /var/run/docker.sock:/var/run/docker.sock
networks:
- ${DOCKER_NETWORK}
deploy:
update_config:
parallelism: 1
delay: 10s
order: start-first
replicas: 1
placement:
constraints:
- node.role == manager
labels:
- "traefik.enable=true"
- "traefik.http.routers.api.entrypoints=websecure"
- "traefik.http.routers.api.service=api@internal"
- "traefik.http.routers.api.middlewares=auth"
- "traefik.http.services.traefik.loadbalancer.server.port=80"
- "traefik.http.services.traefik.loadbalancer.server.port=443"

# Global redirect www to non-www
- "traefik.http.routers.www-catchall.rule=hostregexp(`{host:www.(.+)}`)"
- "traefik.http.routers.www-catchall.entrypoints=web"
- "traefik.http.routers.www-catchall.middlewares=redirect-www-to-nonwww"
- "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.regex=^http://(?:www\\.)?(.+)"
- "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.replacement=http://$$$${1}"

# Global redirect http to https
- "traefik.http.routers.http-catchall.rule=hostregexp(`{host:.+}`)"
- "traefik.http.routers.http-catchall.entrypoints=web"
- "traefik.http.routers.http-catchall.middlewares=redirect-to-https"

- "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
- "traefik.http.middlewares.global-compress.compress=true"

networks:
${DOCKER_NETWORK}:
driver: overlay
name: ${DOCKER_NETWORK}
external: true
@ -2,7 +2,7 @@ version: '3.8'

services:
proxy:
image: traefik:v2.3
image: traefik:v2.4
hostname: coollabs-proxy
ports:
- target: 80
@ -1,7 +1,7 @@
{
"name": "coolify",
"description": "An open-source, hassle-free, self-hostable Heroku & Netlify alternative.",
"version": "1.0.6",
"version": "1.0.7",
"license": "AGPL-3.0",
"scripts": {
"lint": "standard",
@ -19,6 +19,7 @@
"@iarna/toml": "^2.2.5",
"@roxi/routify": "^2.15.1",
"@zerodevx/svelte-toast": "^0.2.1",
"ajv": "^8.1.0",
"axios": "^0.21.1",
"commander": "^7.2.0",
"compare-versions": "^3.6.0",
@ -33,6 +34,7 @@
"fastify-plugin": "^3.0.0",
"fastify-static": "^4.0.1",
"generate-password": "^1.6.0",
"http-errors-enhanced": "^0.7.0",
"js-yaml": "^4.0.0",
"jsonwebtoken": "^8.5.1",
"mongoose": "^5.12.3",
647
pnpm-lock.yaml
generated
File diff suppressed because it is too large
@ -63,7 +63,7 @@
<div class="grid grid-flow-row">
<label for="Path"
>Path <TooltipInfo
label="{`Path to deploy your application on your domain. eg: /api means it will be deployed to -> https://${$application.publish.domain}/api`}"
label="{`Path to deploy your application on your domain. eg: /api means it will be deployed to -> https://${$application.publish.domain || '<yourdomain>'}/api`}"
/></label
>
<input
@ -92,7 +92,7 @@
<input
id="baseDir"
bind:value="{$application.build.directory}"
placeholder="/"
placeholder="eg: sourcedir"
/>
</div>
<div class="grid grid-flow-row">
@ -104,7 +104,7 @@
<input
id="publishDir"
bind:value="{$application.publish.directory}"
placeholder="/"
placeholder="eg: dist, _site, public"
/>
</div>
</div>
@ -157,7 +157,7 @@
$application.publish.path}"
>{$application.publish.domain
? `${$application.publish.domain}${$application.publish.path !== '/' ? $application.publish.path : ''}`
: "Loading..."}</a
: "<yourdomain>"}</a
>
<a
target="_blank"
@ -57,10 +57,7 @@
f => f.type === "file" && f.name === "Cargo.toml",
);

if (Dockerfile) {
$application.build.pack = "custom";
toast.push("Custom Dockerfile found. Build pack set to custom.");
} else if (packageJson) {
if (packageJson) {
const { content } = await $fetch(packageJson.git_url);
const packageJsonContent = JSON.parse(atob(content));
const checkPackageJSONContents = dep => {
@ -73,17 +70,9 @@
if (checkPackageJSONContents(dep)) {
const config = templates[dep];
$application.build.pack = config.pack;
if (config.installation) {
$application.build.command.installation = config.installation;
}

if (config.port) {
$application.publish.port = config.port;
}

if (config.directory) {
$application.publish.directory = config.directory;
}
if (config.installation) $application.build.command.installation = config.installation;
if (config.port) $application.publish.port = config.port;
if (config.directory) $application.publish.directory = config.directory;

if (
packageJsonContent.scripts.hasOwnProperty("build") &&
@ -97,6 +86,9 @@
} else if (CargoToml) {
$application.build.pack = "rust";
toast.push(`Rust language detected. Default values set.`);
} else if (Dockerfile) {
$application.build.pack = "custom";
toast.push("Custom Dockerfile found. Build pack set to custom.");
}
} catch (error) {
// Nothing detected
@ -22,6 +22,11 @@ body {
border-image: linear-gradient(0.25turn, rgba(255, 249, 34), rgba(255, 0, 128), rgba(56, 2, 155, 0));
border-image-slice: 1;
}
.border-gradient-full {
border: 4px solid transparent;
border-image: linear-gradient(0.25turn, rgba(255, 249, 34), rgba(255, 0, 128), rgba(56, 2, 155, 0));
border-image-slice: 1;
}

[aria-label][role~="tooltip"]::after {
background: rgba(41, 37, 36, 0.9);
@ -17,9 +17,37 @@
let upgradeDisabled = false;
let upgradeDone = false;
let latest = {};
let showAck = false;
const branch =
process.env.NODE_ENV === "production" &&
window.location.hostname !== "test.andrasbacsai.dev"
? "main"
: "next";
onMount(async () => {
if ($session.token) upgradeAvailable = await checkUpgrade();
if ($session.token) {
upgradeAvailable = await checkUpgrade();
if (!localStorage.getItem("automaticErrorReportsAck")) {
showAck = true;
if (latest?.coolify[branch]?.settings?.sendErrors) {
const settings = {
sendErrors: true,
};
await $fetch("/api/v1/settings", {
body: {
...settings,
},
headers: {
Authorization: `Bearer ${$session.token}`,
},
});
}
}
}
});
function ackError() {
localStorage.setItem("automaticErrorReportsAck", "true");
showAck = false;
}
async function verifyToken() {
if ($session.token) {
try {
@ -69,11 +97,6 @@
cache: "no-cache",
})
.then(r => r.json());
const branch =
process.env.NODE_ENV === "production" &&
window.location.hostname !== "test.andrasbacsai.dev"
? "main"
: "next";

return compareVersions(
latest.coolify[branch].version,
@ -85,6 +108,31 @@
</script>

{#await verifyToken() then notUsed}
{#if showAck}
<div
class="p-2 fixed top-0 right-0 z-50 w-64 m-2 rounded border-gradient-full bg-black"
>
<div class="text-white text-xs space-y-2 text-justify font-medium">
<div>
We implemented an automatic error reporting feature, which is enabled
by default.
</div>
<div>
Why? Because we would like to hunt down bugs faster and easier.
</div>
<div class="py-5">
If you do not like it, you can turn it off in the <button
class="underline font-bold"
on:click="{$goto('/settings')}">Settings menu</button
>.
</div>
<button
class="button p-2 bg-warmGray-800 w-full text-center hover:bg-warmGray-700"
on:click="{ackError}">OK</button
>
</div>
</div>
{/if}
{#if $route.path !== "/index"}
<nav
class="w-16 bg-warmGray-800 text-white top-0 left-0 fixed min-w-4rem min-h-screen"
@ -1,5 +1,5 @@
<script>
import { params } from "@roxi/routify";
import { params, redirect } from "@roxi/routify";
import { onDestroy, onMount } from "svelte";
import { fade } from "svelte/transition";
import { fetch } from "@store";
@ -15,13 +15,18 @@
});

async function loadLogs() {
const { events, progress } = await $fetch(
try {
const { events, progress } = await $fetch(
`/api/v1/application/deploy/logs/${$params.deployId}`,
);
logs = [...events];
if (progress === "done" || progress === "failed") {
clearInterval(loadLogsInterval);
}
} catch(error) {
$redirect('/dashboard')
}

}
onDestroy(() => {
clearInterval(loadLogsInterval);
@ -55,19 +55,20 @@ import Tooltip from "../../components/Tooltip/Tooltip.svelte";
await $fetch(`/api/v1/application/check`, {
body: $application,
});
const { nickname, name } = await $fetch(`/api/v1/application/deploy`, {
const { nickname, name, deployId } = await $fetch(`/api/v1/application/deploy`, {
body: $application,
});
$application.general.nickname = nickname;
$application.build.container.name = name;
$application.general.deployId = deployId;
$initConf = JSON.parse(JSON.stringify($application));
toast.push("Application deployment queued.");
$redirect(
`/application/${$application.repository.organization}/${$application.repository.name}/${$application.repository.branch}/logs`,
$goto(
`/application/${$application.repository.organization}/${$application.repository.name}/${$application.repository.branch}/logs/${$application.general.deployId}`,
);
} catch (error) {
console.log(error);
toast.push(error.error ? error.error : "Ooops something went wrong.");
toast.push(error.error || error || "Ooops something went wrong.");
}
}
</script>
@ -6,11 +6,13 @@

let settings = {
allowRegistration: false,
sendErrors: true
};

async function loadSettings() {
const response = await $fetch(`/api/v1/settings`);
settings.allowRegistration = response.settings.allowRegistration;
settings.sendErrors = response.settings.sendErrors;
}
async function changeSettings(value) {
settings[value] = !settings[value];
@ -23,7 +25,7 @@
}
</script>

<div class="min-h-full text-white" in:fade="{{ duration: 100 }}">
<div class="min-h-full text-white" in:fade="{{ duration: 100 }}">
<div
class="py-5 text-left px-6 text-3xl tracking-tight font-bold flex items-center"
>
@ -101,6 +103,67 @@
</span>
</button>
</li>
<li class="py-4 flex items-center justify-between">
<div class="flex flex-col">
<p class="text-base font-bold text-warmGray-100">
Send errors automatically?
</p>
<p class="text-sm font-medium text-warmGray-400">
Allow to send errors automatically to developer(s) at coolLabs (<a href="https://twitter.com/andrasbacsai" target="_blank" class="underline text-white font-bold hover:text-blue-400">Andras Bacsai</a>). This will help to fix bugs quicker. 🙏
</p>
</div>
<button
type="button"
on:click="{() => changeSettings('sendErrors')}"
aria-pressed="true"
class="relative inline-flex flex-shrink-0 h-6 w-11 border-2 border-transparent rounded-full cursor-pointer transition-colors ease-in-out duration-200"
class:bg-green-600="{settings.sendErrors}"
class:bg-warmGray-700="{!settings.sendErrors}"
>
<span class="sr-only">Use setting</span>
<span
class="pointer-events-none relative inline-block h-5 w-5 rounded-full bg-white shadow transform transition ease-in-out duration-200"
class:translate-x-5="{settings.sendErrors}"
class:translate-x-0="{!settings.sendErrors}"
>
<span
class=" ease-in duration-200 absolute inset-0 h-full w-full flex items-center justify-center transition-opacity"
class:opacity-0="{settings.sendErrors}"
class:opacity-100="{!settings.sendErrors}"
aria-hidden="true"
>
<svg
class="bg-white h-3 w-3 text-red-600"
fill="none"
viewBox="0 0 12 12"
>
<path
d="M4 8l2-2m0 0l2-2M6 6L4 4m2 2l2 2"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"></path>
</svg>
</span>
<span
class="ease-out duration-100 absolute inset-0 h-full w-full flex items-center justify-center transition-opacity"
aria-hidden="true"
class:opacity-100="{settings.sendErrors}"
class:opacity-0="{!settings.sendErrors}"
>
<svg
class="bg-white h-3 w-3 text-green-600"
fill="currentColor"
viewBox="0 0 12 12"
>
<path
d="M3.707 5.293a1 1 0 00-1.414 1.414l1.414-1.414zM5 8l-.707.707a1 1 0 001.414 0L5 8zm4.707-3.293a1 1 0 00-1.414-1.414l1.414 1.414zm-7.414 2l2 2 1.414-1.414-2-2-1.414 1.414z"
></path>
</svg>
</span>
</span>
</button>
</li>
</ul>
</div>
</div>
@ -28,7 +28,7 @@ const templates = {
directory: 'dist',
name: 'Parcel'
},
'vue-cli-service': {
'@vue/cli-service': {
pack: 'static',
...defaultBuildAndDeploy,
directory: 'dist',
@ -27,7 +27,9 @@ module.exports = {
'mongodb-memory-server-core',
'unique-names-generator',
'generate-password',
'@iarna/toml'
'@iarna/toml',
'http-errors-enhanced',
'ajv'
]
},
proxy: {