fix: worker

parent 6be1fbacde
commit 71c15e0ff5
@@ -44,6 +44,7 @@
 		"node-forge": "1.3.1",
 		"node-os-utils": "1.3.7",
 		"p-all": "4.0.0",
+		"p-throttle": "5.0.0",
 		"public-ip": "6.0.1",
 		"ssh-config": "4.1.6",
 		"strip-ansi": "7.0.1",
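A side note on the new dependency (an observation, not something stated in the diff): p-throttle 5.x ships as an ES module, which is presumably why the worker below loads it with a dynamic import and reads the default export instead of using a top-level require. In an ES module or any async context the equivalent looks roughly like this:

	// Assumption, not stated in the diff: p-throttle 5.x is ESM-only, hence the
	// dynamic import + .default access used by the worker code below.
	const pThrottle = (await import('p-throttle')).default;
	const throttle = pThrottle({ limit: 1, interval: 2000 });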
@@ -18,340 +18,354 @@ import * as buildpacks from '../lib/buildPacks';
 			}
 		});
 		try {
-			parentPort.postMessage({ deploying: true });
-			const queuedBuilds = await prisma.build.findMany({ where: { status: 'queued' }, orderBy: { createdAt: 'asc' } });
-			const { concurrentBuilds } = await prisma.setting.findFirst({})
-			if (queuedBuilds.length > 0) {
-				const concurrency = concurrentBuilds;
-				const pAll = await import('p-all');
-				const actions = []
+			const pThrottle = await import('p-throttle')
+			const throttle = pThrottle.default({
+				limit: 1,
+				interval: 2000
+			});
+
+			const th = throttle(async () => {
+				const queuedBuilds = await prisma.build.findMany({ where: { status: 'queued' }, orderBy: { createdAt: 'asc' } });
+				const { concurrentBuilds } = await prisma.setting.findFirst({})
+				if (queuedBuilds.length > 0) {
+					parentPort.postMessage({ deploying: true });
+					const concurrency = concurrentBuilds;
+					const pAll = await import('p-all');
+					const actions = []
 
 				for (const queueBuild of queuedBuilds) {
 					actions.push(async () => {
 						const application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })
 						const { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, forceRebuild } = queueBuild
 						const {
 							id: applicationId,
 							repository,
 							name,
 							destinationDocker,
 							destinationDockerId,
 							gitSource,
 							configHash,
 							fqdn,
 							projectId,
 							secrets,
 							phpModules,
 							settings,
 							persistentStorage,
 							pythonWSGI,
 							pythonModule,
 							pythonVariable,
 							denoOptions,
 							exposePort,
 							baseImage,
 							baseBuildImage,
 							deploymentType,
 						} = application
 						let {
 							branch,
 							buildPack,
 							port,
 							installCommand,
 							buildCommand,
 							startCommand,
 							baseDirectory,
 							publishDirectory,
 							dockerFileLocation,
 							denoMainFile
 						} = application
 						const currentHash = crypto
 							.createHash('sha256')
 							.update(
 								JSON.stringify({
 									pythonWSGI,
 									pythonModule,
 									pythonVariable,
 									deploymentType,
 									denoOptions,
 									baseImage,
 									baseBuildImage,
 									buildPack,
 									port,
 									exposePort,
 									installCommand,
 									buildCommand,
 									startCommand,
 									secrets,
 									branch,
 									repository,
 									fqdn
 								})
 							)
 							.digest('hex');
 						try {
 							const { debug } = settings;
 							if (concurrency === 1) {
 								await prisma.build.updateMany({
 									where: {
 										status: { in: ['queued', 'running'] },
 										id: { not: buildId },
 										applicationId,
 										createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
 									},
 									data: { status: 'failed' }
 								});
 							}
 							let imageId = applicationId;
 							let domain = getDomain(fqdn);
 							const volumes =
 								persistentStorage?.map((storage) => {
 									return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
 										}${storage.path}`;
 								}) || [];
 							// Previews, we need to get the source branch and set subdomain
 							if (pullmergeRequestId) {
 								branch = sourceBranch;
 								domain = `${pullmergeRequestId}.${domain}`;
 								imageId = `${applicationId}-${pullmergeRequestId}`;
 							}
 
 							let deployNeeded = true;
 							let destinationType;
 
 							if (destinationDockerId) {
 								destinationType = 'docker';
 							}
 							if (destinationType === 'docker') {
 								await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
 								const { workdir, repodir } = await createDirectories({ repository, buildId });
 								const configuration = await setDefaultConfiguration(application);
 
 								buildPack = configuration.buildPack;
 								port = configuration.port;
 								installCommand = configuration.installCommand;
 								startCommand = configuration.startCommand;
 								buildCommand = configuration.buildCommand;
 								publishDirectory = configuration.publishDirectory;
 								baseDirectory = configuration.baseDirectory;
 								dockerFileLocation = configuration.dockerFileLocation;
 								denoMainFile = configuration.denoMainFile;
 								const commit = await importers[gitSource.type]({
 									applicationId,
 									debug,
 									workdir,
 									repodir,
 									githubAppId: gitSource.githubApp?.id,
 									gitlabAppId: gitSource.gitlabApp?.id,
 									customPort: gitSource.customPort,
 									repository,
 									branch,
 									buildId,
 									apiUrl: gitSource.apiUrl,
 									htmlUrl: gitSource.htmlUrl,
 									projectId,
 									deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
 									privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null,
 									forPublic: gitSource.forPublic
 								});
 								if (!commit) {
 									throw new Error('No commit found?');
 								}
 								let tag = commit.slice(0, 7);
 								if (pullmergeRequestId) {
 									tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
 								}
 
 								try {
 									await prisma.build.update({ where: { id: buildId }, data: { commit } });
 								} catch (err) {
 									console.log(err);
 								}
 
 								if (!pullmergeRequestId) {
 									if (configHash !== currentHash) {
 										deployNeeded = true;
 										if (configHash) {
 											await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
 										}
 									} else {
 										deployNeeded = false;
 									}
 								} else {
 									deployNeeded = true;
 								}
 
 								let imageFound = false;
 								try {
 									await executeDockerCmd({
 										dockerId: destinationDocker.id,
 										command: `docker image inspect ${applicationId}:${tag}`
 									})
 									imageFound = true;
 								} catch (error) {
 									//
 								}
 								await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
 
 								if (forceRebuild) deployNeeded = true
 								if (!imageFound || deployNeeded) {
 									// if (true) {
 									if (buildpacks[buildPack])
 										await buildpacks[buildPack]({
 											dockerId: destinationDocker.id,
 											buildId,
 											applicationId,
 											domain,
 											name,
 											type,
 											pullmergeRequestId,
 											buildPack,
 											repository,
 											branch,
 											projectId,
 											publishDirectory,
 											debug,
 											commit,
 											tag,
 											workdir,
 											port: exposePort ? `${exposePort}:${port}` : port,
 											installCommand,
 											buildCommand,
 											startCommand,
 											baseDirectory,
 											secrets,
 											phpModules,
 											pythonWSGI,
 											pythonModule,
 											pythonVariable,
 											dockerFileLocation,
 											denoMainFile,
 											denoOptions,
 											baseImage,
 											baseBuildImage,
 											deploymentType
 										});
 									else {
 										await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
 										throw new Error(`Build pack ${buildPack} not found.`);
 									}
 								} else {
 									await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
 								}
 								try {
 									await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker stop -t 0 ${imageId}` })
 									await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker rm ${imageId}` })
 								} catch (error) {
 									//
 								}
 								const envs = [
 									`PORT=${port}`
 								];
 								if (secrets.length > 0) {
 									secrets.forEach((secret) => {
 										if (pullmergeRequestId) {
 											if (secret.isPRMRSecret) {
 												envs.push(`${secret.name}=${secret.value}`);
 											}
 										} else {
 											if (!secret.isPRMRSecret) {
 												envs.push(`${secret.name}=${secret.value}`);
 											}
 										}
 									});
 								}
 								await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
 								const labels = makeLabelForStandaloneApplication({
 									applicationId,
 									fqdn,
 									name,
 									type,
 									pullmergeRequestId,
 									buildPack,
 									repository,
 									branch,
 									projectId,
 									port: exposePort ? `${exposePort}:${port}` : port,
 									commit,
 									installCommand,
 									buildCommand,
 									startCommand,
 									baseDirectory,
 									publishDirectory
 								});
 								let envFound = false;
 								try {
 									envFound = !!(await fs.stat(`${workdir}/.env`));
 								} catch (error) {
 									//
 								}
 								try {
 									await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
 									const composeVolumes = volumes.map((volume) => {
 										return {
 											[`${volume.split(':')[0]}`]: {
 												name: volume.split(':')[0]
 											}
 										};
 									});
 									const composeFile = {
 										version: '3.8',
 										services: {
 											[imageId]: {
 												image: `${applicationId}:${tag}`,
 												container_name: imageId,
 												volumes,
 												env_file: envFound ? [`${workdir}/.env`] : [],
 												labels,
 												depends_on: [],
 												expose: [port],
 												...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
 												// logging: {
 												// driver: 'fluentd',
 												// },
 												...defaultComposeConfiguration(destinationDocker.network),
 											}
 										},
 										networks: {
 											[destinationDocker.network]: {
 												external: true
 											}
 										},
 										volumes: Object.assign({}, ...composeVolumes)
 									};
 									await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
 									await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
 									await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
 								} catch (error) {
 									await saveBuildLog({ line: error, buildId, applicationId });
 									await prisma.build.updateMany({
 										where: { id: buildId, status: { in: ['queued', 'running'] } },
 										data: { status: 'failed' }
 									});
 									throw new Error(error);
 								}
 								await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
 								await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
 								if (!pullmergeRequestId) await prisma.application.update({
 									where: { id: applicationId },
 									data: { configHash: currentHash }
 								});
 							}
 						}
 						catch (error) {
 							await prisma.build.updateMany({
 								where: { id: buildId, status: { in: ['queued', 'running'] } },
 								data: { status: 'failed' }
 							});
 							await saveBuildLog({ line: error, buildId, applicationId });
 						}
 					});
 				}
 				await pAll.default(actions, { concurrency })
 			}
-		} catch (error) { } finally {
-			await prisma.$disconnect();
-			process.exit(0);
-		}
+			})
+			while (true) {
+				await th()
+			}
+		} catch (error) {
+		} finally {
+			await prisma.$disconnect()
+		}
 	} else process.exit(0);
 })();
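In short, the worker no longer drains the queue once and exits (the old finally block called process.exit(0)); it now wraps the queue check in a p-throttle-limited function and invokes it in an endless loop, so the queue is polled at most once every two seconds for the lifetime of the worker. A minimal standalone sketch of that shape, with a stand-in body instead of the real build logic:

	// Minimal sketch of the reworked worker loop; the poll body is a stand-in,
	// only the throttling/looping structure mirrors the commit.
	async function main() {
		const pThrottle = (await import('p-throttle')).default;
		// p-throttle: pThrottle({ limit, interval }) returns a wrapper that
		// allows at most `limit` invocations per `interval` milliseconds.
		const throttle = pThrottle({ limit: 1, interval: 2000 });

		const poll = throttle(async () => {
			// Stand-in for: query queued builds and deploy them.
			console.log('checking queue at', new Date().toISOString());
		});

		// Excess calls are delayed by the throttle, so this loop effectively
		// runs one pass per interval instead of spinning.
		while (true) {
			await poll();
		}
	}

	main().catch(() => process.exit(0));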
@@ -134,9 +134,32 @@ export const asyncExecShellStream = async ({ debug, buildId, applicationId, comm
 export const asyncSleep = (delay: number): Promise<unknown> =>
 	new Promise((resolve) => setTimeout(resolve, delay));
 export const prisma = new PrismaClient({
-	errorFormat: 'minimal'
+	errorFormat: 'minimal',
+	log: [
+		{
+			emit: 'event',
+			level: 'query',
+		},
+		{
+			emit: 'stdout',
+			level: 'error',
+		},
+		{
+			emit: 'stdout',
+			level: 'info',
+		},
+		{
+			emit: 'stdout',
+			level: 'warn',
+		},
+	],
 });
 
+// prisma.$on('query', (e) => {
+// console.log('Query: ' + e.query)
+// console.log('Params: ' + e.params)
+// console.log('Duration: ' + e.duration + 'ms')
+// })
 export const base64Encode = (text: string): string => {
 	return Buffer.from(text).toString('base64');
 };
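The commented-out prisma.$on('query', ...) block hints at how the new log configuration is meant to be consumed: the emit: 'event' entry makes Prisma emit query events that a listener can handle programmatically, while the stdout entries are printed directly. A small illustrative sketch using the same options as the diff (not the project's actual common.ts):

	import { PrismaClient } from '@prisma/client';

	// Same log configuration as in the commit: query events are emitted to a
	// listener, errors/info/warnings go straight to stdout.
	const prisma = new PrismaClient({
		errorFormat: 'minimal',
		log: [
			{ emit: 'event', level: 'query' },
			{ emit: 'stdout', level: 'error' },
			{ emit: 'stdout', level: 'info' },
			{ emit: 'stdout', level: 'warn' }
		]
	});

	// Enabling the listener that the diff leaves commented out:
	prisma.$on('query', (e) => {
		console.log('Query: ' + e.query);
		console.log('Params: ' + e.params);
		console.log('Duration: ' + e.duration + 'ms');
	});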
pnpm-lock.yaml: 796 changes (file diff suppressed because it is too large)