refactor(pipeline-tasks): run pipeline tasks through a RabbitMQ-driven PipelineTaskRunner instead of the Bull queue
This commit is contained in:
@ -18,6 +18,8 @@ import { ParseBodyMiddleware } from './commons/middlewares/parse-body.middleware
|
||||
import { BullModule } from '@nestjs/bull';
|
||||
import { PubSubModule } from './commons/pub-sub/pub-sub.module';
|
||||
import { LoggerModule } from 'nestjs-pino';
|
||||
import { RabbitMQModule } from '@golevelup/nestjs-rabbitmq';
|
||||
|
||||
import pinoPretty from 'pino-pretty';
|
||||
|
||||
@Module({
|
||||
|
13
src/pipeline-tasks/models/pipeline-task-event.ts
Normal file
13
src/pipeline-tasks/models/pipeline-task-event.ts
Normal file
@ -0,0 +1,13 @@
|
||||
import { PipelineUnits } from '../enums/pipeline-units.enum';
|
||||
import { TaskStatuses } from '../enums/task-statuses.enum';
|
||||
|
||||
/**
 * Progress/log event describing a single moment in a pipeline task's
 * execution (task started, unit output, unit finished, task failed, …).
 */
export class PipelineTaskEvent {
  // Id of the pipeline task this event belongs to.
  taskId: string;
  // Id of the pipeline that owns the task.
  pipelineId: string;
  // Id of the project that owns the pipeline.
  projectId: string;
  // Work unit the event refers to; null for task-level events.
  unit: PipelineUnits | null;
  // When the event was emitted.
  emittedAt: Date;
  // Human-readable log line for this event.
  message: string;
  // Stream the message is associated with ('stdin' is used for echoed commands).
  messageType: 'stdout' | 'stderr' | 'stdin';
  // Task (or unit) status at the time of the event.
  status: TaskStatuses;
}
|
322
src/pipeline-tasks/pipeline-task.runner.spec.ts
Normal file
322
src/pipeline-tasks/pipeline-task.runner.spec.ts
Normal file
@ -0,0 +1,322 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import { ReposService } from '../repos/repos.service';
|
||||
import { PipelineUnits } from './enums/pipeline-units.enum';
|
||||
import { PipelineTask } from './pipeline-task.entity';
|
||||
import { Pipeline } from '../pipelines/pipeline.entity';
|
||||
import { Project } from '../projects/project.entity';
|
||||
import { TaskStatuses } from './enums/task-statuses.enum';
|
||||
import { getLoggerToken, PinoLogger } from 'nestjs-pino';
|
||||
import { PipelineTaskRunner } from './pipeline-task.runner';
|
||||
import { WorkUnitMetadata } from './models/work-unit-metadata.model';
|
||||
import { Code } from 'typeorm';
|
||||
describe('PipelineTaskRunner', () => {
  let runner: PipelineTaskRunner;
  let reposService: ReposService;

  // Fresh testing module per test with stubbed collaborators: a fake
  // ReposService, a bare PinoLogger, and a no-op 'spawn' token (the runner
  // receives spawn via DI so tests can replace process creation).
  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        {
          provide: ReposService,
          useValue: {
            getWorkspaceRootByTask: () => 'workspace-root',
            checkout: async () => undefined,
          },
        },
        {
          provide: getLoggerToken(PipelineTaskRunner.name),
          useValue: new PinoLogger({}),
        },
        {
          provide: 'spawn',
          useValue: () => undefined,
        },
        PipelineTaskRunner,
      ],
    }).compile();

    reposService = module.get(ReposService);
    runner = module.get(PipelineTaskRunner);
  });

  it('should be defined', () => {
    expect(runner).toBeDefined();
  });

  // onNewTask must forward the received task to doTask exactly once.
  it('onNewTask', async () => {
    const task = new PipelineTask();
    let tmpTask;
    const doTask = jest
      .spyOn(runner, 'doTask')
      .mockImplementation(async (task) => {
        tmpTask = task;
      });
    await runner.onNewTask(task);
    expect(tmpTask).toEqual(task);
    expect(doTask).toBeCalledTimes(1);
  });

  describe('test biz', () => {
    let emitEvent: jest.SpyInstance;
    // Stub emitEvent for all business-logic tests so emitted events can be
    // inspected via the spy without any messaging side effects.
    beforeEach(() => {
      emitEvent = jest
        .spyOn(runner, 'emitEvent')
        .mockImplementation((..._) => Promise.resolve());
    });

    describe('doTask', () => {
      let checkout: jest.SpyInstance;
      let doTaskUnit: jest.SpyInstance;

      beforeEach(() => {
        checkout = jest
          .spyOn(runner, 'checkout')
          .mockImplementation((..._) => Promise.resolve('/null'));
        doTaskUnit = jest
          .spyOn(runner, 'doTaskUnit')
          .mockImplementation((..._) => Promise.resolve());
      });

      // A task whose only requested unit is checkout: checkout runs once,
      // no other unit runs, and two task-level (unit === null) events are
      // emitted: working, then success.
      it('only checkout', async () => {
        const task = new PipelineTask();
        (task.id = 'taskId'), (task.pipeline = new Pipeline());
        task.units = [PipelineUnits.checkout];
        task.pipeline.id = 'pipelineId';
        task.pipeline.project = new Project();
        task.pipeline.project.id = 'projectId';
        task.pipeline.workUnitMetadata = new WorkUnitMetadata();
        task.pipeline.workUnitMetadata.version = 1;
        task.pipeline.workUnitMetadata.units = [
          {
            type: PipelineUnits.checkout,
            scripts: [],
          },
        ];

        await runner.doTask(task);

        expect(checkout).toBeCalledTimes(1);
        expect(doTaskUnit).toBeCalledTimes(0);
        expect(emitEvent).toBeCalledTimes(2);
        expect(emitEvent.mock.calls[0][0]).toMatchObject(task);
        expect(emitEvent.mock.calls[0][1]).toBeNull();
        expect(emitEvent.mock.calls[0][2]).toEqual(TaskStatuses.working);
        expect(emitEvent.mock.calls[1][0]).toMatchObject(task);
        expect(emitEvent.mock.calls[1][1]).toBeNull();
        expect(emitEvent.mock.calls[1][2]).toEqual(TaskStatuses.success);
      });

      // Several requested units: checkout is handled separately, the other
      // two (test, deploy) go through doTaskUnit; installDependencies is in
      // the metadata but NOT requested by the task, so it must not run.
      it('many units', async () => {
        const task = new PipelineTask();
        (task.id = 'taskId'), (task.pipeline = new Pipeline());
        task.units = [
          PipelineUnits.checkout,
          PipelineUnits.test,
          PipelineUnits.deploy,
        ];
        task.pipeline.id = 'pipelineId';
        task.pipeline.project = new Project();
        task.pipeline.project.id = 'projectId';
        task.pipeline.workUnitMetadata = new WorkUnitMetadata();
        task.pipeline.workUnitMetadata.version = 1;
        task.pipeline.workUnitMetadata.units = [
          {
            type: PipelineUnits.checkout,
            scripts: [],
          },
          {
            type: PipelineUnits.installDependencies,
            scripts: ['pwd'],
          },
          {
            type: PipelineUnits.test,
            scripts: ['pwd'],
          },
          {
            type: PipelineUnits.deploy,
            scripts: ['pwd', 'uname'],
          },
        ];

        await runner.doTask(task);

        expect(checkout).toBeCalledTimes(1);
        expect(doTaskUnit).toBeCalledTimes(2);
        expect(emitEvent).toBeCalledTimes(2);
      });

      // When a unit rejects, doTask must stop, and the final task-level
      // event must carry the failed status (doTask itself does not rethrow).
      it('unit work failed', async () => {
        const task = new PipelineTask();
        (task.id = 'taskId'), (task.pipeline = new Pipeline());
        task.units = [PipelineUnits.checkout, PipelineUnits.test];
        task.pipeline.id = 'pipelineId';
        task.pipeline.project = new Project();
        task.pipeline.project.id = 'projectId';
        task.pipeline.workUnitMetadata = new WorkUnitMetadata();
        task.pipeline.workUnitMetadata.version = 1;
        task.pipeline.workUnitMetadata.units = [
          {
            type: PipelineUnits.checkout,
            scripts: [],
          },
          {
            type: PipelineUnits.test,
            scripts: ['pwd'],
          },
        ];

        doTaskUnit = jest
          .spyOn(runner, 'doTaskUnit')
          .mockImplementation((..._) =>
            Promise.reject(new Error('test error')),
          );
        await runner.doTask(task);

        expect(checkout).toBeCalledTimes(1);
        expect(doTaskUnit).toBeCalledTimes(1);
        expect(emitEvent).toBeCalledTimes(2);
        expect(emitEvent.mock.calls[1][0]).toMatchObject(task);
        expect(emitEvent.mock.calls[1][1]).toBeNull();
        expect(emitEvent.mock.calls[1][2]).toEqual(TaskStatuses.failed);
      });
    });

    describe('doTaskUnit', () => {
      // Happy path: unit-level working then success events, and runScript
      // invoked with (script, workspacePath, task, unit).
      it('success', async () => {
        const runScript = jest
          .spyOn(runner, 'runScript')
          .mockImplementation((..._) => Promise.resolve());
        const task = new PipelineTask();

        const unit = PipelineUnits.test;
        const workspacePath = '/null';
        await runner.doTaskUnit(unit, ['pwd'], task, workspacePath);

        expect(emitEvent.mock.calls[0][0]).toEqual(task);
        expect(emitEvent.mock.calls[0][1]).toEqual(unit);
        expect(emitEvent.mock.calls[0][2]).toEqual(TaskStatuses.working);
        expect(emitEvent.mock.calls[1][0]).toEqual(task);
        expect(emitEvent.mock.calls[1][1]).toEqual(unit);
        expect(emitEvent.mock.calls[1][2]).toEqual(TaskStatuses.success);
        expect(runScript.mock.calls[0][0]).toEqual('pwd');
        expect(runScript.mock.calls[0][1]).toEqual(workspacePath);
        expect(runScript.mock.calls[0][2]).toEqual(task);
        expect(runScript.mock.calls[0][3]).toEqual(unit);
      });
      // A failing script must reject doTaskUnit (error propagates to doTask)
      // and still emit a unit-level failed event.
      it('failed', async () => {
        const runScript = jest
          .spyOn(runner, 'runScript')
          .mockImplementation((..._) =>
            Promise.reject(new Error('test error')),
          );
        const task = new PipelineTask();

        const unit = PipelineUnits.test;
        const workspacePath = '/null';
        await expect(
          runner.doTaskUnit(unit, ['pwd'], task, workspacePath),
        ).rejects.toThrow('test error');

        expect(emitEvent.mock.calls[1]?.[0]).toEqual(task);
        expect(emitEvent.mock.calls[1]?.[1]).toEqual(unit);
        expect(emitEvent.mock.calls[1]?.[2]).toEqual(TaskStatuses.failed);
        expect(runScript).toBeCalledTimes(1);
      });
    });

    describe('runScript', () => {
      // Fake child process exits 0 immediately: runScript resolves and the
      // injected spawn receives the script plus { shell, cwd } options.
      it('normal', async () => {
        const spawn = jest.fn((..._: any[]) => ({
          stdout: {
            on: () => undefined,
          },
          stderr: {
            on: () => undefined,
          },
          addListener: (_: any, fn: (code: number) => void) => {
            fn(0);
          },
        }));
        (runner as any).spawn = spawn;

        const task = new PipelineTask();
        task.id = 'taskId';
        const unit = PipelineUnits.deploy;

        await runner.runScript('script name', 'workspaceRoot', task, unit);
        expect(spawn).toHaveBeenCalledTimes(1);
        expect(spawn.mock.calls[0][0]).toEqual('script name');
        expect(spawn.mock.calls[0][1]).toMatchObject({
          shell: true,
          cwd: 'workspaceRoot',
        });
      });
      // Non-zero exit code must reject the runScript promise.
      it('failed', async () => {
        const spawn = jest.fn((..._: any[]) => ({
          stdout: {
            on: () => undefined,
          },
          stderr: {
            on: () => undefined,
          },
          addListener: (_: any, fn: (code: number) => void) => {
            fn(1);
          },
        }));
        (runner as any).spawn = spawn;

        const task = new PipelineTask();
        task.id = 'taskId';
        const unit = PipelineUnits.deploy;

        expect(
          runner.runScript('script name', 'workspaceRoot', task, unit),
        ).rejects.toThrowError();
      });
      // runScript must not settle until every in-flight emitEvent promise
      // has resolved: 3 timed messages fan out to both stdout and stderr
      // handlers (shared `on` mock) = 6 emits, plus 1 initial 'stdin' emit.
      it('wait emit message done', async () => {
        let finishedFn: () => void;
        const on = jest.fn((_: any, fn: (buff: Buffer) => void) => {
          setTimeout(() => {
            fn(Buffer.from('message 1'));
            setTimeout(() => {
              fn(Buffer.from('message 2'));
              setTimeout(() => {
                fn(Buffer.from('message 3'));
                finishedFn();
              }, 1000);
            }, 10);
          }, 10);
        });
        const spawn = jest.fn((..._: any[]) => ({
          stdout: {
            on,
          },
          stderr: {
            on,
          },
          addListener: (_: any, fn: (code: number) => void) => {
            finishedFn = () => fn(0);
          },
        }));

        let emitSuccessCount = 0;
        jest.spyOn(runner, 'emitEvent').mockImplementation((..._: any[]) => {
          return new Promise((resolve) => {
            setTimeout(() => {
              emitSuccessCount++;
              resolve();
            }, 1000);
          });
        });
        (runner as any).spawn = spawn;

        const task = new PipelineTask();
        task.id = 'taskId';
        const unit = PipelineUnits.deploy;

        await runner.runScript('script name', 'workspaceRoot', task, unit);
        expect(emitSuccessCount).toEqual(1 + 6);
      });
    });
  });
});
|
253
src/pipeline-tasks/pipeline-task.runner.ts
Normal file
253
src/pipeline-tasks/pipeline-task.runner.ts
Normal file
@ -0,0 +1,253 @@
|
||||
import { ReposService } from '../repos/repos.service';
|
||||
import { spawn, ChildProcessWithoutNullStreams } from 'child_process';
|
||||
import { PipelineTask } from './pipeline-task.entity';
|
||||
import { ApplicationException } from '../commons/exceptions/application.exception';
|
||||
import { PipelineUnits } from './enums/pipeline-units.enum';
|
||||
import { TaskStatuses } from './enums/task-statuses.enum';
|
||||
import { InjectPinoLogger, PinoLogger } from 'nestjs-pino';
|
||||
import { RabbitSubscribe } from '@golevelup/nestjs-rabbitmq';
|
||||
import { PipelineTaskEvent } from './models/pipeline-task-event';
|
||||
import { last } from 'ramda';
|
||||
import { Inject } from '@nestjs/common';
|
||||
|
||||
type Spawn = typeof spawn;
|
||||
|
||||
export class PipelineTaskRunner {
|
||||
readonly processes = new Map<string, ChildProcessWithoutNullStreams>();
|
||||
|
||||
constructor(
|
||||
private readonly reposService: ReposService,
|
||||
@InjectPinoLogger(PipelineTaskRunner.name)
|
||||
private readonly logger: PinoLogger,
|
||||
@Inject('spawn')
|
||||
private readonly spawn: Spawn,
|
||||
) {}
|
||||
@RabbitSubscribe({
|
||||
exchange: 'new-pipeline-task',
|
||||
routingKey: 'mac',
|
||||
queue: 'mac.new-pipeline-task',
|
||||
})
|
||||
async onNewTask(task: PipelineTask) {
|
||||
this.logger.info({ task }, 'on new task [%s].', task.id);
|
||||
try {
|
||||
await this.doTask(task);
|
||||
} catch (err) {
|
||||
this.logger.error({ task, err }, err.message);
|
||||
}
|
||||
}
|
||||
@RabbitSubscribe({
|
||||
exchange: 'stop-pipeline-task',
|
||||
routingKey: 'mac',
|
||||
queue: 'mac.stop-pipeline-task',
|
||||
})
|
||||
async onStopTask(task: PipelineTask) {
|
||||
this.logger.info({ task }, 'on stop task [%s].', task.id);
|
||||
const process = this.processes.get(task.id);
|
||||
if (process) {
|
||||
this.logger.info({ task }, 'send signal SIGINT to child process.');
|
||||
process.kill('SIGINT');
|
||||
|
||||
setTimeout(() => {
|
||||
if (process === this.processes.get(task.id)) {
|
||||
this.logger.info({ task }, 'send signal SIGKILL to child process.');
|
||||
process.kill('SIGKILL');
|
||||
return;
|
||||
}
|
||||
if (this.processes.has(task.id)) {
|
||||
this.logger.error(
|
||||
{ task },
|
||||
'this pipeline task not stop yet. there is a new process running, maybe is a bug about error capture',
|
||||
);
|
||||
}
|
||||
}, 10_000);
|
||||
} else {
|
||||
this.logger.info({ task }, 'child process is not running.');
|
||||
}
|
||||
}
|
||||
|
||||
async doTask(task: PipelineTask) {
|
||||
if (task.pipeline.workUnitMetadata.version !== 1) {
|
||||
throw new ApplicationException(
|
||||
'work unit metadata version is not match.',
|
||||
);
|
||||
}
|
||||
await this.emitEvent(
|
||||
task,
|
||||
null,
|
||||
TaskStatuses.working,
|
||||
`[start task]`,
|
||||
'stdout',
|
||||
);
|
||||
|
||||
this.logger.info('running task [%s].', task.id);
|
||||
try {
|
||||
const workspaceRoot = await this.checkout(task);
|
||||
const units = task.units
|
||||
.filter((unit) => unit !== PipelineUnits.checkout)
|
||||
.map(
|
||||
(type) =>
|
||||
task.pipeline.workUnitMetadata.units.find(
|
||||
(unit) => unit.type === type,
|
||||
) ?? { type: type, scripts: [] },
|
||||
);
|
||||
this.logger.info({ units }, 'begin run units.');
|
||||
for (const unit of units) {
|
||||
await this.doTaskUnit(unit.type, unit.scripts, task, workspaceRoot);
|
||||
}
|
||||
await this.emitEvent(
|
||||
task,
|
||||
null,
|
||||
TaskStatuses.success,
|
||||
`[finished task] success`,
|
||||
'stdout',
|
||||
);
|
||||
this.logger.info({ task }, 'task [%s] completed.', task.id);
|
||||
} catch (err) {
|
||||
await this.emitEvent(
|
||||
task,
|
||||
null,
|
||||
TaskStatuses.failed,
|
||||
`[finished unit] ${err.message}`,
|
||||
'stderr',
|
||||
);
|
||||
this.logger.error({ task, error: err }, 'task [%s] failed.', task.id);
|
||||
} finally {
|
||||
}
|
||||
}
|
||||
|
||||
async doTaskUnit(
|
||||
unit: PipelineUnits,
|
||||
scripts: string[],
|
||||
task: PipelineTask,
|
||||
workspaceRoot: string,
|
||||
) {
|
||||
await this.emitEvent(
|
||||
task,
|
||||
unit,
|
||||
TaskStatuses.working,
|
||||
`[begin unit] ${unit}`,
|
||||
'stdin',
|
||||
);
|
||||
this.logger.info({ task }, 'curr unit is %s', unit);
|
||||
try {
|
||||
for (const script of scripts) {
|
||||
this.logger.debug('begin runScript %s', script);
|
||||
await this.runScript(script, workspaceRoot, task, unit);
|
||||
this.logger.debug('end runScript %s', script);
|
||||
}
|
||||
|
||||
await this.emitEvent(
|
||||
task,
|
||||
unit,
|
||||
TaskStatuses.success,
|
||||
`[finished unit] ${unit}`,
|
||||
'stdout',
|
||||
);
|
||||
} catch (err) {
|
||||
await this.emitEvent(
|
||||
task,
|
||||
unit,
|
||||
TaskStatuses.failed,
|
||||
`[finished unit] ${err.message}`,
|
||||
'stderr',
|
||||
);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async checkout(task: PipelineTask) {
|
||||
await this.emitEvent(
|
||||
task,
|
||||
PipelineUnits.checkout,
|
||||
TaskStatuses.working,
|
||||
'[begin unit] checkout',
|
||||
'stdin',
|
||||
);
|
||||
try {
|
||||
const path = await this.reposService.checkout4Task(task);
|
||||
await this.emitEvent(
|
||||
task,
|
||||
PipelineUnits.checkout,
|
||||
TaskStatuses.success,
|
||||
'checkout success.',
|
||||
'stdout',
|
||||
);
|
||||
return path;
|
||||
} catch (err) {
|
||||
await this.emitEvent(
|
||||
task,
|
||||
PipelineUnits.checkout,
|
||||
TaskStatuses.failed,
|
||||
'checkout failed.',
|
||||
'stderr',
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async emitEvent(
|
||||
task: PipelineTask,
|
||||
unit: PipelineUnits | null,
|
||||
status: TaskStatuses,
|
||||
message: string,
|
||||
messageType: 'stderr' | 'stdout' | 'stdin',
|
||||
) {
|
||||
const event: PipelineTaskEvent = {
|
||||
taskId: task.id,
|
||||
pipelineId: task.pipeline.id,
|
||||
projectId: task.pipeline.project.id,
|
||||
unit,
|
||||
emittedAt: new Date(),
|
||||
message: last(message) === '\n' ? message : message + '\n',
|
||||
messageType,
|
||||
status,
|
||||
};
|
||||
}
|
||||
|
||||
async runScript(
|
||||
script: string,
|
||||
workspaceRoot: string,
|
||||
task: PipelineTask,
|
||||
unit: PipelineUnits,
|
||||
): Promise<void> {
|
||||
await this.emitEvent(task, unit, TaskStatuses.working, script, 'stdin');
|
||||
return new Promise((resolve, reject) => {
|
||||
const sub = this.spawn(script, {
|
||||
shell: true,
|
||||
cwd: workspaceRoot,
|
||||
});
|
||||
this.processes.set(task.id, sub);
|
||||
let loggingCount = 0; // semaphore
|
||||
|
||||
sub.stderr.on('data', (data: Buffer) => {
|
||||
const str = data.toString();
|
||||
loggingCount++;
|
||||
|
||||
this.emitEvent(task, unit, TaskStatuses.working, str, 'stdout').finally(
|
||||
() => loggingCount--,
|
||||
);
|
||||
});
|
||||
sub.stdout.on('data', (data: Buffer) => {
|
||||
const str = data.toString();
|
||||
loggingCount++;
|
||||
|
||||
this.emitEvent(task, unit, TaskStatuses.working, str, 'stderr').finally(
|
||||
() => loggingCount--,
|
||||
);
|
||||
});
|
||||
sub.addListener('close', async (code) => {
|
||||
this.processes.delete(task.id);
|
||||
await new Promise<void>(async (resolve) => {
|
||||
for (let i = 0; i < 10 && loggingCount > 0; i++) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
this.logger.debug('waiting logging... (%dx500ms)', i);
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
if (code === 0) {
|
||||
return resolve();
|
||||
}
|
||||
return reject(new ApplicationException('exec script failed'));
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
@ -11,6 +11,10 @@ import { PipelineTaskConsumer } from './pipeline-task.consumer';
|
||||
import { PIPELINE_TASK_QUEUE } from './pipeline-tasks.constants';
|
||||
import { PipelineTaskLogsService } from './pipeline-task-logs.service';
|
||||
import { PubSubModule } from '../commons/pub-sub/pub-sub.module';
|
||||
import { RabbitMQModule } from '@golevelup/nestjs-rabbitmq';
|
||||
import { ConfigModule, ConfigService } from '@nestjs/config';
|
||||
import { PipelineTaskRunner } from './pipeline-task.runner';
|
||||
import { spawn } from 'child_process';
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
@ -21,12 +25,48 @@ import { PubSubModule } from '../commons/pub-sub/pub-sub.module';
|
||||
PubSubModule.forFeature(),
|
||||
RedisModule,
|
||||
ReposModule,
|
||||
|
||||
RabbitMQModule.forRootAsync(RabbitMQModule, {
|
||||
imports: [ConfigModule],
|
||||
useFactory: (configService: ConfigService) => ({
|
||||
uri: configService.get<string>('db.rabbitmq.uri'),
|
||||
exchanges: [
|
||||
{
|
||||
name: 'new-pipeline-task',
|
||||
type: 'fanout',
|
||||
options: {
|
||||
durable: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'stop-pipeline-task',
|
||||
type: 'fanout',
|
||||
options: {
|
||||
durable: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'update-pipeline-task',
|
||||
type: 'fanout',
|
||||
options: {
|
||||
durable: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
inject: [ConfigService],
|
||||
}),
|
||||
],
|
||||
providers: [
|
||||
PipelineTasksService,
|
||||
PipelineTasksResolver,
|
||||
PipelineTaskConsumer,
|
||||
PipelineTaskLogsService,
|
||||
PipelineTaskRunner,
|
||||
{
|
||||
provide: 'spawn',
|
||||
useValue: spawn,
|
||||
},
|
||||
],
|
||||
exports: [PipelineTasksService],
|
||||
})
|
||||
|
@ -46,7 +46,12 @@ export class PipelineTasksResolver {
|
||||
}
|
||||
|
||||
@Query(() => PipelineTask)
|
||||
async findPipelineTask(@Args('id') id: string) {
|
||||
async pipelineTask(@Args('id') id: string) {
|
||||
return await this.service.findTaskById(id);
|
||||
}
|
||||
|
||||
@Mutation(() => Boolean)
|
||||
async stopPipelineTask(@Args('id') id: string) {
|
||||
const task = await this.service.findTaskById(id);
|
||||
}
|
||||
}
|
||||
|
@ -15,6 +15,7 @@ import debug from 'debug';
|
||||
import { InjectPubSub } from '../commons/pub-sub/decorators/inject-pub-sub.decorator';
|
||||
import { PubSub } from '../commons/pub-sub/pub-sub';
|
||||
import { observableToAsyncIterable } from '@graphql-tools/utils';
|
||||
import { AmqpConnection } from '@golevelup/nestjs-rabbitmq';
|
||||
|
||||
const log = debug('fennec:pipeline-tasks:service');
|
||||
|
||||
@ -30,37 +31,29 @@ export class PipelineTasksService {
|
||||
private readonly redis: RedisService,
|
||||
@InjectPubSub()
|
||||
private readonly pubSub: PubSub,
|
||||
private readonly amqpConnection: AmqpConnection,
|
||||
) {}
|
||||
async addTask(dto: CreatePipelineTaskInput) {
|
||||
const pipeline = await this.pipelineRepository.findOneOrFail({
|
||||
where: { id: dto.pipelineId },
|
||||
relations: ['project'],
|
||||
});
|
||||
const hasUnfinishedTask = await this.repository
|
||||
.findOne({
|
||||
pipelineId: dto.pipelineId,
|
||||
commit: dto.commit,
|
||||
status: In([TaskStatuses.pending, TaskStatuses.working]),
|
||||
})
|
||||
.then((val) => !isNil(val));
|
||||
if (hasUnfinishedTask) {
|
||||
throw new ConflictException(
|
||||
'There are the same tasks among the unfinished tasks!',
|
||||
);
|
||||
}
|
||||
// const hasUnfinishedTask = await this.repository
|
||||
// .findOne({
|
||||
// pipelineId: dto.pipelineId,
|
||||
// commit: dto.commit,
|
||||
// status: In([TaskStatuses.pending, TaskStatuses.working]),
|
||||
// })
|
||||
// .then((val) => !isNil(val));
|
||||
// if (hasUnfinishedTask) {
|
||||
// throw new ConflictException(
|
||||
// 'There are the same tasks among the unfinished tasks!',
|
||||
// );
|
||||
// }
|
||||
const task = await this.repository.save(this.repository.create(dto));
|
||||
task.pipeline = pipeline;
|
||||
|
||||
const tasksKey = this.getRedisTokens(pipeline)[1];
|
||||
const redis = this.redis.getClient();
|
||||
await redis.lpush(tasksKey, JSON.stringify(task));
|
||||
log(
|
||||
'add task %s:%s-%s',
|
||||
task.id,
|
||||
task.pipeline.branch,
|
||||
task.commit.slice(0, 6),
|
||||
);
|
||||
await this.doNextTask(pipeline);
|
||||
this.amqpConnection.publish('new-pipeline-task', 'mac', task);
|
||||
return task;
|
||||
}
|
||||
|
||||
|
@ -105,4 +105,10 @@ export class ReposService {
|
||||
encodeURIComponent(`${task.pipeline.name}-${task.commit}`),
|
||||
);
|
||||
}
|
||||
|
||||
async checkout4Task(task: PipelineTask): Promise<string> {
|
||||
const path = this.getWorkspaceRootByTask(task);
|
||||
await this.checkout(task, path);
|
||||
return path;
|
||||
}
|
||||
}
|
||||
|
Reference in New Issue
Block a user