feat(pipeline-tasks): improve doTask

Ivan 2021-03-05 17:12:06 +08:00
parent f39c801fc2
commit 7913184174
8 changed files with 245 additions and 19 deletions

src/pipeline-tasks/models/pipeline-task-log-message.module.ts Normal file

@@ -0,0 +1,15 @@
import { PipelineTask } from './../pipeline-task.entity';
// Payload placed on the log queue: which task produced the output,
// when it was captured, and the text itself.
export class PipelineTaskLogMessage {
task: PipelineTask;
time: Date;
message: string;
static create(task: PipelineTask, message: string) {
return Object.assign(new PipelineTaskLogMessage(), {
task,
message,
time: new Date(),
});
}
}
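A quick usage sketch of the new factory; the task id and log text below are illustrative:

// Illustrative only: wrap one chunk of script output into a queue payload.
const task = new PipelineTask();
task.id = 'task-1';
const msg = PipelineTaskLogMessage.create(task, 'npm install finished\n');
// create() stamps `time` itself, so consumers can order entries reliably.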

src/pipeline-tasks/models/pipeline-task-logs.model.ts

@@ -5,5 +5,5 @@ export class PipelineTaskLogs {
status: TaskStatuses;
startedAt?: Date;
endedAt?: Date;
logs: string[];
logs = '';
}
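The change here: `logs` goes from a `string[]` to a single string defaulting to empty, so unit output now accumulates by concatenation rather than by pushing array elements. A minimal sketch of the new shape in use:

// Sketch: append chunks to the string field instead of pushing to an array.
const unitLog = new PipelineTaskLogs();
unitLog.logs += '[RUN SCRIPT] npm ci';
unitLog.logs += 'added 120 packages in 4s\n';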

src/pipeline-tasks/pipeline-task.consumer.spec.ts

@@ -1,14 +1,22 @@
import { PIPELINE_TASK_LOG_QUEUE } from './pipeline-tasks.constants';
import { Test, TestingModule } from '@nestjs/testing';
import { Job } from 'bull';
import { Job, Queue } from 'bull';
import { join } from 'path';
import { ReposService } from '../repos/repos.service';
import { PipelineUnits } from './enums/pipeline-units.enum';
import { PipelineTaskConsumer } from './pipeline-task.consumer';
import { PipelineTask } from './pipeline-task.entity';
import { PipelineTasksService } from './pipeline-tasks.service';
import { getQueueToken } from '@nestjs/bull';
import { PipelineTaskLogMessage } from './models/pipeline-task-log-message.module';
import { Pipeline } from '../pipelines/pipeline.entity';
import { Project } from '../projects/project.entity';
import { TaskStatuses } from './enums/task-statuses.enum';
describe('PipelineTaskConsumer', () => {
let consumer: PipelineTaskConsumer;
let tasksService: PipelineTasksService;
let logQueue: Queue<PipelineTaskLogMessage>;
const getJob = () =>
({
data: {
@@ -24,20 +32,29 @@ describe('PipelineTaskConsumer', () => {
provide: PipelineTasksService,
useValue: {
doNextTask: () => undefined,
updateTask: async (value) => value,
},
},
{
provide: ReposService,
useValue: {
getWorkspaceRootByTask: () => 'workspace-root',
checkout: async () => undefined,
},
},
PipelineTaskConsumer,
{
provide: getQueueToken(PIPELINE_TASK_LOG_QUEUE),
useValue: {
add: () => undefined,
},
},
],
}).compile();
tasksService = module.get(PipelineTasksService);
consumer = module.get(PipelineTaskConsumer);
logQueue = module.get(getQueueToken(PIPELINE_TASK_LOG_QUEUE));
});
it('should be defined', () => {
@@ -53,7 +70,123 @@
});
});
describe('runScript', () => {
it('should succeed and log the right message', async () => {
const add = jest.spyOn(logQueue, 'add');
await expect(
consumer
.runScript(
'node one-second-work.js',
join(__dirname, '../../test/data'),
)
.then((arr) => arr.join('')),
).resolves.toMatch(/10.+20.+30.+40.+50.+60.+70.+80.+90/s);
// expect(add).toHaveBeenCalledTimes(10);
expect(
((add.mock.calls[8][0] as unknown) as PipelineTaskLogMessage).message,
).toMatch(/^90/);
});
it('should fail and log the right message', async () => {
const add = jest.spyOn(logQueue, 'add');
await expect(
consumer.runScript(
'node bad-work.js',
join(__dirname, '../../test/data'),
),
).rejects.toThrowError(/Error Message/);
// expect(add).toHaveBeenCalledTimes(8);
const logs = add.mock.calls
.map((call) => ((call[0] as unknown) as PipelineTaskLogMessage).message)
.join('');
expect(logs).toMatch(/10.+20.+30.+40.+50/s);
});
it('should log with task', async () => {
const task = new PipelineTask();
task.id = 'test';
const add = jest.spyOn(logQueue, 'add');
await expect(
consumer.runScript(
'node bad-work.js',
join(__dirname, '../../test/data'),
task,
),
).rejects.toThrowError(/Error Message 2/);
expect(
((add.mock.calls[2][0] as unknown) as PipelineTaskLogMessage).task,
).toMatchObject(task);
});
});
describe('doTask', () => {
let task: PipelineTask;
beforeEach(() => {
task = new PipelineTask();
task.id = 'test-id';
task.logs = [];
task.pipeline = new Pipeline();
task.pipeline.workUnitMetadata = {
version: 1,
units: [
{
type: PipelineUnits.checkout,
scripts: [],
},
{
type: PipelineUnits.installDependencies,
scripts: ["echo ' Hello, Fennec!'"],
},
],
};
task.units = task.pipeline.workUnitMetadata.units.map(
(unit) => unit.type,
);
task.pipeline.project = new Project();
task.pipeline.project.name = 'test-project';
});
it('should do all tasks', async () => {
const job: Job = ({
data: task,
update: jest.fn().mockImplementation(() => undefined),
} as unknown) as Job;
const runScript = jest
.spyOn(consumer, 'runScript')
.mockImplementation(async () => []);
const updateTask = jest.spyOn(tasksService, 'updateTask');
await consumer.doTask(job);
expect(runScript).toHaveBeenCalledTimes(1);
expect(updateTask).toHaveBeenCalledTimes(1);
const taskDto: PipelineTask = updateTask.mock.calls[0][0];
expect(taskDto.logs).toHaveLength(2);
expect(taskDto.logs[0].status).toEqual(TaskStatuses.success);
expect(taskDto.logs[0].unit).toEqual(PipelineUnits.checkout);
expect(taskDto.logs[1].logs).toMatch(/Hello, Fennec!/);
});
it('should log error message', async () => {
const job: Job = ({
data: task,
update: jest.fn().mockImplementation(() => undefined),
} as unknown) as Job;
const runScript = jest
.spyOn(consumer, 'runScript')
.mockImplementation(async () => {
throw new Error('bad message');
});
const updateTask = jest.spyOn(tasksService, 'updateTask');
await consumer.doTask(job);
expect(updateTask).toHaveBeenCalledTimes(1);
const taskDto: PipelineTask = updateTask.mock.calls[0][0];
expect(taskDto.logs).toHaveLength(2);
expect(taskDto.logs[0].status).toEqual(TaskStatuses.success);
expect(taskDto.logs[1].status).toEqual(TaskStatuses.failed);
expect(taskDto.logs[1].logs).toMatch(/bad message/);
});
});
});

src/pipeline-tasks/pipeline-task.consumer.ts

@@ -1,20 +1,39 @@
import { PipelineTaskLogs } from './models/pipeline-task-logs.model';
import { ReposService } from './../repos/repos.service';
import { OnQueueCompleted, Process, Processor } from '@nestjs/bull';
import { Job } from 'bull';
import {
InjectQueue,
OnQueueCompleted,
Process,
Processor,
} from '@nestjs/bull';
import { Job, Queue } from 'bull';
import { spawn } from 'child_process';
import { PipelineTask } from './pipeline-task.entity';
import { PIPELINE_TASK_QUEUE } from './pipeline-tasks.constants';
import {
PIPELINE_TASK_LOG_QUEUE,
PIPELINE_TASK_QUEUE,
} from './pipeline-tasks.constants';
import { PipelineTasksService } from './pipeline-tasks.service';
import { ApplicationException } from '../commons/exceptions/application.exception';
import { PipelineUnits } from './enums/pipeline-units.enum';
import { PipelineTaskLogMessage } from './models/pipeline-task-log-message.module';
import { TaskStatuses } from './enums/task-statuses.enum';
@Processor(PIPELINE_TASK_QUEUE)
export class PipelineTaskConsumer {
constructor(
private readonly service: PipelineTasksService,
private readonly reposService: ReposService,
@InjectQueue(PIPELINE_TASK_LOG_QUEUE)
private readonly logQueue: Queue<PipelineTaskLogMessage>,
) {}
@Process()
async doTask({ data: task }: Job<PipelineTask>) {
async doTask({ data: task, update }: Job<PipelineTask>) {
if (task.pipeline.workUnitMetadata.version !== 1) {
throw new ApplicationException(
'work unit metadata version does not match.',
);
}
const workspaceRoot = this.reposService.getWorkspaceRootByTask(task);
const units = task.units.map(
@@ -24,31 +43,70 @@ export class PipelineTaskConsumer {
) ?? { type: type, scripts: [] },
);
for (const unit of units) {
// When checking out code, don't run any other scripts.
if (unit.type === PipelineUnits.checkout) {
await this.reposService.checkout(task, workspaceRoot);
continue;
}
for (const script of unit.scripts) {
await this.runScript(task, script, workspaceRoot);
try {
for (const unit of units) {
const unitLog = new PipelineTaskLogs();
unitLog.unit = unit.type;
unitLog.startedAt = new Date();
try {
// When checking out code, don't run any other scripts.
if (unit.type === PipelineUnits.checkout) {
await this.reposService.checkout(task, workspaceRoot);
unitLog.status = TaskStatuses.success;
continue;
}
for (const script of unit.scripts) {
unitLog.logs += `[RUN SCRIPT] ${script}`;
const messages = await this.runScript(script, workspaceRoot, task);
unitLog.logs += messages.join('');
}
unitLog.status = TaskStatuses.success;
} catch (err) {
unitLog.status = TaskStatuses.failed;
unitLog.logs += err.message;
throw err;
} finally {
unitLog.endedAt = new Date();
task.logs.push(unitLog);
}
}
} catch (err) {
console.log(err);
} finally {
task = await this.service.updateTask(task);
update(task);
}
}
async runScript(task: PipelineTask, script: string, workspaceRoot: string) {
async runScript(
script: string,
workspaceRoot: string,
task?: PipelineTask,
): Promise<string[]> {
return new Promise((resolve, reject) => {
const errorMessages: string[] = [];
const logs: string[] = [];
const sub = spawn(script, {
shell: true,
cwd: workspaceRoot,
});
sub.stderr.on('data', (data) => errorMessages.push(data));
sub.stderr.on('data', (data: Buffer) => {
const str = data.toString();
errorMessages.push(str);
logs.push(str);
this.logQueue.add(PipelineTaskLogMessage.create(task, str));
});
sub.stdout.on('data', (data: Buffer) => {
const str = data.toString();
logs.push(str);
this.logQueue.add(PipelineTaskLogMessage.create(task, str));
});
sub.addListener('close', (code) => {
if (code === 0) {
return resolve(code);
return resolve(logs);
}
return reject(new ApplicationException(errorMessages.join('\n')));
return reject(new ApplicationException(errorMessages.join('')));
});
});
}
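Two notes on the consumer. First, `doTask` destructures `update` off the Bull Job; the spec's mock job tolerates that, but the real `Job#update` depends on its `this` binding, so keeping the job reference is the safer shape. A minimal sketch of that alternative (the reshuffle is ours; the names come from the diff):

// Sketch: keep the job object so update() is invoked as a method.
@Process()
async doTask(job: Job<PipelineTask>) {
  let task = job.data;
  // ... run the units exactly as above ...
  task = await this.service.updateTask(task);
  await job.update(task);
}

Second, `runScript` now resolves with the collected output chunks and streams each chunk to the log queue as it arrives. An illustrative call, mirroring the spec:

const lines = await consumer.runScript(
  'node one-second-work.js',
  join(__dirname, '../../test/data'),
  task, // optional; when present, every chunk is also queued as a log message
);
console.log(lines.join(''));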

src/pipeline-tasks/pipeline-tasks.constants.ts

@@ -1 +1,2 @@
export const PIPELINE_TASK_QUEUE = 'PIPELINE_TASK_QUEUE';
export const PIPELINE_TASK_LOG_QUEUE = 'PIPELINE_TASK_LOG_QUEUE';
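Nothing in this diff consumes the new log queue yet; the constant is registered and `runScript` writes to it. A hypothetical consumer, purely for illustration (the class and its logging behavior are ours; only the constant and payload type come from the commit):

import { Process, Processor } from '@nestjs/bull';
import { Job } from 'bull';
import { PipelineTaskLogMessage } from './models/pipeline-task-log-message.module';
import { PIPELINE_TASK_LOG_QUEUE } from './pipeline-tasks.constants';

// Hypothetical consumer; not part of this commit.
@Processor(PIPELINE_TASK_LOG_QUEUE)
export class PipelineTaskLogConsumer {
  @Process()
  handleLog(job: Job<PipelineTaskLogMessage>) {
    // Bull serializes payloads to JSON, so `time` arrives as an ISO string.
    const { task, time, message } = job.data;
    console.log(`[${time}] [task ${task.id}] ${message}`);
  }
}

The queue itself would also need registering wherever BullModule.registerQueue is called, e.g. adding { name: PIPELINE_TASK_LOG_QUEUE } next to the existing task queue entry.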

src/pipeline-tasks/pipeline-tasks.service.ts

@@ -62,6 +62,10 @@ export class PipelineTasksService {
}
}
async updateTask(task: PipelineTask) {
return await this.repository.save(task);
}
getRedisTokens(pipeline: Pipeline): [string, string] {
return [`pipeline-${pipeline.id}:lck`, `pipeline-${pipeline.id}:tasks`];
}

test/data/bad-work.js Normal file

@@ -0,0 +1,8 @@
// Prints 10..50 on stdout, writes two messages to stderr, then exits
// with a non-zero code to exercise runScript's failure path.
for (let i = 1; i <= 5; i++) {
console.log(i * 10);
}
console.error('Error Message');
console.error('Error Message 2');
console.log('Bye-bye');
process.exit(1);

test/data/one-second-work.js Normal file

@@ -0,0 +1,7 @@
// Prints a multiple of 10 roughly every 95 ms for one second
// (about ten lines) to exercise runScript's streaming success path.
let timer;
let count = 0;
setTimeout(() => clearInterval(timer), 1_000);
timer = setInterval(() => {
console.log(++count * 10);
}, 95);