feat: update the data in the database after the task completes.

This commit is contained in:
Ivan Li 2021-06-20 00:05:24 +08:00
parent 0c3310d3a5
commit 133439bb49
4 changed files with 35 additions and 31 deletions

View File

@ -18,6 +18,7 @@ describe('PipelineTaskFlushService', () => {
const redisClient = {
rpush: jest.fn(() => Promise.resolve()),
lrange: jest.fn(() => Promise.resolve()),
expire: jest.fn(() => Promise.resolve()),
};
const module: TestingModule = await Test.createTestingModule({
providers: [

View File

@ -37,11 +37,15 @@ export class PipelineTaskFlushService {
await client.rpush(this.getKey(message.taskId), JSON.stringify(message));
await client.expire(this.getKey(message.taskId), 600); // ten minutes
if (isNil(message.unit) && terminalTaskStatuses.includes(message.status)) {
this.amqpConnection.request({
try {
await this.amqpConnection.request({
exchange: EXCHANGE_PIPELINE_TASK_TOPIC,
routingKey: ROUTE_PIPELINE_TASK_DONE,
payload: { taskId: message.taskId, status: message.status },
});
} catch (error) {
console.log(error);
}
}
}

View File

@ -4,26 +4,15 @@ import { getRepositoryToken } from '@nestjs/typeorm';
import { PipelineTask } from './pipeline-task.entity';
import { Pipeline } from '../pipelines/pipeline.entity';
import { Repository } from 'typeorm';
import { Queue } from 'bull';
import { AmqpConnection } from '@golevelup/nestjs-rabbitmq';
import { PipelineTaskFlushService } from './pipeline-task-flush.service';
import { getLoggerToken, PinoLogger } from 'nestjs-pino';
describe('PipelineTasksService', () => {
let service: PipelineTasksService;
let module: TestingModule;
let taskRepository: Repository<PipelineTask>;
let pipelineRepository: Repository<Pipeline>;
const getBasePipeline = () =>
({
id: 'test',
name: '测试流水线',
branch: 'master',
workUnitMetadata: {},
project: {
id: 'test-project',
},
} as Pipeline);
let redisClient;
let taskQueue: Queue;
beforeEach(async () => {
module = await Test.createTestingModule({
@ -41,6 +30,14 @@ describe('PipelineTasksService', () => {
provide: AmqpConnection,
useValue: {},
},
{
provide: PipelineTaskFlushService,
useValue: {},
},
{
provide: getLoggerToken(PipelineTasksService.name),
useValue: new PinoLogger({}),
},
],
}).compile();

View File

@ -5,13 +5,10 @@ import { Repository } from 'typeorm';
import { CreatePipelineTaskInput } from './dtos/create-pipeline-task.input';
import { Pipeline } from '../pipelines/pipeline.entity';
import debug from 'debug';
import {
AmqpConnection,
RabbitRPC,
RabbitSubscribe,
} from '@golevelup/nestjs-rabbitmq';
import { AmqpConnection, RabbitRPC } from '@golevelup/nestjs-rabbitmq';
import {
EXCHANGE_PIPELINE_TASK_FANOUT,
EXCHANGE_PIPELINE_TASK_TOPIC,
QUEUE_PIPELINE_TASK_DONE,
ROUTE_PIPELINE_TASK_DONE,
} from './pipeline-tasks.constants';
@ -79,7 +76,7 @@ export class PipelineTasksService {
}
@RabbitRPC({
exchange: EXCHANGE_PIPELINE_TASK_FANOUT,
exchange: EXCHANGE_PIPELINE_TASK_TOPIC,
routingKey: ROUTE_PIPELINE_TASK_DONE,
queue: QUEUE_PIPELINE_TASK_DONE,
queueOptions: {
@ -87,13 +84,13 @@ export class PipelineTasksService {
durable: true,
},
})
async updateByEvent({ id }: { id: string }) {
async updateByEvent({ taskId }: { taskId: string }) {
try {
const [events, task] = await Promise.all([
this.eventFlushService.read(id),
this.findTaskById(id),
this.eventFlushService.read(taskId),
this.findTaskById(taskId),
]);
this.logger.info('[updateByEvent] start. taskId: %s', id);
this.logger.info('[updateByEvent] start. taskId: %s', taskId);
for (const event of events) {
if (isNil(event.unit)) {
@ -130,10 +127,15 @@ export class PipelineTasksService {
l.status = event.status;
}
}
await this.repository.update({ id }, task);
this.logger.info('[updateByEvent] success. taskId: %s', id);
await this.repository.update({ id: taskId }, task);
return task;
this.logger.info('[updateByEvent] success. taskId: %s', taskId);
} catch (error) {
this.logger.error({ error }, '[updateByEvent] failed. taskId: %s', id);
this.logger.error(
{ error },
'[updateByEvent] failed. taskId: %s',
taskId,
);
}
}
}