Compare commits

34 Commits: feat-api-f ... feat_the_p
| Author | SHA1 | Date |
|---|---|---|
|  | 7d84017f9e |  |
|  | a231a02c28 |  |
|  | 7e17de0f15 |  |
|  | 9d735c582c |  |
|  | b626eed859 |  |
|  | 5b5a657651 |  |
|  | a510f411a7 |  |
|  | 246623b5db |  |
|  | 37f8ae19be |  |
|  | 133439bb49 |  |
|  | 646f68d298 |  |
|  | 0c3310d3a5 |  |
|  | ead32a1204 |  |
|  | 20612d4301 |  |
|  | 7091f9df6a |  |
|  | 3ee41ece67 |  |
|  | b3a2b11db9 |  |
|  | 4041a6fd2a |  |
|  | 5a8b699e2f |  |
|  | 86c8bce9ea |  |
|  | 246c0bd8f8 |  |
|  | 24a2f80e46 |  |
|  | 0e0781c4c4 |  |
|  | a82f663354 |  |
|  | 752db8a0c5 |  |
|  | 46fb41f856 |  |
|  | b4307f05d6 |  |
|  | bb3efd3714 |  |
|  | 092cf9c418 |  |
|  | 039f4b6d15 |  |
|  | 22be1ffb33 |  |
|  | ef47f8049e |  |
|  | 032aa89b05 |  |
|  | da6bc9a068 |  |

.vscode/launch.json (0 changed lines; vendored; Normal file)

.vscode/settings.json (8 changed lines; vendored)
@@ -1,11 +1,19 @@
{
  "cSpell.words": [
    "Mutex",
    "Repos",
    "amqp",
    "boardcat",
    "errout",
    "fanout",
    "gitea",
    "golevelup",
    "lpush",
    "lrange",
    "metatype",
    "pmessage",
    "psubscribe",
    "rabbitmq",
    "rpop",
    "rpush"
  ]

@@ -14,5 +14,7 @@ db:
    port: 6379
    password:
    prefix: fennec
  rabbitmq:
    uri: 'amqp://fennec:fennec@192.168.31.194:5672'
workspaces:
  root: '/Users/ivanli/Projects/fennec/workspaces'

package-lock.json (2271 changed lines; generated)
File diff suppressed because it is too large

package.json (10 changed lines)
@@ -1,6 +1,6 @@
{
  "name": "fennec-be",
  "version": "0.0.1",
  "version": "0.1.0",
  "description": "",
  "author": "",
  "private": true,
@@ -21,6 +21,7 @@
    "test:e2e": "jest --config ./test/jest-e2e.json"
  },
  "dependencies": {
    "@golevelup/nestjs-rabbitmq": "^1.16.1",
    "@nestjs/bull": "^0.3.1",
    "@nestjs/common": "^7.5.1",
    "@nestjs/config": "^0.6.2",
@@ -28,7 +29,7 @@
    "@nestjs/graphql": "^7.9.8",
    "@nestjs/platform-express": "^7.5.1",
    "@nestjs/typeorm": "^7.1.5",
    "@neuralegion/class-sanitizer": "^0.3.2",
    "@types/amqplib": "^0.8.0",
    "@types/bull": "^3.15.0",
    "@types/ramda": "^0.27.38",
    "apollo-server-express": "^2.19.2",
@@ -40,10 +41,13 @@
    "debug": "^4.3.1",
    "graphql": "^15.5.0",
    "graphql-tools": "^7.0.2",
    "ioredis": "^4.25.0",
    "js-yaml": "^4.0.0",
    "nestjs-pino": "^1.4.0",
    "nestjs-redis": "^1.2.8",
    "observable-to-async-generator": "^1.0.1-rc",
    "pg": "^8.5.1",
    "pino-pretty": "^4.7.1",
    "ramda": "^0.27.1",
    "reflect-metadata": "^0.1.13",
    "rimraf": "^3.0.2",
@@ -58,9 +62,11 @@
    "@types/body-parser": "^1.19.0",
    "@types/debug": "^4.1.5",
    "@types/express": "^4.17.8",
    "@types/ioredis": "^4.22.2",
    "@types/jest": "^26.0.15",
    "@types/js-yaml": "^4.0.0",
    "@types/node": "^14.14.6",
    "@types/pino-pretty": "^4.7.0",
    "@types/supertest": "^2.0.10",
    "@typescript-eslint/eslint-plugin": "^4.6.1",
    "@typescript-eslint/parser": "^4.6.1",

@@ -15,12 +15,33 @@ import { WebhooksModule } from './webhooks/webhooks.module';
import { RawBodyMiddleware } from './commons/middlewares/raw-body.middleware';
import { GiteaWebhooksController } from './webhooks/gitea-webhooks.controller';
import { ParseBodyMiddleware } from './commons/middlewares/parse-body.middleware';
import { BullModule } from '@nestjs/bull';
import { LoggerModule } from 'nestjs-pino';

import pinoPretty from 'pino-pretty';

@Module({
  imports: [
    ConfigModule.forRoot({
      load: [configuration],
    }),
    LoggerModule.forRootAsync({
      imports: [ConfigModule],
      useFactory: (configService: ConfigService) => {
        const isDev = configService.get<'dev' | 'prod'>('env') === 'dev';
        return {
          pinoHttp: {
            prettyPrint: isDev
              ? {
                  levelFirst: true,
                }
              : false,
            prettifier: pinoPretty,
          },
        };
      },
      inject: [ConfigService],
    }),
    TypeOrmModule.forRootAsync({
      imports: [ConfigModule],
      useFactory: (configService: ConfigService) => ({
@@ -45,6 +66,17 @@ import { ParseBodyMiddleware } from './commons/middlewares/parse-body.middleware
      }),
      inject: [ConfigService],
    }),
    BullModule.forRootAsync({
      imports: [ConfigModule],
      useFactory: (configService: ConfigService) => ({
        redis: {
          host: configService.get<string>('db.redis.host', 'localhost'),
          port: configService.get<number>('db.redis.port', undefined),
          password: configService.get<string>('db.redis.password', undefined),
        },
      }),
      inject: [ConfigService],
    }),
    ProjectsModule,
    ReposModule,
    PipelinesModule,

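For orientation, the keys read by the factories above map onto the YAML config touched earlier in this compare (db.redis.*, rabbitmq.uri, workspaces.root). A minimal sketch of the assumed shape follows; the interface name and the exact optionality are illustrative, not taken from the repository.

```ts
// Hypothetical shape only — the real `configuration` factory lives elsewhere in the repo.
interface AppConfig {
  env: 'dev' | 'prod'; // read by the LoggerModule factory above to toggle pretty printing
  db: {
    redis: { host: string; port?: number; password?: string; prefix?: string };
  };
  rabbitmq: { uri: string }; // e.g. 'amqp://user:pass@host:5672'
  workspaces: { root: string };
}

// BullModule's factory resolves values with the defaults shown in the diff, e.g.:
// configService.get<string>('db.redis.host', 'localhost')
// configService.get<number>('db.redis.port', undefined)
```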
@@ -1,8 +1,10 @@
import { Module } from '@nestjs/common';
import { PasswordConverter } from './services/password-converter';
import { RedisMutexModule } from './redis-mutex/redis-mutex.module';

@Module({
  providers: [PasswordConverter],
  exports: [PasswordConverter],
  exports: [PasswordConverter, RedisMutexModule],
  imports: [RedisMutexModule],
})
export class CommonsModule {}

@@ -1,11 +1,11 @@
import { pick } from 'ramda';

export class ApplicationException extends Error {
  code: number;
  error: Error;

  constructor(
    message:
      | string
      | { error?: Error; message?: string | object; code?: number },
    message: string | { error?: Error; message?: string | any; code?: number },
  ) {
    if (message instanceof Object) {
      super();
@@ -18,4 +18,8 @@ export class ApplicationException extends Error {
      super((message as unknown) as any);
    }
  }

  toJSON() {
    return pick(['code', 'message'], this);
  }
}

@@ -1,5 +1,5 @@
import { ArgumentMetadata, Injectable, PipeTransform } from '@nestjs/common';
import { sanitize } from '@neuralegion/class-sanitizer/dist';
import { plainToClass } from 'class-transformer';

@Injectable()
export class SanitizePipe implements PipeTransform {
@@ -12,13 +12,11 @@ export class SanitizePipe implements PipeTransform {
      return value;
    }
    const constructorFunction = metadata.metatype;
    if (!constructorFunction) {
    if (!constructorFunction || value instanceof constructorFunction) {
      return value;
    }
    value = Object.assign(new constructorFunction(), value);
    try {
      sanitize(value);
      return value;
      return plainToClass(constructorFunction, value);
    } catch (err) {
      console.error(err);
      throw err;

src/commons/redis-mutex/redis-mutex.module.ts (10 changed lines; Normal file)
@@ -0,0 +1,10 @@
import { Module } from '@nestjs/common';
import { RedisMutexService } from './redis-mutex.service';
import { RedisModule } from 'nestjs-redis';

@Module({
  imports: [RedisModule],
  providers: [RedisMutexService],
  exports: [RedisMutexService],
})
export class RedisMutexModule {}

src/commons/redis-mutex/redis-mutex.service.spec.ts (18 changed lines; Normal file)
@@ -0,0 +1,18 @@
import { Test, TestingModule } from '@nestjs/testing';
import { RedisMutexService } from './redis-mutex.service';

describe('RedisMutexService', () => {
  let service: RedisMutexService;

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [RedisMutexService],
    }).compile();

    service = module.get<RedisMutexService>(RedisMutexService);
  });

  it('should be defined', () => {
    expect(service).toBeDefined();
  });
});

src/commons/redis-mutex/redis-mutex.service.ts (71 changed lines; Normal file)
@@ -0,0 +1,71 @@
import { Injectable } from '@nestjs/common';
import { RedisService } from 'nestjs-redis';
import * as uuid from 'uuid';
import { ApplicationException } from '../exceptions/application.exception';

export interface RedisMutexOption {
  /**
   * seconds
   */
  expires?: number;
  /**
   * seconds
   */
  timeout?: number | null;
  /**
   * milliseconds
   */
  retryDelay?: number;
}

@Injectable()
export class RedisMutexService {
  constructor(private readonly redisClient: RedisService) {}

  public async lock(
    key: string,
    { expires = 100, timeout = 10, retryDelay = 100 }: RedisMutexOption = {
      expires: 100,
      timeout: 10,
      retryDelay: 100,
    },
  ) {
    const redisKey = `${'mutex-lock'}:${key}`;
    const redis = this.redisClient.getClient();
    const value = uuid.v4();
    const timeoutAt = timeout ? Date.now() + timeout * 1000 : null;

    while (
      !(await redis
        .set(redisKey, value, 'EX', expires, 'NX')
        .then(() => true)
        .catch(() => false))
    ) {
      if (timeoutAt && timeoutAt > Date.now()) {
        throw new ApplicationException('lock timeout');
      }
      await new Promise((resolve) => setTimeout(resolve, retryDelay));
    }

    const renewTimer = setInterval(() => {
      redis.expire(redisKey, expires);
    }, (expires * 1000) / 2);

    return async () => {
      clearInterval(renewTimer);
      await redis.eval(
        `
      if redis.call("get", KEYS[1]) == ARGV[1]
      then
          return redis.call("del", KEYS[1])
      else
          return 0
      end
    `,
        1,
        redisKey,
        value,
      );
    };
  }
}

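A minimal usage sketch for the new RedisMutexService; the calling service and lock key below are illustrative, not part of this change. `lock()` resolves to a release callback that should be awaited in a `finally` block, and a renewal timer keeps extending the Redis key until that callback runs. One thing reviewers may want to double-check: the retry loop throws "lock timeout" when `timeoutAt > Date.now()`, i.e. while time still remains, which looks inverted.

```ts
import { Injectable } from '@nestjs/common';
import { RedisMutexService } from './redis-mutex/redis-mutex.service';

@Injectable()
export class DeployService {
  constructor(private readonly redisMutexService: RedisMutexService) {}

  async deploy(projectId: string) {
    // Acquire the distributed lock; lock() resolves to the unlock function.
    const unlock = await this.redisMutexService.lock(`deploy:${projectId}`, {
      expires: 60,     // seconds the Redis key lives between renewals
      timeout: 10,     // seconds to keep retrying before giving up
      retryDelay: 200, // milliseconds between SET NX attempts
    });
    try {
      // ...critical section...
    } finally {
      await unlock(); // the Lua script deletes the key only if we still own it
    }
  }
}
```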
@@ -53,8 +53,22 @@ export class BaseDbService<Entity extends AppBaseEntity> extends TypeormHelper {
  async isDuplicateEntityForUpdate<Dto extends Entity>(
    id: string,
    dto: Partial<Dto>,
    extendsFields?: Array<keyof Dto & string>,
  ): Promise<false | never>;
  async isDuplicateEntityForUpdate<Dto extends Entity>(
    old: Entity,
    dto: Partial<Dto>,
    extendsFields?: Array<keyof Dto & string>,
  ): Promise<false | never>;
  async isDuplicateEntityForUpdate<Dto extends Entity>(
    id: string | Entity,
    dto: Partial<Dto>,
    extendsFields: Array<keyof Dto & string> = [],
  ): Promise<false | never> {
    if (typeof id !== 'string') {
      dto = Object.assign({}, id, dto);
      id = id.id;
    }
    const qb = this.repository.createQueryBuilder('entity');
    const compareFields = this.getCompareFields(dto, [
      ...this.uniqueFields,

src/commons/utils/rabbit-mq.ts (21 changed lines; Normal file)
@@ -0,0 +1,21 @@
import { hostname } from 'os';

export function getInstanceName() {
  return hostname();
}

export function getSelfInstanceRouteKey(key: string) {
  return getAppInstanceRouteKey(key, getInstanceName());
}

export function getAppInstanceRouteKey(key: string, appInstance?: string) {
  return appInstance ? `${key}.${appInstance}` : key;
}

export function getSelfInstanceQueueKey(key: string) {
  return getAppInstanceQueueKey(key, getInstanceName());
}

export function getAppInstanceQueueKey(key: string, appInstance?: string) {
  return appInstance ? `${key}.${appInstance}` : key;
}

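A quick illustration of what these helpers produce; the hostname and key names are example values only, not constants from the repo.

```ts
import {
  getAppInstanceRouteKey,
  getSelfInstanceQueueKey,
} from './rabbit-mq';

// On a host named "worker-1" (example), the per-instance keys expand to:
getAppInstanceRouteKey('pipeline-task.log');              // 'pipeline-task.log'
getAppInstanceRouteKey('pipeline-task.log', 'worker-1');  // 'pipeline-task.log.worker-1'
getSelfInstanceQueueKey('write-pipeline-task-log');       // 'write-pipeline-task-log.worker-1'
```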
@@ -11,3 +11,5 @@ registerEnumType(TaskStatuses, {
  name: 'TaskStatuses',
  description: '任务状态',
});

export const terminalTaskStatuses = [TaskStatuses.success, TaskStatuses.failed];

src/pipeline-tasks/models/pipeline-task-event.ts (25 changed lines; Normal file)
@@ -0,0 +1,25 @@
import { Field, ObjectType } from '@nestjs/graphql';
import { PipelineUnits } from '../enums/pipeline-units.enum';
import { TaskStatuses } from '../enums/task-statuses.enum';
import { Type } from 'class-transformer';

@ObjectType()
export class PipelineTaskEvent {
  @Field()
  taskId: string;
  @Field()
  pipelineId: string;
  @Field()
  projectId: string;
  @Field(() => PipelineUnits, { nullable: true })
  unit: PipelineUnits | null;
  @Field()
  @Type(() => Date)
  emittedAt: Date;
  @Field()
  message: string;
  @Field()
  messageType: 'stdout' | 'stderr' | 'stdin';
  @Field(() => TaskStatuses)
  status: TaskStatuses;
}

@@ -1,6 +1,7 @@
import { PipelineTask } from './../pipeline-task.entity';
import { PipelineUnits } from '../enums/pipeline-units.enum';
import { Field, HideField, ObjectType } from '@nestjs/graphql';
import { Type } from 'class-transformer';

@ObjectType()
export class PipelineTaskLogMessage {
@@ -9,6 +10,7 @@ export class PipelineTaskLogMessage {
  @Field(() => PipelineUnits, { nullable: true })
  unit?: PipelineUnits;
  @Field()
  @Type(() => Date)
  time: Date;
  @Field()
  message: string;

@@ -1,6 +1,7 @@
import { TaskStatuses } from '../enums/task-statuses.enum';
import { PipelineUnits } from '../enums/pipeline-units.enum';
import { Field, ObjectType } from '@nestjs/graphql';
import { Type } from 'class-transformer';

@ObjectType()
export class PipelineTaskLogs {
@@ -8,7 +9,9 @@ export class PipelineTaskLogs {
  unit: PipelineUnits;
  @Field(() => TaskStatuses)
  status: TaskStatuses;
  @Type(() => Date)
  startedAt?: Date;
  @Type(() => Date)
  endedAt?: Date;
  logs = '';
}

@@ -1,9 +1,10 @@
import { InputType, ObjectType } from '@nestjs/graphql';
import { Field, InputType, Int, ObjectType } from '@nestjs/graphql';
import { WorkUnit } from './work-unit.model';

@InputType('WorkUnitMetadataInput')
@ObjectType()
export class WorkUnitMetadata {
  @Field(() => Int)
  version = 1;
  units: WorkUnit[];
}

src/pipeline-tasks/pipeline-task-flush.service.spec.ts (88 changed lines; Normal file)
@@ -0,0 +1,88 @@
import { AmqpConnection } from '@golevelup/nestjs-rabbitmq';
import { Test, TestingModule } from '@nestjs/testing';
import { RedisService } from 'nestjs-redis';
import { PipelineTaskFlushService } from './pipeline-task-flush.service';
import { PipelineTaskEvent } from './models/pipeline-task-event';
import { TaskStatuses } from './enums/task-statuses.enum';
import {
  EXCHANGE_PIPELINE_TASK_TOPIC,
  ROUTE_PIPELINE_TASK_DONE,
} from './pipeline-tasks.constants';

describe('PipelineTaskFlushService', () => {
  let service: PipelineTaskFlushService;
  let redisService: RedisService;
  let amqpConnection: AmqpConnection;

  beforeEach(async () => {
    const redisClient = {
      rpush: jest.fn(() => Promise.resolve()),
      lrange: jest.fn(() => Promise.resolve()),
      expire: jest.fn(() => Promise.resolve()),
    };
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        PipelineTaskFlushService,
        {
          provide: RedisService,
          useValue: {
            getClient() {
              return redisClient;
            },
          },
        },
        {
          provide: AmqpConnection,
          useValue: {
            request: jest.fn(() => Promise.resolve()),
          },
        },
      ],
    }).compile();

    service = module.get<PipelineTaskFlushService>(PipelineTaskFlushService);
    redisService = module.get<RedisService>(RedisService);
    amqpConnection = module.get<AmqpConnection>(AmqpConnection);
  });

  it('should be defined', () => {
    expect(service).toBeDefined();
  });

  describe('write', () => {
    const amqpMsg = {
      properties: { headers: { sender: 'test' } },
    } as any;
    it('normal', async () => {
      const testEvent = new PipelineTaskEvent();
      testEvent.taskId = 'test';
      testEvent.status = TaskStatuses.working;
      const rpush = jest.spyOn(redisService.getClient(), 'rpush');
      const request = jest.spyOn(amqpConnection, 'request');
      await service.write(testEvent, amqpMsg);
      expect(rpush).toBeCalledTimes(1);
      expect(rpush.mock.calls[0][0]).toEqual('p-task:log:test');
      expect(rpush.mock.calls[0][1]).toEqual(JSON.stringify(testEvent));
      expect(request).toBeCalledTimes(1);
    });
    it('event for which task done', async () => {
      const testEvent = new PipelineTaskEvent();
      testEvent.taskId = 'test';
      testEvent.status = TaskStatuses.success;
      const rpush = jest.spyOn(redisService.getClient(), 'rpush');
      const request = jest.spyOn(amqpConnection, 'request');
      await service.write(testEvent, amqpMsg);
      expect(rpush).toBeCalledTimes(1);
      expect(request).toBeCalledTimes(1);
      expect(request.mock.calls[0][0]).toMatchObject({
        exchange: EXCHANGE_PIPELINE_TASK_TOPIC,
        routingKey: ROUTE_PIPELINE_TASK_DONE,
        payload: {
          taskId: 'test',
          status: TaskStatuses.success,
          runOn: 'test',
        },
      });
    });
  });
});

src/pipeline-tasks/pipeline-task-flush.service.ts (66 changed lines; Normal file)
@@ -0,0 +1,66 @@
import { AmqpConnection, RabbitSubscribe } from '@golevelup/nestjs-rabbitmq';
import { Injectable } from '@nestjs/common';
import { ConsumeMessage } from 'amqplib';
import { deserialize } from 'class-transformer';
import { RedisService } from 'nestjs-redis';
import { isNil } from 'ramda';
import { getSelfInstanceQueueKey } from '../commons/utils/rabbit-mq';
import { PipelineTaskEvent } from './models/pipeline-task-event';
import {
  EXCHANGE_PIPELINE_TASK_TOPIC,
  ROUTE_PIPELINE_TASK_DONE,
} from './pipeline-tasks.constants';
import {
  EXCHANGE_PIPELINE_TASK_FANOUT,
  ROUTE_PIPELINE_TASK_LOG,
  QUEUE_WRITE_PIPELINE_TASK_LOG,
} from './pipeline-tasks.constants';

@Injectable()
export class PipelineTaskFlushService {
  constructor(
    private readonly redisService: RedisService,
    private readonly amqpConnection: AmqpConnection,
  ) {}

  @RabbitSubscribe({
    exchange: EXCHANGE_PIPELINE_TASK_FANOUT,
    routingKey: ROUTE_PIPELINE_TASK_LOG,
    queue: getSelfInstanceQueueKey(QUEUE_WRITE_PIPELINE_TASK_LOG),
    queueOptions: {
      autoDelete: true,
      durable: true,
    },
  })
  async write(message: PipelineTaskEvent, amqpMsg: ConsumeMessage) {
    const client = this.redisService.getClient();
    await client.rpush(this.getKey(message.taskId), JSON.stringify(message));
    await client.expire(this.getKey(message.taskId), 600); // ten minutes
    if (isNil(message.unit)) {
      try {
        await this.amqpConnection.request({
          exchange: EXCHANGE_PIPELINE_TASK_TOPIC,
          routingKey: ROUTE_PIPELINE_TASK_DONE,
          payload: {
            taskId: message.taskId,
            status: message.status,
            runOn: amqpMsg.properties.headers.sender,
          },
        });
      } catch (error) {
        console.log(error);
      }
    }
  }

  async read(taskId: string) {
    const raw = await this.redisService
      .getClient()
      .lrange(this.getKey(taskId), 0, -1);
    return raw.map((it) => deserialize(PipelineTaskEvent, it));
  }

  private getKey(taskId: string) {
    return `p-task:log:${taskId}`;
  }
}

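A hedged sketch of how another provider might consume the new flush service; the injecting class and method names below are illustrative, only `read()` and the 600-second retention come from the diff.

```ts
import { Injectable } from '@nestjs/common';
import { PipelineTaskFlushService } from './pipeline-task-flush.service';

@Injectable()
export class PipelineTaskLogReader {
  constructor(private readonly flushService: PipelineTaskFlushService) {}

  // Replays everything written for a task within the 600 s retention window.
  async collect(taskId: string) {
    const events = await this.flushService.read(taskId); // PipelineTaskEvent[]
    // Events with unit === null are the task-level markers that also triggered
    // the ROUTE_PIPELINE_TASK_DONE request inside write().
    return events;
  }
}
```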
@@ -1,25 +0,0 @@
import { Test, TestingModule } from '@nestjs/testing';
import { PipelineTaskLogsService } from './pipeline-task-logs.service';
import { RedisService } from 'nestjs-redis';

describe('PipelineTaskLogsService', () => {
  let service: PipelineTaskLogsService;

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        PipelineTaskLogsService,
        {
          provide: RedisService,
          useValue: {},
        },
      ],
    }).compile();

    service = module.get<PipelineTaskLogsService>(PipelineTaskLogsService);
  });

  it('should be defined', () => {
    expect(service).toBeDefined();
  });
});

@@ -1,78 +0,0 @@
import { Injectable } from '@nestjs/common';
import { log } from 'console';
import { PubSub } from 'graphql-subscriptions';
import { RedisService } from 'nestjs-redis';
import { find, omit, propEq } from 'ramda';
import { PipelineUnits } from './enums/pipeline-units.enum';
import { TaskStatuses } from './enums/task-statuses.enum';
import { PipelineTaskLogMessage } from './models/pipeline-task-log-message.module';
import { PipelineTaskLogs } from './models/pipeline-task-logs.model';
import { PipelineTask } from './pipeline-task.entity';

const LOG_TIMEOUT_SECONDS = 10_000;

@Injectable()
export class PipelineTaskLogsService {
  constructor(private readonly redisService: RedisService) {}

  pubSub = new PubSub();

  get redis() {
    return this.redisService.getClient();
  }

  getKeys(task: PipelineTask) {
    return `ptl:${task.id}`;
  }

  async recordLog(log: PipelineTaskLogMessage) {
    const logDto = omit(['task'], log);
    await Promise.all([
      this.pubSub.publish(this.getKeys(log.task), logDto),
      this.redis
        .expire(this.getKeys(log.task), LOG_TIMEOUT_SECONDS)
        .then(() =>
          this.redis.rpush(this.getKeys(log.task), JSON.stringify(logDto)),
        ),
    ]);
  }

  async readLog(task: PipelineTask): Promise<PipelineTaskLogMessage[]> {
    return await this.redis.lrange(this.getKeys(task), 0, -1).then((items) =>
      items.map((item) => {
        const log = JSON.parse(item) as PipelineTaskLogMessage;
        log.task = task;
        log.time = new Date(log.time);
        return log;
      }),
    );
  }

  async readLogsAsPipelineTaskLogs(
    task: PipelineTask,
  ): Promise<PipelineTaskLogs[]> {
    const logs = await this.readLog(task);
    const taskLogs: PipelineTaskLogs[] = [];
    for (const log of logs) {
      const taskLog = find<PipelineTaskLogs>(
        propEq('unit', log.unit),
        taskLogs,
      );
      if (!taskLog) {
        taskLogs.push({
          unit: (log.unit as unknown) as PipelineUnits,
          status: TaskStatuses.working,
          startedAt: log.time,
          logs: log.message,
        });
      } else {
        taskLog.logs += log.message;
      }
    }
    return taskLogs;
  }

  watchLogs(task: PipelineTask) {
    return this.pubSub.asyncIterator(this.getKeys(task));
  }
}

@@ -1,242 +0,0 @@
import { Test, TestingModule } from '@nestjs/testing';
import { Job } from 'bull';
import { join } from 'path';
import { ReposService } from '../repos/repos.service';
import { PipelineUnits } from './enums/pipeline-units.enum';
import { PipelineTaskConsumer } from './pipeline-task.consumer';
import { PipelineTask } from './pipeline-task.entity';
import { PipelineTasksService } from './pipeline-tasks.service';
import { PipelineTaskLogMessage } from './models/pipeline-task-log-message.module';
import { Pipeline } from '../pipelines/pipeline.entity';
import { Project } from '../projects/project.entity';
import { TaskStatuses } from './enums/task-statuses.enum';
import { PipelineTaskLogsService } from './pipeline-task-logs.service';
import { ApplicationException } from '../commons/exceptions/application.exception';

describe('PipelineTaskConsumer', () => {
  let consumer: PipelineTaskConsumer;
  let tasksService: PipelineTasksService;
  let logsService: PipelineTaskLogsService;
  const getJob = () =>
    ({
      data: {
        pipelineId: 'test',
        units: [PipelineUnits.checkout, PipelineUnits.test],
      },
    } as Job<PipelineTask>);

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        {
          provide: PipelineTasksService,
          useValue: {
            doNextTask: () => undefined,
            updateTask: async (value) => value,
          },
        },
        {
          provide: ReposService,
          useValue: {
            getWorkspaceRootByTask: () => 'workspace-root',
            checkout: async () => undefined,
          },
        },
        {
          provide: PipelineTaskLogsService,
          useValue: {
            recordLog: async () => undefined,
            readLogsAsPipelineTaskLogs: async () => [],
          },
        },
        PipelineTaskConsumer,
      ],
    }).compile();

    tasksService = module.get(PipelineTasksService);
    logsService = module.get(PipelineTaskLogsService);
    consumer = module.get(PipelineTaskConsumer);
  });

  it('should be defined', () => {
    expect(consumer).toBeDefined();
  });

  describe('onCompleted', () => {
    it('should call doNextTask()', () => {
      const job = getJob();
      const doNextTask = jest.spyOn(tasksService, 'doNextTask');
      consumer.onCompleted(job);
      expect(doNextTask).toHaveBeenCalledTimes(1);
    });
  });

  describe('runScript', () => {
    let logText: string;
    let errorText: string;
    let recordLog: jest.SpyInstance;
    beforeEach(() => {
      logText = '';
      errorText = '';
      recordLog = jest
        .spyOn(logsService, 'recordLog')
        .mockImplementation(async (log: PipelineTaskLogMessage) => {
          logText += log.message;
          if (log.isError) {
            errorText += log.message;
          }
        });
    });
    it('should success and log right message', async () => {
      await consumer.runScript(
        'node one-second-work.js',
        join(__dirname, '../../test/data'),
      );
      expect(logText).toMatch(/10.+20.+30.+40.+50.+60.+70.+80.+90/s);
      expect(recordLog).toHaveBeenCalledTimes(10);
      expect(
        ((recordLog.mock.calls[8][0] as unknown) as PipelineTaskLogMessage)
          .message,
      ).toMatch(/^90/);
    });
    it('should failed and log right message', async () => {
      await expect(
        consumer.runScript(
          'node bad-work.js',
          join(__dirname, '../../test/data'),
        ),
      ).rejects.toThrowError(/exec script failed/);
      expect(errorText).toMatch(/Error Message/);
      const logs = recordLog.mock.calls
        .map((call) => ((call[0] as unknown) as PipelineTaskLogMessage).message)
        .join('');
      expect(logs).toMatch(/10.+20.+30.+40.+50/s);
    });
    it('should log with task', async () => {
      const task = new PipelineTask();
      task.id = 'test';

      const recordLog = jest.spyOn(logsService, 'recordLog');
      await expect(
        consumer.runScript(
          'node bad-work.js',
          join(__dirname, '../../test/data'),
          task,
        ),
      ).rejects.toThrowError(/exec script failed/);

      expect(errorText).toMatch(/Error Message 2/);
      expect(
        ((recordLog.mock.calls[2][0] as unknown) as PipelineTaskLogMessage)
          .task,
      ).toMatchObject(task);
    });
  });

  describe('doTask', () => {
    let task: PipelineTask;

    beforeEach(() => {
      task = new PipelineTask();
      task.id = 'test-id';
      task.logs = [];
      task.pipeline = new Pipeline();
      task.pipeline.workUnitMetadata = {
        version: 1,
        units: [
          {
            type: PipelineUnits.checkout,
            scripts: [],
          },
          {
            type: PipelineUnits.installDependencies,
            scripts: ["echo ' Hello, Fennec!'"],
          },
        ],
      };
      task.units = task.pipeline.workUnitMetadata.units.map(
        (unit) => unit.type,
      );
      task.pipeline.project = new Project();
      task.pipeline.project.name = 'test-project';
    });

    it('success and update task on db', async () => {
      const job: Job = ({
        data: task,
        update: jest.fn().mockImplementation(() => undefined),
      } as unknown) as Job;

      jest
        .spyOn(consumer, 'runScript')
        .mockImplementation(async () => undefined);
      const updateTask = jest.spyOn(tasksService, 'updateTask');

      await consumer.doTask(job);

      expect(updateTask).toHaveBeenCalledTimes(2);
      expect(updateTask.mock.calls[0][0].startedAt).toBeDefined();
      expect(updateTask.mock.calls[1][0].endedAt).toBeDefined();
      expect(updateTask.mock.calls[1][0].status).toEqual(TaskStatuses.success);
    });
    it('failed and update task on db', async () => {
      const job: Job = ({
        data: task,
        update: jest.fn().mockImplementation(() => undefined),
      } as unknown) as Job;

      jest.spyOn(consumer, 'runScript').mockImplementation(async () => {
        throw new ApplicationException('exec script failed');
      });
      const updateTask = jest.spyOn(tasksService, 'updateTask');

      await consumer.doTask(job);

      expect(updateTask).toHaveBeenCalledTimes(2);
      expect(updateTask.mock.calls[0][0].startedAt).toBeDefined();
      expect(updateTask.mock.calls[1][0].endedAt).toBeDefined();
      expect(updateTask.mock.calls[1][0].status).toEqual(TaskStatuses.failed);
    });
    it('should do all task', async () => {
      const job: Job = ({
        data: task,
        update: jest.fn().mockImplementation(() => undefined),
      } as unknown) as Job;

      const runScript = jest
        .spyOn(consumer, 'runScript')
        .mockImplementation(async () => undefined);
      const updateTask = jest.spyOn(tasksService, 'updateTask');

      await consumer.doTask(job);

      expect(runScript).toHaveBeenCalledTimes(1);
      expect(updateTask).toHaveBeenCalledTimes(2);
      const taskDto: PipelineTask = updateTask.mock.calls[0][0];
      expect(taskDto.logs).toHaveLength(2);
      expect(taskDto.logs[0].status).toEqual(TaskStatuses.success);
      expect(taskDto.logs[0].unit).toEqual(PipelineUnits.checkout);
    });
    it('should log error message', async () => {
      const job: Job = ({
        data: task,
        update: jest.fn().mockImplementation(() => undefined),
      } as unknown) as Job;

      const runScript = jest
        .spyOn(consumer, 'runScript')
        .mockImplementation(async () => {
          throw new Error('bad message');
        });
      const updateTask = jest.spyOn(tasksService, 'updateTask');

      await consumer.doTask(job);

      expect(updateTask).toHaveBeenCalledTimes(2);
      const taskDto: PipelineTask = updateTask.mock.calls[0][0];
      expect(taskDto.logs).toHaveLength(2);
      expect(taskDto.logs[0].status).toEqual(TaskStatuses.success);
      expect(taskDto.logs[1].status).toEqual(TaskStatuses.failed);
    });
  });
});

@@ -1,147 +0,0 @@
import { PipelineTaskLogs } from './models/pipeline-task-logs.model';
import { ReposService } from './../repos/repos.service';
import {
  OnQueueCompleted,
  OnQueueFailed,
  Process,
  Processor,
} from '@nestjs/bull';
import { Job } from 'bull';
import { spawn } from 'child_process';
import { PipelineTask } from './pipeline-task.entity';
import { PIPELINE_TASK_QUEUE } from './pipeline-tasks.constants';
import { PipelineTasksService } from './pipeline-tasks.service';
import { ApplicationException } from '../commons/exceptions/application.exception';
import { PipelineUnits } from './enums/pipeline-units.enum';
import { PipelineTaskLogMessage } from './models/pipeline-task-log-message.module';
import { TaskStatuses } from './enums/task-statuses.enum';
import { PipelineTaskLogsService } from './pipeline-task-logs.service';
import debug from 'debug';

const log = debug('fennec:pipeline-tasks:consumer');

@Processor(PIPELINE_TASK_QUEUE)
export class PipelineTaskConsumer {
  constructor(
    private readonly service: PipelineTasksService,
    private readonly reposService: ReposService,
    private readonly logsService: PipelineTaskLogsService,
  ) {}
  @Process()
  async doTask(job: Job<PipelineTask>) {
    let task = job.data;
    if (task.pipeline.workUnitMetadata.version !== 1) {
      throw new ApplicationException(
        'work unit metadata version is not match.',
      );
    }

    task.startedAt = new Date();
    task.status = TaskStatuses.working;
    task = await this.service.updateTask(task);
    log('start job');
    await job.update(task);

    const workspaceRoot = this.reposService.getWorkspaceRootByTask(task);

    const units = task.units.map(
      (type) =>
        task.pipeline.workUnitMetadata.units.find(
          (unit) => unit.type === type,
        ) ?? { type: type, scripts: [] },
    );

    log('task have [%o] units', units);
    try {
      for (const unit of units) {
        const unitLog = new PipelineTaskLogs();
        unitLog.unit = unit.type;
        unitLog.startedAt = new Date();
        log('curr unit is %s', unit.type);
        try {
          // 检出代码前执行 git checkout
          if (unit.type === PipelineUnits.checkout) {
            log('begin checkout');
            await this.reposService.checkout(task, workspaceRoot);
            unitLog.status = TaskStatuses.success;
            log('end checkout');
          }
          for (const script of unit.scripts) {
            unitLog.logs += `[RUN SCRIPT] ${script}`;
            log('begin runScript %s', script);
            await this.runScript(script, workspaceRoot, task, unit.type);
            log('end runScript %s', script);
          }
          unitLog.status = TaskStatuses.success;
        } catch (err) {
          unitLog.status = TaskStatuses.failed;
          unitLog.logs += err.message;
          throw err;
        } finally {
          unitLog.endedAt = new Date();
          unitLog.logs = await this.logsService
            .readLogsAsPipelineTaskLogs(task)
            .then(
              (taskLogs) =>
                taskLogs.find((tl) => tl.unit === unit.type)?.logs ?? '',
            );
          task.logs.push(unitLog);
          await job.update(task);
        }
      }

      task.status = TaskStatuses.success;
    } catch (err) {
      task.status = TaskStatuses.failed;
      log('task is failed', err);
    } finally {
      task.endedAt = new Date();
      task = await this.service.updateTask(task);
      await job.update(task);
    }
  }

  async runScript(
    script: string,
    workspaceRoot: string,
    task?: PipelineTask,
    unit?: PipelineUnits,
  ): Promise<void> {
    return new Promise((resolve, reject) => {
      const sub = spawn(script, {
        shell: true,
        cwd: workspaceRoot,
      });
      sub.stderr.on('data', (data: Buffer) => {
        const str = data.toString();
        this.logsService.recordLog(
          PipelineTaskLogMessage.create(task, unit, str, true),
        );
      });
      sub.stdout.on('data', (data: Buffer) => {
        const str = data.toString();
        this.logsService.recordLog(
          PipelineTaskLogMessage.create(task, unit, str, false),
        );
      });
      sub.addListener('close', (code) => {
        if (code === 0) {
          return resolve();
        }
        return reject(new ApplicationException('exec script failed'));
      });
    });
  }

  @OnQueueCompleted()
  onCompleted(job: Job<PipelineTask>) {
    log('queue onCompleted');
    this.service.doNextTask(job.data.pipeline);
  }

  @OnQueueFailed()
  onFailed(job: Job<PipelineTask>) {
    log('queue onFailed');
    this.service.doNextTask(job.data.pipeline);
  }
}

@@ -1,11 +1,16 @@
import { AppBaseEntity } from './../commons/entities/app-base-entity';
import { Field, ObjectType } from '@nestjs/graphql';
import { Column, Entity, ManyToOne } from 'typeorm';
import { ObjectType } from '@nestjs/graphql';
import { Column, Entity, ManyToOne, ValueTransformer } from 'typeorm';
import { Pipeline } from '../pipelines/pipeline.entity';
import { PipelineTaskLogs } from './models/pipeline-task-logs.model';
import { TaskStatuses } from './enums/task-statuses.enum';
import { PipelineUnits } from './enums/pipeline-units.enum';
import { plainToClass } from 'class-transformer';

const logsTransformer: ValueTransformer = {
  from: (value) => plainToClass(PipelineTaskLogs, value),
  to: (value) => value,
};
@ObjectType()
@Entity()
export class PipelineTask extends AppBaseEntity {
@@ -20,7 +25,7 @@ export class PipelineTask extends AppBaseEntity {
  @Column({ type: 'enum', enum: PipelineUnits, array: true })
  units: PipelineUnits[];

  @Column({ type: 'jsonb', default: '[]' })
  @Column({ type: 'jsonb', default: '[]', transformer: logsTransformer })
  logs: PipelineTaskLogs[];

  @Column({ type: 'enum', enum: TaskStatuses, default: TaskStatuses.pending })
@@ -31,4 +36,7 @@ export class PipelineTask extends AppBaseEntity {

  @Column({ nullable: true })
  endedAt?: Date;

  @Column({ nullable: true })
  runOn: string;
}

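The new column transformer only rehydrates on read; here is a small sketch of the behaviour it encodes. The sample row data is invented for illustration.

```ts
import { plainToClass } from 'class-transformer';
import { PipelineTaskLogs } from './models/pipeline-task-logs.model';

// from(): raw jsonb rows come back from Postgres as plain objects...
const raw = [{ unit: 'checkout', status: 'success', logs: '' }];
const hydrated = plainToClass(PipelineTaskLogs, raw);
// ...and become PipelineTaskLogs instances, so @Type(() => Date) on
// startedAt/endedAt can revive stored strings into Date objects.

// to(): values are written to the jsonb column unchanged.
```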
									
								
src/pipeline-tasks/pipeline-task.logger.spec.ts (74 changed lines; Normal file)
@@ -0,0 +1,74 @@
import { Test, TestingModule } from '@nestjs/testing';
import { PipelineTaskLogger } from './pipeline-task.logger';
import { PipelineTaskEvent } from './models/pipeline-task-event';
import { take, timeout } from 'rxjs/operators';

describe('PipelineTaskRunner', () => {
  let logger: PipelineTaskLogger;
  let module: TestingModule;

  beforeEach(async () => {
    module = await Test.createTestingModule({
      providers: [PipelineTaskLogger],
    }).compile();

    logger = module.get(PipelineTaskLogger);
  });

  it('should be defined', () => {
    expect(logger).toBeDefined();
  });

  describe('getMessage$', () => {
    it('normal', async () => {
      const event = new PipelineTaskEvent();
      event.taskId = 'test';
      const emittedAt = new Date();
      event.emittedAt = emittedAt.toISOString() as any;
      const message$ = logger.getMessage$('test');

      let receiveEvent;
      message$.pipe(take(1)).subscribe((value) => (receiveEvent = value));
      await logger.handleEvent(event);
      expect(receiveEvent).toMatchObject({
        ...event,
        emittedAt,
      });
    });
    it('no match', async () => {
      const event = new PipelineTaskEvent();
      event.taskId = 'test';
      const message$ = logger.getMessage$('other');
      setTimeout(() => {
        logger.handleEvent(event);
      });
      expect(message$.pipe(take(1), timeout(100)).toPromise()).rejects.toMatch(
        'timeout',
      );
    });
    it('multiple subscribers', async () => {
      const event = new PipelineTaskEvent();
      event.taskId = 'test';
      const message$ = logger.getMessage$('test');
      const message2$ = logger.getMessage$('test');
      setTimeout(() => {
        logger.handleEvent(event);
      });
      expect(message$.pipe(take(1), timeout(100)).toPromise()).resolves.toEqual(
        event,
      );
      expect(
        message2$.pipe(take(1), timeout(100)).toPromise(),
      ).resolves.toEqual(event);
    });
  });

  describe('onModuleDestroy', () => {
    it('complete observable when destroying module', async () => {
      logger.onModuleDestroy();
      await expect(
        (logger as any).message$.toPromise(),
      ).resolves.toBeUndefined();
    });
  });
});

							
								
								
									
37 src/pipeline-tasks/pipeline-task.logger.ts Normal file
@@ -0,0 +1,37 @@
import { RabbitSubscribe } from '@golevelup/nestjs-rabbitmq';
import { Injectable, OnModuleDestroy } from '@nestjs/common';
import { plainToClass } from 'class-transformer';
import { Observable, Subject } from 'rxjs';
import { filter } from 'rxjs/operators';
import { PipelineTaskEvent } from './models/pipeline-task-event';
import {
  EXCHANGE_PIPELINE_TASK_FANOUT,
  QUEUE_HANDLE_PIPELINE_TASK_LOG_EVENT,
  ROUTE_PIPELINE_TASK_LOG,
} from './pipeline-tasks.constants';

@Injectable()
export class PipelineTaskLogger implements OnModuleDestroy {
  private readonly messageSubject = new Subject<PipelineTaskEvent>();
  private readonly message$: Observable<PipelineTaskEvent> = this.messageSubject.pipe();

  @RabbitSubscribe({
    exchange: EXCHANGE_PIPELINE_TASK_FANOUT,
    routingKey: ROUTE_PIPELINE_TASK_LOG,
    queue: QUEUE_HANDLE_PIPELINE_TASK_LOG_EVENT,
    queueOptions: {
      autoDelete: true,
    },
  })
  async handleEvent(message: PipelineTaskEvent) {
    this.messageSubject.next(plainToClass(PipelineTaskEvent, message));
  }

  getMessage$(taskId: string) {
    return this.message$.pipe(filter((event) => event.taskId === taskId));
  }

  onModuleDestroy() {
    this.messageSubject.complete();
  }
}
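The PipelineTaskEvent model that the logger deserializes here is not included in this compare view. A plausible sketch, inferred from how PipelineTaskRunner.emitEvent (further down in this diff) builds events and from the spec above expecting emittedAt to come back as a Date, might look like the following; the @Type decorator and the import paths are assumptions, not code from this branch:

```ts
// Hypothetical sketch of src/pipeline-tasks/models/pipeline-task-event.ts
import { Type } from 'class-transformer';
import { PipelineUnits } from '../enums/pipeline-units.enum';
import { TaskStatuses } from '../enums/task-statuses.enum';

export class PipelineTaskEvent {
  taskId: string;
  pipelineId: string;
  projectId: string;
  unit: PipelineUnits | null;
  // assumption: lets plainToClass revive ISO strings back into Date instances
  @Type(() => Date)
  emittedAt: Date;
  message: string;
  messageType: 'stderr' | 'stdout' | 'stdin';
  status: TaskStatuses;
}
```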
							
								
								
									
326 src/pipeline-tasks/pipeline-task.runner.spec.ts Normal file
@@ -0,0 +1,326 @@
 | 
			
		||||
import { Test, TestingModule } from '@nestjs/testing';
 | 
			
		||||
import { ReposService } from '../repos/repos.service';
 | 
			
		||||
import { PipelineUnits } from './enums/pipeline-units.enum';
 | 
			
		||||
import { PipelineTask } from './pipeline-task.entity';
 | 
			
		||||
import { Pipeline } from '../pipelines/pipeline.entity';
 | 
			
		||||
import { Project } from '../projects/project.entity';
 | 
			
		||||
import { TaskStatuses } from './enums/task-statuses.enum';
 | 
			
		||||
import { getLoggerToken, PinoLogger } from 'nestjs-pino';
 | 
			
		||||
import { PipelineTaskRunner } from './pipeline-task.runner';
 | 
			
		||||
import { WorkUnitMetadata } from './models/work-unit-metadata.model';
 | 
			
		||||
import { AmqpConnection } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
describe('PipelineTaskRunner', () => {
 | 
			
		||||
  let runner: PipelineTaskRunner;
 | 
			
		||||
  let reposService: ReposService;
 | 
			
		||||
 | 
			
		||||
  beforeEach(async () => {
 | 
			
		||||
    const module: TestingModule = await Test.createTestingModule({
 | 
			
		||||
      providers: [
 | 
			
		||||
        {
 | 
			
		||||
          provide: ReposService,
 | 
			
		||||
          useValue: {
 | 
			
		||||
            getWorkspaceRootByTask: () => 'workspace-root',
 | 
			
		||||
            checkout: async () => undefined,
 | 
			
		||||
          },
 | 
			
		||||
        },
 | 
			
		||||
        {
 | 
			
		||||
          provide: getLoggerToken(PipelineTaskRunner.name),
 | 
			
		||||
          useValue: new PinoLogger({}),
 | 
			
		||||
        },
 | 
			
		||||
        {
 | 
			
		||||
          provide: 'spawn',
 | 
			
		||||
          useValue: () => undefined,
 | 
			
		||||
        },
 | 
			
		||||
        PipelineTaskRunner,
 | 
			
		||||
        {
 | 
			
		||||
          provide: AmqpConnection,
 | 
			
		||||
          useValue: {},
 | 
			
		||||
        },
 | 
			
		||||
      ],
 | 
			
		||||
    }).compile();
 | 
			
		||||
 | 
			
		||||
    reposService = module.get(ReposService);
 | 
			
		||||
    runner = module.get(PipelineTaskRunner);
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  it('should be defined', () => {
 | 
			
		||||
    expect(runner).toBeDefined();
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  it('onNewTask', async () => {
 | 
			
		||||
    const task = new PipelineTask();
 | 
			
		||||
    let tmpTask;
 | 
			
		||||
    const doTask = jest
 | 
			
		||||
      .spyOn(runner, 'doTask')
 | 
			
		||||
      .mockImplementation(async (task) => {
 | 
			
		||||
        tmpTask = task;
 | 
			
		||||
      });
 | 
			
		||||
    await runner.onNewTask(task);
 | 
			
		||||
    expect(tmpTask).toEqual(task);
 | 
			
		||||
    expect(doTask).toBeCalledTimes(1);
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  describe('test biz', () => {
 | 
			
		||||
    let emitEvent: jest.SpyInstance;
 | 
			
		||||
    beforeEach(() => {
 | 
			
		||||
      emitEvent = jest
 | 
			
		||||
        .spyOn(runner, 'emitEvent')
 | 
			
		||||
        .mockImplementation((..._) => Promise.resolve());
 | 
			
		||||
    });
 | 
			
		||||
 | 
			
		||||
    describe('doTask', () => {
 | 
			
		||||
      let checkout: jest.SpyInstance;
 | 
			
		||||
      let doTaskUnit: jest.SpyInstance;
 | 
			
		||||
 | 
			
		||||
      beforeEach(() => {
 | 
			
		||||
        checkout = jest
 | 
			
		||||
          .spyOn(runner, 'checkout')
 | 
			
		||||
          .mockImplementation((..._) => Promise.resolve('/null'));
 | 
			
		||||
        doTaskUnit = jest
 | 
			
		||||
          .spyOn(runner, 'doTaskUnit')
 | 
			
		||||
          .mockImplementation((..._) => Promise.resolve());
 | 
			
		||||
      });
 | 
			
		||||
 | 
			
		||||
      it('only checkout', async () => {
 | 
			
		||||
        const task = new PipelineTask();
 | 
			
		||||
        (task.id = 'taskId'), (task.pipeline = new Pipeline());
 | 
			
		||||
        task.units = [PipelineUnits.checkout];
 | 
			
		||||
        task.pipeline.id = 'pipelineId';
 | 
			
		||||
        task.pipeline.project = new Project();
 | 
			
		||||
        task.pipeline.project.id = 'projectId';
 | 
			
		||||
        task.pipeline.workUnitMetadata = new WorkUnitMetadata();
 | 
			
		||||
        task.pipeline.workUnitMetadata.version = 1;
 | 
			
		||||
        task.pipeline.workUnitMetadata.units = [
 | 
			
		||||
          {
 | 
			
		||||
            type: PipelineUnits.checkout,
 | 
			
		||||
            scripts: [],
 | 
			
		||||
          },
 | 
			
		||||
        ];
 | 
			
		||||
 | 
			
		||||
        await runner.doTask(task);
 | 
			
		||||
 | 
			
		||||
        expect(checkout).toBeCalledTimes(1);
 | 
			
		||||
        expect(doTaskUnit).toBeCalledTimes(0);
 | 
			
		||||
        expect(emitEvent).toBeCalledTimes(2);
 | 
			
		||||
        expect(emitEvent.mock.calls[0][0]).toMatchObject(task);
 | 
			
		||||
        expect(emitEvent.mock.calls[0][1]).toBeNull();
 | 
			
		||||
        expect(emitEvent.mock.calls[0][2]).toEqual(TaskStatuses.working);
 | 
			
		||||
        expect(emitEvent.mock.calls[1][0]).toMatchObject(task);
 | 
			
		||||
        expect(emitEvent.mock.calls[1][1]).toBeNull();
 | 
			
		||||
        expect(emitEvent.mock.calls[1][2]).toEqual(TaskStatuses.success);
 | 
			
		||||
      });
 | 
			
		||||
 | 
			
		||||
      it('many units', async () => {
 | 
			
		||||
        const task = new PipelineTask();
 | 
			
		||||
        (task.id = 'taskId'), (task.pipeline = new Pipeline());
 | 
			
		||||
        task.units = [
 | 
			
		||||
          PipelineUnits.checkout,
 | 
			
		||||
          PipelineUnits.test,
 | 
			
		||||
          PipelineUnits.deploy,
 | 
			
		||||
        ];
 | 
			
		||||
        task.pipeline.id = 'pipelineId';
 | 
			
		||||
        task.pipeline.project = new Project();
 | 
			
		||||
        task.pipeline.project.id = 'projectId';
 | 
			
		||||
        task.pipeline.workUnitMetadata = new WorkUnitMetadata();
 | 
			
		||||
        task.pipeline.workUnitMetadata.version = 1;
 | 
			
		||||
        task.pipeline.workUnitMetadata.units = [
 | 
			
		||||
          {
 | 
			
		||||
            type: PipelineUnits.checkout,
 | 
			
		||||
            scripts: [],
 | 
			
		||||
          },
 | 
			
		||||
          {
 | 
			
		||||
            type: PipelineUnits.installDependencies,
 | 
			
		||||
            scripts: ['pwd'],
 | 
			
		||||
          },
 | 
			
		||||
          {
 | 
			
		||||
            type: PipelineUnits.test,
 | 
			
		||||
            scripts: ['pwd'],
 | 
			
		||||
          },
 | 
			
		||||
          {
 | 
			
		||||
            type: PipelineUnits.deploy,
 | 
			
		||||
            scripts: ['pwd', 'uname'],
 | 
			
		||||
          },
 | 
			
		||||
        ];
 | 
			
		||||
 | 
			
		||||
        await runner.doTask(task);
 | 
			
		||||
 | 
			
		||||
        expect(checkout).toBeCalledTimes(1);
 | 
			
		||||
        expect(doTaskUnit).toBeCalledTimes(2);
 | 
			
		||||
        expect(emitEvent).toBeCalledTimes(2);
 | 
			
		||||
      });
 | 
			
		||||
 | 
			
		||||
      it('unit work failed', async () => {
 | 
			
		||||
        const task = new PipelineTask();
 | 
			
		||||
        (task.id = 'taskId'), (task.pipeline = new Pipeline());
 | 
			
		||||
        task.units = [PipelineUnits.checkout, PipelineUnits.test];
 | 
			
		||||
        task.pipeline.id = 'pipelineId';
 | 
			
		||||
        task.pipeline.project = new Project();
 | 
			
		||||
        task.pipeline.project.id = 'projectId';
 | 
			
		||||
        task.pipeline.workUnitMetadata = new WorkUnitMetadata();
 | 
			
		||||
        task.pipeline.workUnitMetadata.version = 1;
 | 
			
		||||
        task.pipeline.workUnitMetadata.units = [
 | 
			
		||||
          {
 | 
			
		||||
            type: PipelineUnits.checkout,
 | 
			
		||||
            scripts: [],
 | 
			
		||||
          },
 | 
			
		||||
          {
 | 
			
		||||
            type: PipelineUnits.test,
 | 
			
		||||
            scripts: ['pwd'],
 | 
			
		||||
          },
 | 
			
		||||
        ];
 | 
			
		||||
 | 
			
		||||
        doTaskUnit = jest
 | 
			
		||||
          .spyOn(runner, 'doTaskUnit')
 | 
			
		||||
          .mockImplementation((..._) =>
 | 
			
		||||
            Promise.reject(new Error('test error')),
 | 
			
		||||
          );
 | 
			
		||||
        await runner.doTask(task);
 | 
			
		||||
 | 
			
		||||
        expect(checkout).toBeCalledTimes(1);
 | 
			
		||||
        expect(doTaskUnit).toBeCalledTimes(1);
 | 
			
		||||
        expect(emitEvent).toBeCalledTimes(2);
 | 
			
		||||
        expect(emitEvent.mock.calls[1][0]).toMatchObject(task);
 | 
			
		||||
        expect(emitEvent.mock.calls[1][1]).toBeNull();
 | 
			
		||||
        expect(emitEvent.mock.calls[1][2]).toEqual(TaskStatuses.failed);
 | 
			
		||||
      });
 | 
			
		||||
    });
 | 
			
		||||
 | 
			
		||||
    describe('doTaskUnit', () => {
 | 
			
		||||
      it('success', async () => {
 | 
			
		||||
        const runScript = jest
 | 
			
		||||
          .spyOn(runner, 'runScript')
 | 
			
		||||
          .mockImplementation((..._) => Promise.resolve());
 | 
			
		||||
        const task = new PipelineTask();
 | 
			
		||||
 | 
			
		||||
        const unit = PipelineUnits.test;
 | 
			
		||||
        const workspacePath = '/null';
 | 
			
		||||
        await runner.doTaskUnit(unit, ['pwd'], task, workspacePath);
 | 
			
		||||
 | 
			
		||||
        expect(emitEvent.mock.calls[0][0]).toEqual(task);
 | 
			
		||||
        expect(emitEvent.mock.calls[0][1]).toEqual(unit);
 | 
			
		||||
        expect(emitEvent.mock.calls[0][2]).toEqual(TaskStatuses.working);
 | 
			
		||||
        expect(emitEvent.mock.calls[1][0]).toEqual(task);
 | 
			
		||||
        expect(emitEvent.mock.calls[1][1]).toEqual(unit);
 | 
			
		||||
        expect(emitEvent.mock.calls[1][2]).toEqual(TaskStatuses.success);
 | 
			
		||||
        expect(runScript.mock.calls[0][0]).toEqual('pwd');
 | 
			
		||||
        expect(runScript.mock.calls[0][1]).toEqual(workspacePath);
 | 
			
		||||
        expect(runScript.mock.calls[0][2]).toEqual(task);
 | 
			
		||||
        expect(runScript.mock.calls[0][3]).toEqual(unit);
 | 
			
		||||
      });
 | 
			
		||||
      it('failed', async () => {
 | 
			
		||||
        const runScript = jest
 | 
			
		||||
          .spyOn(runner, 'runScript')
 | 
			
		||||
          .mockImplementation((..._) =>
 | 
			
		||||
            Promise.reject(new Error('test error')),
 | 
			
		||||
          );
 | 
			
		||||
        const task = new PipelineTask();
 | 
			
		||||
 | 
			
		||||
        const unit = PipelineUnits.test;
 | 
			
		||||
        const workspacePath = '/null';
 | 
			
		||||
        await expect(
 | 
			
		||||
          runner.doTaskUnit(unit, ['pwd'], task, workspacePath),
 | 
			
		||||
        ).rejects.toThrow('test error');
 | 
			
		||||
 | 
			
		||||
        expect(emitEvent.mock.calls[1]?.[0]).toEqual(task);
 | 
			
		||||
        expect(emitEvent.mock.calls[1]?.[1]).toEqual(unit);
 | 
			
		||||
        expect(emitEvent.mock.calls[1]?.[2]).toEqual(TaskStatuses.failed);
 | 
			
		||||
        expect(runScript).toBeCalledTimes(1);
 | 
			
		||||
      });
 | 
			
		||||
    });
 | 
			
		||||
 | 
			
		||||
    describe('runScript', () => {
 | 
			
		||||
      it('normal', async () => {
 | 
			
		||||
        const spawn = jest.fn((..._: any[]) => ({
 | 
			
		||||
          stdout: {
 | 
			
		||||
            on: () => undefined,
 | 
			
		||||
          },
 | 
			
		||||
          stderr: {
 | 
			
		||||
            on: () => undefined,
 | 
			
		||||
          },
 | 
			
		||||
          addListener: (_: any, fn: (code: number) => void) => {
 | 
			
		||||
            fn(0);
 | 
			
		||||
          },
 | 
			
		||||
        }));
 | 
			
		||||
        (runner as any).spawn = spawn;
 | 
			
		||||
 | 
			
		||||
        const task = new PipelineTask();
 | 
			
		||||
        task.id = 'taskId';
 | 
			
		||||
        const unit = PipelineUnits.deploy;
 | 
			
		||||
 | 
			
		||||
        await runner.runScript('script name', 'workspaceRoot', task, unit);
 | 
			
		||||
        expect(spawn).toHaveBeenCalledTimes(1);
 | 
			
		||||
        expect(spawn.mock.calls[0][0]).toEqual('script name');
 | 
			
		||||
        expect(spawn.mock.calls[0][1]).toMatchObject({
 | 
			
		||||
          shell: true,
 | 
			
		||||
          cwd: 'workspaceRoot',
 | 
			
		||||
        });
 | 
			
		||||
      });
 | 
			
		||||
      it('failed', async () => {
 | 
			
		||||
        const spawn = jest.fn((..._: any[]) => ({
 | 
			
		||||
          stdout: {
 | 
			
		||||
            on: () => undefined,
 | 
			
		||||
          },
 | 
			
		||||
          stderr: {
 | 
			
		||||
            on: () => undefined,
 | 
			
		||||
          },
 | 
			
		||||
          addListener: (_: any, fn: (code: number) => void) => {
 | 
			
		||||
            fn(1);
 | 
			
		||||
          },
 | 
			
		||||
        }));
 | 
			
		||||
        (runner as any).spawn = spawn;
 | 
			
		||||
 | 
			
		||||
        const task = new PipelineTask();
 | 
			
		||||
        task.id = 'taskId';
 | 
			
		||||
        const unit = PipelineUnits.deploy;
 | 
			
		||||
 | 
			
		||||
        await expect(
          runner.runScript('script name', 'workspaceRoot', task, unit),
        ).rejects.toThrowError();
 | 
			
		||||
      });
 | 
			
		||||
      it('wait emit message done', async () => {
 | 
			
		||||
        let finishedFn: () => void;
 | 
			
		||||
        const on = jest.fn((_: any, fn: (buff: Buffer) => void) => {
 | 
			
		||||
          setTimeout(() => {
 | 
			
		||||
            fn(Buffer.from('message 1'));
 | 
			
		||||
            setTimeout(() => {
 | 
			
		||||
              fn(Buffer.from('message 2'));
 | 
			
		||||
              setTimeout(() => {
 | 
			
		||||
                fn(Buffer.from('message 3'));
 | 
			
		||||
                finishedFn();
 | 
			
		||||
              }, 1000);
 | 
			
		||||
            }, 10);
 | 
			
		||||
          }, 10);
 | 
			
		||||
        });
 | 
			
		||||
        const spawn = jest.fn((..._: any[]) => ({
 | 
			
		||||
          stdout: {
 | 
			
		||||
            on,
 | 
			
		||||
          },
 | 
			
		||||
          stderr: {
 | 
			
		||||
            on,
 | 
			
		||||
          },
 | 
			
		||||
          addListener: (_: any, fn: (code: number) => void) => {
 | 
			
		||||
            finishedFn = () => fn(0);
 | 
			
		||||
          },
 | 
			
		||||
        }));
 | 
			
		||||
 | 
			
		||||
        let emitSuccessCount = 0;
 | 
			
		||||
        jest.spyOn(runner, 'emitEvent').mockImplementation((..._: any[]) => {
 | 
			
		||||
          return new Promise((resolve) => {
 | 
			
		||||
            setTimeout(() => {
 | 
			
		||||
              emitSuccessCount++;
 | 
			
		||||
              resolve();
 | 
			
		||||
            }, 1000);
 | 
			
		||||
          });
 | 
			
		||||
        });
 | 
			
		||||
        (runner as any).spawn = spawn;
 | 
			
		||||
 | 
			
		||||
        const task = new PipelineTask();
 | 
			
		||||
        task.id = 'taskId';
 | 
			
		||||
        const unit = PipelineUnits.deploy;
 | 
			
		||||
 | 
			
		||||
        await runner.runScript('script name', 'workspaceRoot', task, unit);
 | 
			
		||||
        expect(emitSuccessCount).toEqual(1 + 6);
 | 
			
		||||
      });
 | 
			
		||||
    });
 | 
			
		||||
  });
 | 
			
		||||
});
 | 
			
		||||
							
								
								
									
301 src/pipeline-tasks/pipeline-task.runner.ts Normal file
@@ -0,0 +1,301 @@
import { ReposService } from '../repos/repos.service';
import { spawn, ChildProcessWithoutNullStreams } from 'child_process';
import { PipelineTask } from './pipeline-task.entity';
import { ApplicationException } from '../commons/exceptions/application.exception';
import { PipelineUnits } from './enums/pipeline-units.enum';
import { TaskStatuses } from './enums/task-statuses.enum';
import { InjectPinoLogger, PinoLogger } from 'nestjs-pino';
import {
  AmqpConnection,
  RabbitRPC,
  RabbitSubscribe,
} from '@golevelup/nestjs-rabbitmq';
import { PipelineTaskEvent } from './models/pipeline-task-event';
import { last } from 'ramda';
import { Inject } from '@nestjs/common';
import {
  EXCHANGE_PIPELINE_TASK_TOPIC,
  QUEUE_PIPELINE_TASK_KILL,
  ROUTE_PIPELINE_TASK_KILL,
} from './pipeline-tasks.constants';
import {
  EXCHANGE_PIPELINE_TASK_FANOUT,
  ROUTE_PIPELINE_TASK_LOG,
} from './pipeline-tasks.constants';
import {
  getInstanceName,
  getSelfInstanceQueueKey,
  getSelfInstanceRouteKey,
} from '../commons/utils/rabbit-mq';

type Spawn = typeof spawn;

export class PipelineTaskRunner {
  readonly processes = new Map<string, ChildProcessWithoutNullStreams>();
  readonly stopTaskIds = new Set<string>();

  constructor(
    private readonly reposService: ReposService,
    @InjectPinoLogger(PipelineTaskRunner.name)
    private readonly logger: PinoLogger,
    @Inject('spawn')
    private readonly spawn: Spawn,
    private readonly amqpConnection: AmqpConnection,
  ) {}
  @RabbitSubscribe({
    exchange: 'new-pipeline-task',
    routingKey: 'mac',
    queue: 'mac.new-pipeline-task',
  })
  async onNewTask(task: PipelineTask) {
    this.logger.info({ task }, 'on new task [%s].', task.id);
    try {
      await this.doTask(task);
    } catch (err) {
      this.logger.error({ task, err }, err.message);
    }
  }
  @RabbitRPC({
    exchange: EXCHANGE_PIPELINE_TASK_TOPIC,
    routingKey: getSelfInstanceRouteKey(ROUTE_PIPELINE_TASK_KILL),
    queue: getSelfInstanceQueueKey(QUEUE_PIPELINE_TASK_KILL),
    queueOptions: {
      autoDelete: true,
      durable: true,
    },
  })
  async onStopTask(task: PipelineTask) {
    this.logger.info({ task }, 'on stop task [%s].', task.id);
    this.stopTaskIds.add(task.id);
    const process = this.processes.get(task.id);
    if (process) {
      this.logger.info({ task }, 'send signal SIGINT to child process.');
      process.kill('SIGINT');

      setTimeout(() => {
        setTimeout(() => {
          this.stopTaskIds.delete(task.id);
        }, 10_000);
        if (process === this.processes.get(task.id)) {
          this.logger.info({ task }, 'send signal SIGKILL to child process.');
          process.kill('SIGKILL');
          return;
        }
        if (this.processes.has(task.id)) {
          this.logger.error(
            { task },
            'this pipeline task has not stopped yet; a new process is running, which may indicate a bug in error handling',
          );
        }
      }, 10_000);
    } else {
      this.logger.info({ task }, 'child process is not running.');
    }
    return true;
  }

  async doTask(task: PipelineTask) {
    if (task.pipeline.workUnitMetadata.version !== 1) {
      throw new ApplicationException(
        'work unit metadata version does not match.',
      );
    }
    await this.emitEvent(
      task,
      null,
      TaskStatuses.working,
      `[start task]`,
      'stdout',
    );

    this.logger.info('running task [%s].', task.id);
    try {
      const workspaceRoot = await this.checkout(task);
      const units = task.units
        .filter((unit) => unit !== PipelineUnits.checkout)
        .map(
          (type) =>
            task.pipeline.workUnitMetadata.units.find(
              (unit) => unit.type === type,
            ) ?? { type: type, scripts: [] },
        );
      this.logger.info({ units }, 'begin run units.');
      for (const unit of units) {
        await this.doTaskUnit(unit.type, unit.scripts, task, workspaceRoot);
      }
      await this.emitEvent(
        task,
        null,
        TaskStatuses.success,
        `[finished task] success`,
        'stdout',
      );
      this.logger.info({ task }, 'task [%s] completed.', task.id);
    } catch (err) {
      await this.emitEvent(
        task,
        null,
        TaskStatuses.failed,
        `[finished task] ${err.message}`,
        'stderr',
      );
      this.logger.error({ task, error: err }, 'task [%s] failed.', task.id);
    }
  }

  async doTaskUnit(
    unit: PipelineUnits,
    scripts: string[],
    task: PipelineTask,
    workspaceRoot: string,
  ) {
    await this.emitEvent(
      task,
      unit,
      TaskStatuses.working,
      `[begin unit] ${unit}`,
      'stdin',
    );
    this.logger.info({ task }, 'curr unit is %s', unit);
    try {
      for (const script of scripts) {
        this.logger.debug('begin runScript %s', script);
        if (this.stopTaskIds.has(task.id)) {
          throw new ApplicationException('Task has been KILLED');
        }
        await this.runScript(script, workspaceRoot, task, unit);
        this.logger.debug('end runScript %s', script);
      }

      await this.emitEvent(
        task,
        unit,
        TaskStatuses.success,
        `[finished unit] ${unit}`,
        'stdout',
      );
    } catch (err) {
      await this.emitEvent(
        task,
        unit,
        TaskStatuses.failed,
        `[finished unit] ${err.message}`,
        'stderr',
      );
      throw err;
    }
  }

  async checkout(task: PipelineTask) {
    await this.emitEvent(
      task,
      PipelineUnits.checkout,
      TaskStatuses.working,
      '[begin unit] checkout',
      'stdin',
    );
    try {
      const path = await this.reposService.checkout4Task(task);
      await this.emitEvent(
        task,
        PipelineUnits.checkout,
        TaskStatuses.success,
        'checkout success.',
        'stdout',
      );
      return path;
    } catch (err) {
      await this.emitEvent(
        task,
        PipelineUnits.checkout,
        TaskStatuses.failed,
        'checkout failed.',
        'stderr',
      );
      // propagate the failure so doTask marks the whole task as failed
      throw err;
    }
  }

  async emitEvent(
    task: PipelineTask,
    unit: PipelineUnits | null,
    status: TaskStatuses,
    message: string,
    messageType: 'stderr' | 'stdout' | 'stdin',
  ) {
    const event: PipelineTaskEvent = {
      taskId: task.id,
      pipelineId: task.pipeline.id,
      projectId: task.pipeline.project.id,
      unit,
      emittedAt: new Date(),
      message: last(message) === '\n' ? message : message + '\n',
      messageType,
      status,
    };
    // return the publish promise so callers (and runScript's semaphore) can wait for it
    return this.amqpConnection
      .publish(EXCHANGE_PIPELINE_TASK_FANOUT, ROUTE_PIPELINE_TASK_LOG, event, {
        headers: {
          sender: getInstanceName(),
        },
      })
      .catch((error) => {
        this.logger.error(
          { error, event },
          'send event message to queue failed. %s',
          error.message,
        );
      });
  }

  async runScript(
    script: string,
    workspaceRoot: string,
    task: PipelineTask,
    unit: PipelineUnits,
  ): Promise<void> {
    await this.emitEvent(task, unit, TaskStatuses.working, script, 'stdin');
    return new Promise((resolve, reject) => {
      const sub = this.spawn(script, {
        shell: true,
        cwd: workspaceRoot,
      });
      this.processes.set(task.id, sub);
      let loggingCount = 0; // semaphore: number of emitEvent calls still in flight

      sub.stderr.on('data', (data: Buffer) => {
        const str = data.toString();
        loggingCount++;

        this.emitEvent(task, unit, TaskStatuses.working, str, 'stderr').finally(
          () => loggingCount--,
        );
      });
      sub.stdout.on('data', (data: Buffer) => {
        const str = data.toString();
        loggingCount++;

        this.emitEvent(task, unit, TaskStatuses.working, str, 'stdout').finally(
          () => loggingCount--,
        );
      });
      sub.addListener('close', async (code) => {
        this.processes.delete(task.id);
        // wait (up to 10 x 500ms) for in-flight log events to be published
        for (let i = 0; i < 10 && loggingCount > 0; i++) {
          await new Promise((resolve) => setTimeout(resolve, 500));
          this.logger.debug('waiting logging... (%dx500ms)', i);
        }
        if (code === 0) {
          return resolve();
        }
        if (this.stopTaskIds.has(task.id)) {
          return reject(new ApplicationException('Task has been KILLED'));
        }
        return reject(new ApplicationException('exec script failed'));
      });
    });
  }
}
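onStopTask above is a RabbitRPC handler bound to an instance-specific routing key, so the kill request has to be addressed to the worker recorded on task.runOn. The service-side stopTask implementation is not shown in this compare view; a sketch of how the request might be issued, assuming the helper names used elsewhere in this diff, could look like this:

```ts
// Hypothetical helper: ask the instance recorded on task.runOn to kill the task
// and wait for onStopTask's boolean reply. Not code from this branch.
import { AmqpConnection } from '@golevelup/nestjs-rabbitmq';
import { PipelineTask } from './pipeline-task.entity';
import {
  EXCHANGE_PIPELINE_TASK_TOPIC,
  ROUTE_PIPELINE_TASK_KILL,
} from './pipeline-tasks.constants';
import { getAppInstanceRouteKey } from '../commons/utils/rabbit-mq';

export async function requestKill(amqp: AmqpConnection, task: PipelineTask) {
  return amqp.request<boolean>({
    exchange: EXCHANGE_PIPELINE_TASK_TOPIC,
    routingKey: getAppInstanceRouteKey(ROUTE_PIPELINE_TASK_KILL, task.runOn),
    payload: task,
    timeout: 10_000,
  });
}
```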
@@ -1,3 +1,9 @@
export const PIPELINE_TASK_QUEUE = 'PIPELINE_TASK_QUEUE';
export const PIPELINE_TASK_LOG_QUEUE = 'PIPELINE_TASK_LOG_QUEUE';
export const PIPELINE_TASK_LOG_PUBSUB = 'PIPELINE_TASK_LOG_PUBSUB';
export const EXCHANGE_PIPELINE_TASK_TOPIC = 'pipeline-task.topic';
export const EXCHANGE_PIPELINE_TASK_FANOUT = 'pipeline-task.fanout';
export const ROUTE_PIPELINE_TASK_LOG = 'pipeline-task-log';
export const QUEUE_HANDLE_PIPELINE_TASK_LOG_EVENT = 'pipeline-task-log';
export const QUEUE_WRITE_PIPELINE_TASK_LOG = 'write-pipeline-task-log';
export const ROUTE_PIPELINE_TASK_DONE = 'pipeline-task-done';
export const QUEUE_PIPELINE_TASK_DONE = 'pipeline-task-done';
export const ROUTE_PIPELINE_TASK_KILL = 'pipeline-task-kill';
export const QUEUE_PIPELINE_TASK_KILL = 'pipeline-task-kill';

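The runner and service combine these route and queue names with helpers from src/commons/utils/rabbit-mq (getInstanceName, getSelfInstanceRouteKey, getSelfInstanceQueueKey, getAppInstanceRouteKey), which are not part of this diff. A minimal sketch of what those helpers need to do, with the instance-name source purely an assumption, might be:

```ts
// Hypothetical sketch of src/commons/utils/rabbit-mq.ts — the real implementation
// is outside this compare view. The only requirement visible from this diff is that
// kill messages can be routed to the specific instance that runs a task.
import { hostname } from 'os';

export function getInstanceName(): string {
  // assumption: instance name comes from an env var, falling back to the hostname
  return process.env.FENNEC_INSTANCE_NAME ?? hostname();
}

export function getSelfInstanceRouteKey(route: string): string {
  return `${route}.${getInstanceName()}`;
}

export function getSelfInstanceQueueKey(queue: string): string {
  return `${queue}.${getInstanceName()}`;
}

export function getAppInstanceRouteKey(route: string, instance: string): string {
  return `${route}.${instance}`;
}
```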
@@ -6,33 +6,66 @@ import { PipelineTask } from './pipeline-task.entity';
import { Pipeline } from '../pipelines/pipeline.entity';
import { ReposModule } from '../repos/repos.module';
import { RedisModule } from 'nestjs-redis';
import { BullModule } from '@nestjs/bull';
import { PipelineTaskConsumer } from './pipeline-task.consumer';
import { RabbitMQModule } from '@golevelup/nestjs-rabbitmq';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { PipelineTaskRunner } from './pipeline-task.runner';
import { spawn } from 'child_process';
import {
  PIPELINE_TASK_QUEUE,
  PIPELINE_TASK_LOG_PUBSUB,
  EXCHANGE_PIPELINE_TASK_FANOUT,
  EXCHANGE_PIPELINE_TASK_TOPIC,
} from './pipeline-tasks.constants';
import { PipelineTaskLogsService } from './pipeline-task-logs.service';
import { PubSub } from 'apollo-server-express';
import { PipelineTaskLogger } from './pipeline-task.logger';
import { PipelineTaskFlushService } from './pipeline-task-flush.service';

@Module({
  imports: [
    TypeOrmModule.forFeature([PipelineTask, Pipeline]),
    BullModule.registerQueue({
      name: PIPELINE_TASK_QUEUE,
    }),
    RedisModule,
    ReposModule,
    RabbitMQModule.forRootAsync(RabbitMQModule, {
      imports: [ConfigModule],
      useFactory: (configService: ConfigService) => ({
        uri: configService.get<string>('db.rabbitmq.uri'),
        exchanges: [
          {
            name: 'new-pipeline-task',
            type: 'fanout',
            options: {
              durable: true,
              autoDelete: true,
            },
          },
          {
            name: EXCHANGE_PIPELINE_TASK_FANOUT,
            type: 'fanout',
            options: {
              durable: false,
              autoDelete: true,
            },
          },
          {
            name: EXCHANGE_PIPELINE_TASK_TOPIC,
            type: 'topic',
            options: {
              durable: false,
              autoDelete: true,
            },
          },
        ],
      }),
      inject: [ConfigService],
    }),
  ],
  providers: [
    PipelineTasksService,
    PipelineTasksResolver,
    PipelineTaskConsumer,
    PipelineTaskLogsService,
    PipelineTaskRunner,
    PipelineTaskLogger,
    {
      provide: Symbol(PIPELINE_TASK_LOG_PUBSUB),
      useValue: new PubSub(),
      provide: 'spawn',
      useValue: spawn,
    },
    PipelineTaskFlushService,
  ],
  exports: [PipelineTasksService],
})

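The RabbitMQ factory above reads 'db.rabbitmq.uri' from ConfigService; the configuration loader itself is not part of this diff. A sketch of one way that key could be supplied, with the loader shape and environment variable name being assumptions only:

```ts
// Hypothetical sketch — whatever loader the app actually uses must expose a
// 'db.rabbitmq.uri' key for the useFactory above.
import { ConfigModule } from '@nestjs/config';

export const configImport = ConfigModule.forRoot({
  load: [
    () => ({
      db: {
        rabbitmq: {
          // assumption: the URI is injected from the environment in deployments
          uri: process.env.RABBITMQ_URI ?? 'amqp://localhost:5672',
        },
      },
    }),
  ],
});
```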
@@ -1,6 +1,6 @@
import { Test, TestingModule } from '@nestjs/testing';
import { PipelineTaskLogger } from './pipeline-task.logger';
import { PipelineTasksResolver } from './pipeline-tasks.resolver';
import { PipelineTaskLogsService } from './pipeline-task-logs.service';
import { PipelineTasksService } from './pipeline-tasks.service';

describe('PipelineTasksResolver', () => {
@@ -15,7 +15,7 @@ describe('PipelineTasksResolver', () => {
          useValue: {},
        },
        {
          provide: PipelineTaskLogsService,
          provide: PipelineTaskLogger,
          useValue: {},
        },
      ],

@@ -2,15 +2,17 @@ import { Resolver, Args, Mutation, Subscription, Query } from '@nestjs/graphql';
import { PipelineTask } from './pipeline-task.entity';
import { PipelineTasksService } from './pipeline-tasks.service';
import { CreatePipelineTaskInput } from './dtos/create-pipeline-task.input';
import { PipelineTaskLogMessage } from './models/pipeline-task-log-message.module';
import { PipelineTaskLogArgs } from './dtos/pipeline-task-log.args';
import { PipelineTaskLogsService } from './pipeline-task-logs.service';
import { plainToClass } from 'class-transformer';
import { PipelineTaskLogger } from './pipeline-task.logger';
import { observableToAsyncIterable } from '@graphql-tools/utils';
import { PipelineTaskEvent } from './models/pipeline-task-event';

@Resolver()
export class PipelineTasksResolver {
  constructor(
    private readonly service: PipelineTasksService,
    private readonly logsService: PipelineTaskLogsService,
    private readonly taskLogger: PipelineTaskLogger,
  ) {}

  @Mutation(() => PipelineTask)
@@ -18,15 +20,17 @@ export class PipelineTasksResolver {
    return await this.service.addTask(taskDto);
  }

  @Subscription(() => PipelineTaskLogMessage, {
  @Subscription(() => PipelineTaskEvent, {
    resolve: (value) => {
      return value;
      const data = plainToClass(PipelineTaskEvent, value);
      return data;
    },
  })
  async pipelineTaskLog(@Args() args: PipelineTaskLogArgs) {
  async pipelineTaskEvent(@Args() args: PipelineTaskLogArgs) {
    const task = await this.service.findTaskById(args.taskId);
    const asyncIterator = this.logsService.watchLogs(task);
    return asyncIterator;
    return observableToAsyncIterable<PipelineTaskEvent>(
      this.taskLogger.getMessage$(task.id),
    );
  }

  @Subscription(() => PipelineTask, {
@@ -35,7 +39,7 @@ export class PipelineTasksResolver {
    },
  })
  async pipelineTaskChanged(@Args('id') id: string) {
    return await this.service.watchTaskUpdated(id);
    // return await this.service.watchTaskUpdated(id);
  }

  @Query(() => [PipelineTask])
@@ -44,7 +48,14 @@ export class PipelineTasksResolver {
  }

  @Query(() => PipelineTask)
  async findPipelineTask(@Args('id') id: string) {
  async pipelineTask(@Args('id') id: string) {
    return await this.service.findTaskById(id);
  }

  @Mutation(() => Boolean)
  async stopPipelineTask(@Args('id') id: string) {
    const task = await this.service.findTaskById(id);
    await this.service.stopTask(task);
    return true;
  }
}

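The resolver change above hinges on observableToAsyncIterable, which adapts the logger's RxJS stream into the async iterator that GraphQL subscriptions expect. A minimal standalone sketch of that bridge, outside of Nest and with the function name and loop body chosen purely for illustration:

```ts
// Sketch only: consume a task's event stream as an async iterator, the same way
// the pipelineTaskEvent subscription does.
import { observableToAsyncIterable } from '@graphql-tools/utils';
import { PipelineTaskLogger } from './pipeline-task.logger';

export async function printTaskEvents(
  taskLogger: PipelineTaskLogger,
  taskId: string,
) {
  const iterator = observableToAsyncIterable(taskLogger.getMessage$(taskId));
  for await (const event of iterator) {
    // each event carries the unit, status, and the raw stdout/stderr chunk
    console.log(event.messageType, event.message);
  }
}
```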
@@ -2,51 +2,19 @@ import { Test, TestingModule } from '@nestjs/testing';
 | 
			
		||||
import { PipelineTasksService } from './pipeline-tasks.service';
 | 
			
		||||
import { getRepositoryToken } from '@nestjs/typeorm';
 | 
			
		||||
import { PipelineTask } from './pipeline-task.entity';
 | 
			
		||||
import { PIPELINE_TASK_QUEUE } from './pipeline-tasks.constants';
 | 
			
		||||
import { getQueueToken } from '@nestjs/bull';
 | 
			
		||||
import { RedisService } from 'nestjs-redis';
 | 
			
		||||
import { Pipeline } from '../pipelines/pipeline.entity';
 | 
			
		||||
import { EntityNotFoundError } from 'typeorm/error/EntityNotFoundError';
 | 
			
		||||
import { Repository } from 'typeorm';
 | 
			
		||||
import { Queue } from 'bull';
 | 
			
		||||
import { LockFailedException } from '../commons/exceptions/lock-failed.exception';
 | 
			
		||||
import { AmqpConnection } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
import { PipelineTaskFlushService } from './pipeline-task-flush.service';
 | 
			
		||||
import { getLoggerToken, PinoLogger } from 'nestjs-pino';
 | 
			
		||||
 | 
			
		||||
describe('PipelineTasksService', () => {
 | 
			
		||||
  let service: PipelineTasksService;
 | 
			
		||||
  let module: TestingModule;
 | 
			
		||||
  let taskRepository: Repository<PipelineTask>;
 | 
			
		||||
  let pipelineRepository: Repository<Pipeline>;
 | 
			
		||||
  const getBasePipeline = () =>
 | 
			
		||||
    ({
 | 
			
		||||
      id: 'test',
 | 
			
		||||
      name: '测试流水线',
 | 
			
		||||
      branch: 'master',
 | 
			
		||||
      workUnitMetadata: {},
 | 
			
		||||
      project: {
 | 
			
		||||
        id: 'test-project',
 | 
			
		||||
      },
 | 
			
		||||
    } as Pipeline);
 | 
			
		||||
  let redisClient;
 | 
			
		||||
  let taskQueue: Queue;
 | 
			
		||||
  const getTask = () =>
 | 
			
		||||
    ({
 | 
			
		||||
      pipelineId: 'test',
 | 
			
		||||
      commit: 'test',
 | 
			
		||||
      pipeline: { branch: 'master' },
 | 
			
		||||
      units: [],
 | 
			
		||||
    } as PipelineTask);
 | 
			
		||||
 | 
			
		||||
  beforeEach(async () => {
 | 
			
		||||
    redisClient = (() => ({
 | 
			
		||||
      set: jest.fn().mockImplementation(async () => 'OK'),
 | 
			
		||||
      del: jest.fn().mockImplementation(async () => 'test'),
 | 
			
		||||
      get: jest.fn().mockImplementation(async () => 'test'),
 | 
			
		||||
      lpush: jest.fn().mockImplementation(async () => 1),
 | 
			
		||||
      rpop: jest.fn().mockImplementation(async () => JSON.stringify(getTask())),
 | 
			
		||||
    }))() as any;
 | 
			
		||||
    taskQueue = (() => ({
 | 
			
		||||
      add: jest.fn().mockImplementation(async () => null),
 | 
			
		||||
    }))() as any;
 | 
			
		||||
    module = await Test.createTestingModule({
 | 
			
		||||
      providers: [
 | 
			
		||||
        PipelineTasksService,
 | 
			
		||||
@@ -59,14 +27,16 @@ describe('PipelineTasksService', () => {
 | 
			
		||||
          useValue: new Repository(),
 | 
			
		||||
        },
 | 
			
		||||
        {
 | 
			
		||||
          provide: getQueueToken(PIPELINE_TASK_QUEUE),
 | 
			
		||||
          useValue: taskQueue,
 | 
			
		||||
          provide: AmqpConnection,
 | 
			
		||||
          useValue: {},
 | 
			
		||||
        },
 | 
			
		||||
        {
 | 
			
		||||
          provide: RedisService,
 | 
			
		||||
          useValue: {
 | 
			
		||||
            getClient: jest.fn(() => redisClient),
 | 
			
		||||
          },
 | 
			
		||||
          provide: PipelineTaskFlushService,
 | 
			
		||||
          useValue: {},
 | 
			
		||||
        },
 | 
			
		||||
        {
 | 
			
		||||
          provide: getLoggerToken(PipelineTasksService.name),
 | 
			
		||||
          useValue: new PinoLogger({}),
 | 
			
		||||
        },
 | 
			
		||||
      ],
 | 
			
		||||
    }).compile();
 | 
			
		||||
@@ -87,119 +57,43 @@ describe('PipelineTasksService', () => {
 | 
			
		||||
    expect(service).toBeDefined();
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  describe('addTask', () => {
 | 
			
		||||
    beforeEach(() => {
 | 
			
		||||
      jest
 | 
			
		||||
        .spyOn(pipelineRepository, 'findOneOrFail')
 | 
			
		||||
        .mockImplementation(async () => getBasePipeline());
 | 
			
		||||
    });
 | 
			
		||||
    it('pipeline not found', async () => {
 | 
			
		||||
      jest.spyOn(taskRepository, 'findOneOrFail').mockImplementation(() => {
 | 
			
		||||
        throw new EntityNotFoundError(Pipeline, {});
 | 
			
		||||
      });
 | 
			
		||||
      await expect(
 | 
			
		||||
        service.addTask({ pipelineId: 'test', commit: 'test', units: [] }),
 | 
			
		||||
      ).rejects;
 | 
			
		||||
    });
 | 
			
		||||
    it('create task on db', async () => {
 | 
			
		||||
      const save = jest
 | 
			
		||||
        .spyOn(taskRepository, 'save')
 | 
			
		||||
        .mockImplementation(async (data: any) => data);
 | 
			
		||||
      const findOne = jest.spyOn(taskRepository, 'findOne');
 | 
			
		||||
      jest
 | 
			
		||||
        .spyOn(service, 'doNextTask')
 | 
			
		||||
        .mockImplementation(async () => undefined);
 | 
			
		||||
      await service.addTask({ pipelineId: 'test', commit: 'test', units: [] }),
 | 
			
		||||
        expect(save.mock.calls[0][0]).toMatchObject({
 | 
			
		||||
          pipelineId: 'test',
 | 
			
		||||
          commit: 'test',
 | 
			
		||||
          units: [],
 | 
			
		||||
        });
 | 
			
		||||
      expect(findOne).toBeCalled();
 | 
			
		||||
    });
 | 
			
		||||
    it('add task', async () => {
 | 
			
		||||
      const lpush = jest.spyOn(redisClient, 'lpush');
 | 
			
		||||
      const doNextTask = jest.spyOn(service, 'doNextTask');
 | 
			
		||||
      jest
 | 
			
		||||
        .spyOn(service, 'doNextTask')
 | 
			
		||||
        .mockImplementation(async () => undefined);
 | 
			
		||||
      await service.addTask({ pipelineId: 'test', commit: 'test', units: [] });
 | 
			
		||||
      expect(typeof lpush.mock.calls[0][1] === 'string').toBeTruthy();
 | 
			
		||||
      expect(JSON.parse(lpush.mock.calls[0][1] as string)).toMatchObject({
 | 
			
		||||
        pipelineId: 'test',
 | 
			
		||||
        commit: 'test',
 | 
			
		||||
        units: [],
 | 
			
		||||
        pipeline: getBasePipeline(),
 | 
			
		||||
      });
 | 
			
		||||
      expect(doNextTask).toHaveBeenCalledWith(getBasePipeline());
 | 
			
		||||
    });
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  describe('doNextTask', () => {
 | 
			
		||||
    it('add task to queue', async () => {
 | 
			
		||||
      let lckValue: string;
 | 
			
		||||
      const set = jest
 | 
			
		||||
        .spyOn(redisClient, 'set')
 | 
			
		||||
        .mockImplementation(async (...args) => (lckValue = args[3] as string));
 | 
			
		||||
      const get = jest
 | 
			
		||||
        .spyOn(redisClient, 'get')
 | 
			
		||||
        .mockImplementation(async () => lckValue);
 | 
			
		||||
      const del = jest.spyOn(redisClient, 'del');
 | 
			
		||||
      const rpop = jest.spyOn(redisClient, 'rpop');
 | 
			
		||||
      const add = jest.spyOn(taskQueue, 'add');
 | 
			
		||||
 | 
			
		||||
      await service.doNextTask(getBasePipeline());
 | 
			
		||||
 | 
			
		||||
      expect(add).toHaveBeenCalledWith(getTask());
 | 
			
		||||
      expect(set).toHaveBeenCalledTimes(1);
 | 
			
		||||
      expect(rpop).toHaveBeenCalledTimes(1);
 | 
			
		||||
      expect(get).toHaveBeenCalledTimes(1);
 | 
			
		||||
      expect(del).toHaveBeenCalledTimes(1);
 | 
			
		||||
    });
 | 
			
		||||
    it('pipeline is busy', async () => {
 | 
			
		||||
      let remainTimes = 3;
 | 
			
		||||
 | 
			
		||||
      let lckValue: string;
 | 
			
		||||
      const set = jest
 | 
			
		||||
        .spyOn(redisClient, 'set')
 | 
			
		||||
        .mockImplementation(async (...args) => {
 | 
			
		||||
          if (remainTimes-- > 0) {
 | 
			
		||||
            throw new Error();
 | 
			
		||||
          } else {
 | 
			
		||||
            lckValue = args[3] as string;
 | 
			
		||||
          }
 | 
			
		||||
        });
 | 
			
		||||
      const get = jest
 | 
			
		||||
        .spyOn(redisClient, 'get')
 | 
			
		||||
        .mockImplementation(async () => lckValue);
 | 
			
		||||
      const del = jest.spyOn(redisClient, 'del');
 | 
			
		||||
      const rpop = jest.spyOn(redisClient, 'rpop');
 | 
			
		||||
      const add = jest.spyOn(taskQueue, 'add');
 | 
			
		||||
 | 
			
		||||
      await service.doNextTask(getBasePipeline());
 | 
			
		||||
 | 
			
		||||
      expect(rpop).toHaveBeenCalledTimes(1);
 | 
			
		||||
      expect(set).toHaveBeenCalledTimes(4);
 | 
			
		||||
      expect(get).toHaveBeenCalledTimes(1);
 | 
			
		||||
      expect(del).toHaveBeenCalledTimes(1);
 | 
			
		||||
      expect(add).toHaveBeenCalledWith(getTask());
 | 
			
		||||
    }, 10_000);
 | 
			
		||||
    it('pipeline always busy and timeout', async () => {
 | 
			
		||||
      const set = jest
 | 
			
		||||
        .spyOn(redisClient, 'set')
 | 
			
		||||
        .mockImplementation(async () => {
 | 
			
		||||
          throw new Error();
 | 
			
		||||
        });
 | 
			
		||||
      const get = jest.spyOn(redisClient, 'get');
 | 
			
		||||
      const del = jest.spyOn(redisClient, 'del');
 | 
			
		||||
 | 
			
		||||
      await expect(
 | 
			
		||||
        service.doNextTask(getBasePipeline()),
 | 
			
		||||
      ).rejects.toBeInstanceOf(LockFailedException);
 | 
			
		||||
 | 
			
		||||
      expect(set).toHaveBeenCalledTimes(5);
 | 
			
		||||
      expect(get).toHaveBeenCalledTimes(0);
 | 
			
		||||
      expect(del).toHaveBeenCalledTimes(0);
 | 
			
		||||
    }, 15_000);
 | 
			
		||||
  });
 | 
			
		||||
  // describe('addTask', () => {
 | 
			
		||||
  //   beforeEach(() => {
 | 
			
		||||
  //     jest
 | 
			
		||||
  //       .spyOn(pipelineRepository, 'findOneOrFail')
 | 
			
		||||
  //       .mockImplementation(async () => getBasePipeline());
 | 
			
		||||
  //   });
 | 
			
		||||
  //   it('pipeline not found', async () => {
 | 
			
		||||
  //     jest.spyOn(taskRepository, 'findOneOrFail').mockImplementation(() => {
 | 
			
		||||
  //       throw new EntityNotFoundError(Pipeline, {});
 | 
			
		||||
  //     });
 | 
			
		||||
  //     await expect(
 | 
			
		||||
  //       service.addTask({ pipelineId: 'test', commit: 'test', units: [] }),
 | 
			
		||||
  //     ).rejects;
 | 
			
		||||
  //   });
 | 
			
		||||
  //   it('create task on db', async () => {
 | 
			
		||||
  //     const save = jest
 | 
			
		||||
  //       .spyOn(taskRepository, 'save')
 | 
			
		||||
  //       .mockImplementation(async (data: any) => data);
 | 
			
		||||
  //     const findOne = jest.spyOn(taskRepository, 'findOne');
 | 
			
		||||
  //     await service.addTask({ pipelineId: 'test', commit: 'test', units: [] }),
 | 
			
		||||
  //       expect(save.mock.calls[0][0]).toMatchObject({
 | 
			
		||||
  //         pipelineId: 'test',
 | 
			
		||||
  //         commit: 'test',
 | 
			
		||||
  //         units: [],
 | 
			
		||||
  //       });
 | 
			
		||||
  //     expect(findOne).toBeCalled();
 | 
			
		||||
  //   });
 | 
			
		||||
  //   it('add task', async () => {
 | 
			
		||||
  //     const lpush = jest.spyOn(redisClient, 'lpush');
 | 
			
		||||
  //     await service.addTask({ pipelineId: 'test', commit: 'test', units: [] });
 | 
			
		||||
  //     expect(typeof lpush.mock.calls[0][1] === 'string').toBeTruthy();
 | 
			
		||||
  //     expect(JSON.parse(lpush.mock.calls[0][1] as string)).toMatchObject({
 | 
			
		||||
  //       pipelineId: 'test',
 | 
			
		||||
  //       commit: 'test',
 | 
			
		||||
  //       units: [],
 | 
			
		||||
  //       pipeline: getBasePipeline(),
 | 
			
		||||
  //     });
 | 
			
		||||
  //   });
 | 
			
		||||
  // });
 | 
			
		||||
});
 | 
			
		||||
 
 | 
			
		||||
@@ -1,63 +1,59 @@
 | 
			
		||||
import { ConflictException, Injectable } from '@nestjs/common';
 | 
			
		||||
import { BadRequestException, Injectable } from '@nestjs/common';
 | 
			
		||||
import { InjectRepository } from '@nestjs/typeorm';
 | 
			
		||||
import { PipelineTask } from './pipeline-task.entity';
 | 
			
		||||
import { In, Repository } from 'typeorm';
 | 
			
		||||
import { Repository } from 'typeorm';
 | 
			
		||||
import { CreatePipelineTaskInput } from './dtos/create-pipeline-task.input';
 | 
			
		||||
import { RedisService } from 'nestjs-redis';
 | 
			
		||||
import { Pipeline } from '../pipelines/pipeline.entity';
 | 
			
		||||
import { InjectQueue } from '@nestjs/bull';
 | 
			
		||||
import { PIPELINE_TASK_QUEUE } from './pipeline-tasks.constants';
 | 
			
		||||
import { Queue } from 'bull';
 | 
			
		||||
import { LockFailedException } from '../commons/exceptions/lock-failed.exception';
 | 
			
		||||
import { PubSub } from 'apollo-server-express';
 | 
			
		||||
import { TaskStatuses } from './enums/task-statuses.enum';
 | 
			
		||||
import { isNil } from 'ramda';
 | 
			
		||||
import debug from 'debug';
 | 
			
		||||
import { AmqpConnection, RabbitRPC } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
import {
 | 
			
		||||
  EXCHANGE_PIPELINE_TASK_TOPIC,
 | 
			
		||||
  QUEUE_PIPELINE_TASK_DONE,
 | 
			
		||||
  ROUTE_PIPELINE_TASK_DONE,
 | 
			
		||||
} from './pipeline-tasks.constants';
 | 
			
		||||
import { PipelineTaskFlushService } from './pipeline-task-flush.service';
 | 
			
		||||
import { find, isNil, propEq } from 'ramda';
 | 
			
		||||
import { PipelineTaskLogs } from './models/pipeline-task-logs.model';
 | 
			
		||||
import { TaskStatuses, terminalTaskStatuses } from './enums/task-statuses.enum';
 | 
			
		||||
import { InjectPinoLogger, PinoLogger } from 'nestjs-pino';
 | 
			
		||||
import { getAppInstanceRouteKey } from '../commons/utils/rabbit-mq';
 | 
			
		||||
import { ROUTE_PIPELINE_TASK_KILL } from './pipeline-tasks.constants';
 | 
			
		||||
 | 
			
		||||
const log = debug('fennec:pipeline-tasks:service');
 | 
			
		||||
 | 
			
		||||
@Injectable()
 | 
			
		||||
export class PipelineTasksService {
 | 
			
		||||
  pubSub = new PubSub();
 | 
			
		||||
  constructor(
 | 
			
		||||
    @InjectRepository(PipelineTask)
 | 
			
		||||
    private readonly repository: Repository<PipelineTask>,
 | 
			
		||||
    @InjectRepository(Pipeline)
 | 
			
		||||
    private readonly pipelineRepository: Repository<Pipeline>,
 | 
			
		||||
    @InjectQueue(PIPELINE_TASK_QUEUE)
 | 
			
		||||
    private readonly queue: Queue<PipelineTask>,
 | 
			
		||||
    private readonly redis: RedisService,
 | 
			
		||||
    private readonly amqpConnection: AmqpConnection,
 | 
			
		||||
    private readonly eventFlushService: PipelineTaskFlushService,
 | 
			
		||||
    @InjectPinoLogger(PipelineTasksService.name)
 | 
			
		||||
    private readonly logger: PinoLogger,
 | 
			
		||||
  ) {}
 | 
			
		||||
  async addTask(dto: CreatePipelineTaskInput) {
 | 
			
		||||
    const pipeline = await this.pipelineRepository.findOneOrFail({
 | 
			
		||||
      where: { id: dto.pipelineId },
 | 
			
		||||
      relations: ['project'],
 | 
			
		||||
    });
 | 
			
		||||
    const hasUnfinishedTask = await this.repository
 | 
			
		||||
      .findOne({
 | 
			
		||||
        pipelineId: dto.pipelineId,
 | 
			
		||||
        commit: dto.commit,
 | 
			
		||||
        status: In([TaskStatuses.pending, TaskStatuses.working]),
 | 
			
		||||
      })
 | 
			
		||||
      .then((val) => !isNil(val));
 | 
			
		||||
    if (hasUnfinishedTask) {
 | 
			
		||||
      throw new ConflictException(
 | 
			
		||||
        'There are the same tasks among the unfinished tasks!',
 | 
			
		||||
      );
 | 
			
		||||
    }
 | 
			
		||||
    // const hasUnfinishedTask = await this.repository
 | 
			
		||||
    //   .findOne({
 | 
			
		||||
    //     pipelineId: dto.pipelineId,
 | 
			
		||||
    //     commit: dto.commit,
 | 
			
		||||
    //     status: In([TaskStatuses.pending, TaskStatuses.working]),
 | 
			
		||||
    //   })
 | 
			
		||||
    //   .then((val) => !isNil(val));
 | 
			
		||||
    // if (hasUnfinishedTask) {
 | 
			
		||||
    //   throw new ConflictException(
 | 
			
		||||
    //     'There are the same tasks among the unfinished tasks!',
 | 
			
		||||
    //   );
 | 
			
		||||
    // }
 | 
			
		||||
    const task = await this.repository.save(this.repository.create(dto));
 | 
			
		||||
    task.pipeline = pipeline;
 | 
			
		||||
 | 
			
		||||
    const tasksKey = this.getRedisTokens(pipeline)[1];
 | 
			
		||||
    const redis = this.redis.getClient();
 | 
			
		||||
    await redis.lpush(tasksKey, JSON.stringify(task));
 | 
			
		||||
    log(
 | 
			
		||||
      'add task %s:%s-%s',
 | 
			
		||||
      task.id,
 | 
			
		||||
      task.pipeline.branch,
 | 
			
		||||
      task.commit.slice(0, 6),
 | 
			
		||||
    );
 | 
			
		||||
    await this.doNextTask(pipeline);
 | 
			
		||||
    this.amqpConnection.publish('new-pipeline-task', 'mac', task);
 | 
			
		||||
    return task;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
@@ -69,60 +65,94 @@ export class PipelineTasksService {
 | 
			
		||||
    return await this.repository.find({ pipelineId });
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async doNextTask(pipeline: Pipeline) {
 | 
			
		||||
    const [lckKey, tasksKey] = this.getRedisTokens(pipeline);
 | 
			
		||||
    const redis = this.redis.getClient();
 | 
			
		||||
 | 
			
		||||
    log('doNextTask()');
 | 
			
		||||
    const unLck = await new Promise<() => Promise<void>>(
 | 
			
		||||
      async (resolve, reject) => {
 | 
			
		||||
        const lckValue = Date.now().toString();
 | 
			
		||||
        for (let i = 0; i < 5; i++) {
 | 
			
		||||
          if (
 | 
			
		||||
            await redis
 | 
			
		||||
              .set(lckKey, 0, 'EX', lckValue, 'NX')
 | 
			
		||||
              .then(() => true)
 | 
			
		||||
              .catch(() => false)
 | 
			
		||||
          ) {
 | 
			
		||||
            resolve(async () => {
 | 
			
		||||
              if ((await redis.get(lckKey)) === lckValue) {
 | 
			
		||||
                await redis.del(lckKey);
 | 
			
		||||
              }
 | 
			
		||||
            });
 | 
			
		||||
            return;
 | 
			
		||||
          }
 | 
			
		||||
          await new Promise((resolve) => setTimeout(resolve, 2000));
 | 
			
		||||
        }
 | 
			
		||||
        reject(new LockFailedException(lckKey));
 | 
			
		||||
      },
 | 
			
		||||
    );
 | 
			
		||||
 | 
			
		||||
    const task = JSON.parse(
 | 
			
		||||
      (await redis.rpop(tasksKey).finally(() => unLck())) ?? 'null',
 | 
			
		||||
    );
 | 
			
		||||
    if (task) {
 | 
			
		||||
      log(
 | 
			
		||||
        'add task (%s:%s-%s) to queue',
 | 
			
		||||
        task.id,
 | 
			
		||||
        task.pipeline.branch,
 | 
			
		||||
        task.commit.slice(0, 6),
 | 
			
		||||
      );
 | 
			
		||||
      await this.queue.add(task);
 | 
			
		||||
    } else {
 | 
			
		||||
      log('task is empty');
 | 
			
		||||
    }
 | 
			
		||||
  }
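  // Editor's sketch, not part of this change set: the loop above calls
  // redis.set(lckKey, 0, 'EX', lckValue, 'NX') with the value and the TTL in
  // swapped positions, so 0 is stored while the timestamp string is used as
  // the expiry; the later `get(lckKey) === lckValue` check in the unlock
  // closure can therefore never match, and with ioredis an NX set that loses
  // the race resolves to null rather than rejecting, so `.then(() => true)`
  // reports success either way. A minimal corrected helper, assuming the
  // client returned by this.redis.getClient() is an ioredis instance:
  private async acquireLock(
    redis: import('ioredis').Redis,
    key: string,
    ttlSeconds = 60,
  ): Promise<(() => Promise<void>) | null> {
    const token = Date.now().toString();
    // 'OK' only when the key was actually created; null means the lock is held.
    const acquired = await redis.set(key, token, 'EX', ttlSeconds, 'NX');
    if (acquired !== 'OK') {
      return null;
    }
    return async () => {
      // release only while we still own the lock
      if ((await redis.get(key)) === token) {
        await redis.del(key);
      }
    };
  }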
 | 
			
		||||
 | 
			
		||||
  async updateTask(task: PipelineTask) {
 | 
			
		||||
    this.pubSub.publish(task.id, task);
 | 
			
		||||
    return await this.repository.save(task);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async watchTaskUpdated(id: string) {
 | 
			
		||||
    return this.pubSub.asyncIterator(id);
 | 
			
		||||
  async listTasksByCommitHash(hash: string) {
 | 
			
		||||
    return await this.repository.find({
 | 
			
		||||
      where: { commit: hash },
 | 
			
		||||
      order: { createdAt: 'DESC' },
 | 
			
		||||
    });
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  getRedisTokens(pipeline: Pipeline): [string, string] {
 | 
			
		||||
    return [`pipeline-${pipeline.id}:lck`, `pipeline-${pipeline.id}:tasks`];
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @RabbitRPC({
 | 
			
		||||
    exchange: EXCHANGE_PIPELINE_TASK_TOPIC,
 | 
			
		||||
    routingKey: ROUTE_PIPELINE_TASK_DONE,
 | 
			
		||||
    queue: QUEUE_PIPELINE_TASK_DONE,
 | 
			
		||||
    queueOptions: {
 | 
			
		||||
      autoDelete: true,
 | 
			
		||||
      durable: true,
 | 
			
		||||
    },
 | 
			
		||||
  })
 | 
			
		||||
  async updateByEvent({ taskId, runOn }: { taskId: string; runOn: string }) {
 | 
			
		||||
    try {
 | 
			
		||||
      const [events, task] = await Promise.all([
 | 
			
		||||
        this.eventFlushService.read(taskId),
 | 
			
		||||
        this.findTaskById(taskId),
 | 
			
		||||
      ]);
 | 
			
		||||
      this.logger.info('[updateByEvent] start. taskId: %s', taskId);
 | 
			
		||||
 | 
			
		||||
      for (const event of events) {
 | 
			
		||||
        if (isNil(event.unit)) {
 | 
			
		||||
          if (
 | 
			
		||||
            event.status !== TaskStatuses.pending &&
 | 
			
		||||
            task.status === TaskStatuses.pending
 | 
			
		||||
          ) {
 | 
			
		||||
            task.startedAt = event.emittedAt;
 | 
			
		||||
          } else if (terminalTaskStatuses.includes(event.status)) {
 | 
			
		||||
            task.endedAt = event.emittedAt;
 | 
			
		||||
          }
 | 
			
		||||
          task.status = event.status;
 | 
			
		||||
        } else {
 | 
			
		||||
          let l: PipelineTaskLogs = find<PipelineTaskLogs>(
 | 
			
		||||
            propEq('unit', event.unit),
 | 
			
		||||
            task.logs,
 | 
			
		||||
          );
 | 
			
		||||
 | 
			
		||||
          if (isNil(l)) {
 | 
			
		||||
            l = {
 | 
			
		||||
              unit: event.unit,
 | 
			
		||||
              startedAt: event.emittedAt,
 | 
			
		||||
              endedAt: null,
 | 
			
		||||
              logs: event.message,
 | 
			
		||||
              status: event.status,
 | 
			
		||||
            };
 | 
			
		||||
 | 
			
		||||
            task.logs.push(l);
 | 
			
		||||
          } else {
 | 
			
		||||
            l.logs += event.message;
 | 
			
		||||
          }
 | 
			
		||||
 | 
			
		||||
          if (terminalTaskStatuses.includes(event.status)) {
 | 
			
		||||
            l.endedAt = event.emittedAt;
 | 
			
		||||
          }
 | 
			
		||||
          l.status = event.status;
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
      task.runOn = runOn;
 | 
			
		||||
      await this.repository.update({ id: taskId }, task);
 | 
			
		||||
      this.logger.info('[updateByEvent] success. taskId: %s', taskId);
 | 
			
		||||
      return task;
 | 
			
		||||
    } catch (error) {
 | 
			
		||||
      this.logger.error(
 | 
			
		||||
        { error },
 | 
			
		||||
        '[updateByEvent] failed. taskId: %s',
 | 
			
		||||
        taskId,
 | 
			
		||||
      );
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async stopTask(task: PipelineTask) {
 | 
			
		||||
    if (isNil(task.runOn)) {
 | 
			
		||||
      throw new BadRequestException(
 | 
			
		||||
        "the task have not running instance on database. field 'runOn' is nil",
 | 
			
		||||
      );
 | 
			
		||||
    }
 | 
			
		||||
    await this.amqpConnection.request({
 | 
			
		||||
      exchange: EXCHANGE_PIPELINE_TASK_TOPIC,
 | 
			
		||||
      routingKey: getAppInstanceRouteKey(ROUTE_PIPELINE_TASK_KILL, task.runOn),
 | 
			
		||||
      payload: task,
 | 
			
		||||
    });
 | 
			
		||||
  }
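  // Editor's sketch (hypothetical consumer, not part of this change set): the
  // runner that executes the task would bind the instance-scoped kill route
  // that stopTask publishes to, mirroring how ReposService binds ROUTE_FETCH
  // further below, e.g.
  //
  //   @RabbitRPC({
  //     exchange: EXCHANGE_PIPELINE_TASK_TOPIC,
  //     routingKey: getSelfInstanceRouteKey(ROUTE_PIPELINE_TASK_KILL),
  //     queue: getSelfInstanceQueueKey('pipeline-task-kill'),
  //     queueOptions: { autoDelete: true },
  //   })
  //   async onKillTask(task: PipelineTask) { /* terminate the running unit */ }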
 | 
			
		||||
}
 | 
			
		||||
 
src/pipelines/commit-logs.resolver.spec.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import { Test, TestingModule } from '@nestjs/testing';
import { PipelineTasksService } from '../pipeline-tasks/pipeline-tasks.service';
import { CommitLogsResolver } from './commit-logs.resolver';
import { PipelinesService } from './pipelines.service';

describe('CommitLogsResolver', () => {
  let resolver: CommitLogsResolver;

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        CommitLogsResolver,
        {
          provide: PipelinesService,
          useValue: {},
        },
        {
          provide: PipelineTasksService,
          useValue: {},
        },
      ],
    }).compile();

    resolver = module.get<CommitLogsResolver>(CommitLogsResolver);
  });

  it('should be defined', () => {
    expect(resolver).toBeDefined();
  });
});

src/pipelines/commit-logs.resolver.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import { Query } from '@nestjs/graphql';
import {
  Args,
  Parent,
  ResolveField,
  Resolver,
  Subscription,
} from '@nestjs/graphql';
import { PipelineTasksService } from '../pipeline-tasks/pipeline-tasks.service';
import { Commit, LogFields } from '../repos/dtos/log-list.model';
import { PipelinesService } from './pipelines.service';

@Resolver(() => Commit)
export class CommitLogsResolver {
  constructor(
    private readonly service: PipelinesService,
    private readonly taskServices: PipelineTasksService,
  ) {}
  @Subscription(() => String, { resolve: (val) => val, nullable: true })
  async syncCommits(
    @Args('pipelineId', { type: () => String })
    pipelineId: string,
    @Args('appInstance', { type: () => String, nullable: true })
    appInstance?: string,
  ) {
    const pipeline = await this.service.findOneWithProject(pipelineId);
    const syncCommitsPromise = this.service.syncCommits(pipeline, appInstance);
    return (async function* () {
      yield await syncCommitsPromise;
    })();
  }
  @ResolveField()
  async tasks(@Parent() commit: LogFields) {
    return await this.taskServices.listTasksByCommitHash(commit.hash);
  }

  @Query(() => [Commit], { nullable: true })
  async commits(@Args('pipelineId', { type: () => String }) id: string) {
    const pipeline = await this.service.findOneWithProject(id);
    return await this.service.listCommits(pipeline);
  }
}
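// Editor's note: syncCommits above returns a one-element async generator; that
// is how a single value (the instance name reported by the repo fetch RPC) is
// pushed through a GraphQL Subscription before the stream completes. The
// listLogsForPipeline subscription removed further below used the same trick
// with `yield await job.finished()`.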
@@ -1,5 +1,9 @@
import { InputType } from '@nestjs/graphql';
import { InputType, OmitType } from '@nestjs/graphql';
import { CreatePipelineInput } from './create-pipeline.input';

@InputType()
export class UpdatePipelineInput extends CreatePipelineInput {}
export class UpdatePipelineInput extends OmitType(CreatePipelineInput, [
  'projectId',
]) {
  id: string;
}

@@ -3,16 +3,25 @@ import { PipelinesResolver } from './pipelines.resolver';
 | 
			
		||||
import { PipelinesService } from './pipelines.service';
 | 
			
		||||
import { TypeOrmModule } from '@nestjs/typeorm';
 | 
			
		||||
import { Pipeline } from './pipeline.entity';
 | 
			
		||||
import { BullModule } from '@nestjs/bull';
 | 
			
		||||
import { LIST_LOGS_TASK } from '../repos/repos.constants';
 | 
			
		||||
import { CommitLogsResolver } from './commit-logs.resolver';
 | 
			
		||||
import { PipelineTasksModule } from '../pipeline-tasks/pipeline-tasks.module';
 | 
			
		||||
import { ReposModule } from '../repos/repos.module';
 | 
			
		||||
import { RabbitMQModule } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
import { ConfigModule, ConfigService } from '@nestjs/config';
 | 
			
		||||
 | 
			
		||||
@Module({
 | 
			
		||||
  imports: [
 | 
			
		||||
    TypeOrmModule.forFeature([Pipeline]),
 | 
			
		||||
    BullModule.registerQueue({
 | 
			
		||||
      name: LIST_LOGS_TASK,
 | 
			
		||||
    PipelineTasksModule,
 | 
			
		||||
    RabbitMQModule.forRootAsync(RabbitMQModule, {
 | 
			
		||||
      imports: [ConfigModule],
 | 
			
		||||
      useFactory: (configService: ConfigService) => ({
 | 
			
		||||
        uri: configService.get<string>('db.rabbitmq.uri'),
 | 
			
		||||
        exchanges: [],
 | 
			
		||||
      }),
 | 
			
		||||
      inject: [ConfigService],
 | 
			
		||||
    }),
 | 
			
		||||
  ],
 | 
			
		||||
  providers: [PipelinesResolver, PipelinesService],
 | 
			
		||||
  providers: [PipelinesResolver, PipelinesService, CommitLogsResolver],
 | 
			
		||||
})
 | 
			
		||||
export class PipelinesModule {}
 | 
			
		||||
 
 | 
			
		||||
@@ -1,39 +1,37 @@
 | 
			
		||||
import { Args, Mutation, Query, Resolver, Subscription } from '@nestjs/graphql';
 | 
			
		||||
import { Args, Mutation, Query, Resolver } from '@nestjs/graphql';
 | 
			
		||||
import { CreatePipelineInput } from './dtos/create-pipeline.input';
 | 
			
		||||
import { UpdatePipelineInput } from './dtos/update-pipeline.input';
 | 
			
		||||
import { Pipeline } from './pipeline.entity';
 | 
			
		||||
import { PipelinesService } from './pipelines.service';
 | 
			
		||||
import { ListPipelineArgs } from './dtos/list-pipelines.args';
 | 
			
		||||
import { LogList } from '../repos/dtos/log-list.model';
 | 
			
		||||
 | 
			
		||||
@Resolver()
 | 
			
		||||
export class PipelinesResolver {
 | 
			
		||||
  constructor(private readonly service: PipelinesService) {}
 | 
			
		||||
  @Query(() => [Pipeline])
 | 
			
		||||
  async listPipelines(@Args() dto: ListPipelineArgs) {
 | 
			
		||||
  async pipelines(@Args() dto: ListPipelineArgs) {
 | 
			
		||||
    return await this.service.list(dto);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Query(() => Pipeline)
 | 
			
		||||
  async findPipeline(@Args('id', { type: () => String }) id: string) {
 | 
			
		||||
  async pipeline(@Args('id', { type: () => String }) id: string) {
 | 
			
		||||
    return await this.service.findOne(id);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Mutation(() => Pipeline)
 | 
			
		||||
  async createPipeline(
 | 
			
		||||
    @Args('pipeline', { type: () => CreatePipelineInput })
 | 
			
		||||
    dto: UpdatePipelineInput,
 | 
			
		||||
    dto: CreatePipelineInput,
 | 
			
		||||
  ) {
 | 
			
		||||
    return await this.service.create(dto);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Mutation(() => Pipeline)
 | 
			
		||||
  async modifyPipeline(
 | 
			
		||||
    @Args('id', { type: () => String }) id: string,
 | 
			
		||||
    @Args('Pipeline', { type: () => UpdatePipelineInput })
 | 
			
		||||
  async updatePipeline(
 | 
			
		||||
    @Args('pipeline', { type: () => UpdatePipelineInput })
 | 
			
		||||
    dto: UpdatePipelineInput,
 | 
			
		||||
  ) {
 | 
			
		||||
    const tmp = await this.service.update(id, dto);
 | 
			
		||||
    const tmp = await this.service.update(dto);
 | 
			
		||||
    console.log(tmp);
 | 
			
		||||
    return tmp;
 | 
			
		||||
  }
 | 
			
		||||
@@ -42,16 +40,4 @@ export class PipelinesResolver {
 | 
			
		||||
  async deletePipeline(@Args('id', { type: () => String }) id: string) {
 | 
			
		||||
    return await this.service.remove(id);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Subscription(() => LogList, {
 | 
			
		||||
    resolve: (value) => {
 | 
			
		||||
      return value;
 | 
			
		||||
    },
 | 
			
		||||
  })
 | 
			
		||||
  async listLogsForPipeline(@Args('id', { type: () => String }) id: string) {
 | 
			
		||||
    const job = await this.service.listLogsForPipeline(id);
 | 
			
		||||
    return (async function* () {
 | 
			
		||||
      yield await job.finished();
 | 
			
		||||
    })();
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -2,18 +2,14 @@ import { Test, TestingModule } from '@nestjs/testing';
 | 
			
		||||
import { PipelinesService } from './pipelines.service';
 | 
			
		||||
import { Pipeline } from './pipeline.entity';
 | 
			
		||||
import { getRepositoryToken } from '@nestjs/typeorm';
 | 
			
		||||
import { getQueueToken } from '@nestjs/bull';
 | 
			
		||||
import { LIST_LOGS_TASK } from '../repos/repos.constants';
 | 
			
		||||
import { Repository } from 'typeorm';
 | 
			
		||||
import { Project } from '../projects/project.entity';
 | 
			
		||||
import { Job, Queue } from 'bull';
 | 
			
		||||
import { ListLogsOption } from '../repos/models/list-logs.options';
 | 
			
		||||
import { AmqpConnection } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
 | 
			
		||||
describe('PipelinesService', () => {
 | 
			
		||||
  let service: PipelinesService;
 | 
			
		||||
  let repository: Repository<Pipeline>;
 | 
			
		||||
  let pipeline: Pipeline;
 | 
			
		||||
  let queue: Queue<ListLogsOption>;
 | 
			
		||||
 | 
			
		||||
  beforeEach(async () => {
 | 
			
		||||
    pipeline = Object.assign(new Pipeline(), {
 | 
			
		||||
@@ -37,33 +33,17 @@ describe('PipelinesService', () => {
 | 
			
		||||
          },
 | 
			
		||||
        },
 | 
			
		||||
        {
 | 
			
		||||
          provide: getQueueToken(LIST_LOGS_TASK),
 | 
			
		||||
          useValue: {
 | 
			
		||||
            add: jest.fn().mockImplementation(() => ({ id: 1 } as Job)),
 | 
			
		||||
          },
 | 
			
		||||
          provide: AmqpConnection,
 | 
			
		||||
          useValue: {},
 | 
			
		||||
        },
 | 
			
		||||
      ],
 | 
			
		||||
    }).compile();
 | 
			
		||||
 | 
			
		||||
    service = module.get<PipelinesService>(PipelinesService);
 | 
			
		||||
    repository = module.get(getRepositoryToken(Pipeline));
 | 
			
		||||
    queue = module.get(getQueueToken(LIST_LOGS_TASK));
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  it('should be defined', () => {
 | 
			
		||||
    expect(service).toBeDefined();
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  describe('listLogsForPipeline', () => {
 | 
			
		||||
    it('should send task to queue.', async () => {
 | 
			
		||||
      const add = jest.spyOn(queue, 'add');
 | 
			
		||||
      await expect(
 | 
			
		||||
        service.listLogsForPipeline('test-pipeline'),
 | 
			
		||||
      ).resolves.toEqual({ id: 1 });
 | 
			
		||||
      expect(add).toBeCalledWith({
 | 
			
		||||
        project: pipeline.project,
 | 
			
		||||
        branch: pipeline.branch,
 | 
			
		||||
      });
 | 
			
		||||
    });
 | 
			
		||||
  });
 | 
			
		||||
});
 | 
			
		||||
 
 | 
			
		||||
@@ -6,10 +6,16 @@ import { BaseDbService } from '../commons/services/base-db.service';
 | 
			
		||||
import { CreatePipelineInput } from './dtos/create-pipeline.input';
 | 
			
		||||
import { UpdatePipelineInput } from './dtos/update-pipeline.input';
 | 
			
		||||
import { ListPipelineArgs } from './dtos/list-pipelines.args';
 | 
			
		||||
import { InjectQueue } from '@nestjs/bull';
 | 
			
		||||
import { LIST_LOGS_TASK } from '../repos/repos.constants';
 | 
			
		||||
import { Queue } from 'bull';
 | 
			
		||||
import { ListLogsOption } from '../repos/models/list-logs.options';
 | 
			
		||||
import {
 | 
			
		||||
  EXCHANGE_REPO,
 | 
			
		||||
  ROUTE_FETCH,
 | 
			
		||||
  ROUTE_LIST_COMMITS,
 | 
			
		||||
} from '../repos/repos.constants';
 | 
			
		||||
import { AmqpConnection } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
import { Commit } from '../repos/dtos/log-list.model';
 | 
			
		||||
import { getAppInstanceRouteKey } from '../commons/utils/rabbit-mq';
 | 
			
		||||
import { ApplicationException } from '../commons/exceptions/application.exception';
 | 
			
		||||
import { plainToClass } from 'class-transformer';
 | 
			
		||||
 | 
			
		||||
@Injectable()
 | 
			
		||||
export class PipelinesService extends BaseDbService<Pipeline> {
 | 
			
		||||
@@ -17,8 +23,7 @@ export class PipelinesService extends BaseDbService<Pipeline> {
 | 
			
		||||
  constructor(
 | 
			
		||||
    @InjectRepository(Pipeline)
 | 
			
		||||
    readonly repository: Repository<Pipeline>,
 | 
			
		||||
    @InjectQueue(LIST_LOGS_TASK)
 | 
			
		||||
    private readonly listLogsQueue: Queue<ListLogsOption>,
 | 
			
		||||
    private readonly amqpConnection: AmqpConnection,
 | 
			
		||||
  ) {
 | 
			
		||||
    super(repository);
 | 
			
		||||
  }
 | 
			
		||||
@@ -26,30 +31,48 @@ export class PipelinesService extends BaseDbService<Pipeline> {
 | 
			
		||||
    return this.repository.find(dto);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async findOneWithProject(id: string) {
 | 
			
		||||
    return await this.repository.findOne({
 | 
			
		||||
      where: { id },
 | 
			
		||||
      relations: ['project'],
 | 
			
		||||
    });
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async create(dto: CreatePipelineInput) {
 | 
			
		||||
    await this.isDuplicateEntity(dto);
 | 
			
		||||
    return await this.repository.save(this.repository.create(dto));
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async update(id: string, dto: UpdatePipelineInput) {
 | 
			
		||||
    await this.isDuplicateEntityForUpdate(id, dto);
 | 
			
		||||
    const old = await this.findOne(id);
 | 
			
		||||
  async update(dto: UpdatePipelineInput) {
 | 
			
		||||
    const old = await this.findOne(dto.id);
 | 
			
		||||
    await this.isDuplicateEntityForUpdate(old, dto);
 | 
			
		||||
    return await this.repository.save(this.repository.merge(old, dto));
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async remove(id: string) {
 | 
			
		||||
    return (await this.repository.softDelete({ id })).affected;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async listLogsForPipeline(id: string) {
 | 
			
		||||
    const pipeline = await this.repository.findOneOrFail({
 | 
			
		||||
      where: { id },
 | 
			
		||||
      relations: ['project'],
 | 
			
		||||
  async syncCommits(pipeline: Pipeline, appInstance?: string) {
 | 
			
		||||
    return await this.amqpConnection.request<string | null>({
 | 
			
		||||
      exchange: EXCHANGE_REPO,
 | 
			
		||||
      routingKey: getAppInstanceRouteKey(ROUTE_FETCH, appInstance),
 | 
			
		||||
      payload: pipeline,
 | 
			
		||||
      timeout: 120_000,
 | 
			
		||||
    });
 | 
			
		||||
    const job = await this.listLogsQueue.add({
 | 
			
		||||
      project: pipeline.project,
 | 
			
		||||
      branch: pipeline.branch,
 | 
			
		||||
    });
 | 
			
		||||
    return job;
 | 
			
		||||
  }
 | 
			
		||||
  async listCommits(pipeline: Pipeline) {
 | 
			
		||||
    return await this.amqpConnection
 | 
			
		||||
      .request<[Error, Commit[]]>({
 | 
			
		||||
        exchange: EXCHANGE_REPO,
 | 
			
		||||
        routingKey: ROUTE_LIST_COMMITS,
 | 
			
		||||
        payload: pipeline,
 | 
			
		||||
        timeout: 30_000,
 | 
			
		||||
      })
 | 
			
		||||
      .then(([error, list]) => {
 | 
			
		||||
        if (error) {
 | 
			
		||||
          throw new ApplicationException(error);
 | 
			
		||||
        }
 | 
			
		||||
        return plainToClass(Commit, list);
 | 
			
		||||
      });
 | 
			
		||||
  }
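  // Editor's note: the RPC reply arrives as plain JSON, so the
  // plainToClass(Commit, list) call above is what revives the serialized date
  // strings into Date instances via the @Type(() => Date) decorator on the
  // Commit model.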
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -21,7 +21,10 @@ export class CreateProjectInput {
 | 
			
		||||
  comment: string;
 | 
			
		||||
 | 
			
		||||
  @Matches(
 | 
			
		||||
    /^(?:ssh:\/\/)?(?:[\w\d-_]+@)(?:[\w\d-_]+\.)*\w{2,10}(?::\d{1,5})?(?:\/[\w\d-_.]+)*/,
 | 
			
		||||
    /^(?:ssh:\/\/)?(?:[\w\d-_]+@)?(?:[\w\d-_]+\.)*\w{2,10}(?::\d{1,5})?(?:\/[\w\d-_.]+)*/,
 | 
			
		||||
    {
 | 
			
		||||
      message: 'wrong ssh url',
 | 
			
		||||
    },
 | 
			
		||||
  )
 | 
			
		||||
  @MaxLength(256)
 | 
			
		||||
  sshUrl: string;
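  // Editor's note: making the `user@` group optional lets plain-host URLs pass
  // validation as well. A quick check with hypothetical sample URLs:
  //
  //   const pattern = /^(?:ssh:\/\/)?(?:[\w\d-_]+@)?(?:[\w\d-_]+\.)*\w{2,10}(?::\d{1,5})?(?:\/[\w\d-_.]+)*/;
  //   pattern.test('git@git.example.com:2222/fennec/fennec.git'); // true (user@host form)
  //   pattern.test('ssh://git.example.com/fennec/fennec.git'); // true, newly accepted without a user part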
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,9 @@
 | 
			
		||||
import { InputType } from '@nestjs/graphql';
 | 
			
		||||
import { IsUUID } from 'class-validator';
 | 
			
		||||
import { CreateProjectInput } from './create-project.input';
 | 
			
		||||
 | 
			
		||||
@InputType()
 | 
			
		||||
export class UpdateProjectInput extends CreateProjectInput {}
 | 
			
		||||
export class UpdateProjectInput extends CreateProjectInput {
 | 
			
		||||
  @IsUUID()
 | 
			
		||||
  id: string;
 | 
			
		||||
}
 | 
			
		||||
 
src/projects/projects.constants.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
export const EXCHANGE_PROJECT_TOPIC = 'project.topic';
export const EXCHANGE_PROJECT_FANOUT = 'project.fanout';
export const ROUTE_PROJECT_CHANGE = 'project-change';
@@ -3,9 +3,31 @@ import { ProjectsService } from './projects.service';
 | 
			
		||||
import { ProjectsResolver } from './projects.resolver';
 | 
			
		||||
import { TypeOrmModule } from '@nestjs/typeorm';
 | 
			
		||||
import { Project } from './project.entity';
 | 
			
		||||
import { RabbitMQModule } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
import { ConfigModule, ConfigService } from '@nestjs/config';
 | 
			
		||||
import { EXCHANGE_PROJECT_FANOUT } from './projects.constants';
 | 
			
		||||
 | 
			
		||||
@Module({
 | 
			
		||||
  imports: [TypeOrmModule.forFeature([Project])],
 | 
			
		||||
  imports: [
 | 
			
		||||
    TypeOrmModule.forFeature([Project]),
 | 
			
		||||
    RabbitMQModule.forRootAsync(RabbitMQModule, {
 | 
			
		||||
      imports: [ConfigModule],
 | 
			
		||||
      useFactory: (configService: ConfigService) => ({
 | 
			
		||||
        uri: configService.get<string>('db.rabbitmq.uri'),
 | 
			
		||||
        exchanges: [
 | 
			
		||||
          {
 | 
			
		||||
            name: EXCHANGE_PROJECT_FANOUT,
 | 
			
		||||
            type: 'fanout',
 | 
			
		||||
            options: {
 | 
			
		||||
              durable: false,
 | 
			
		||||
              autoDelete: true,
 | 
			
		||||
            },
 | 
			
		||||
          },
 | 
			
		||||
        ],
 | 
			
		||||
      }),
 | 
			
		||||
      inject: [ConfigService],
 | 
			
		||||
    }),
 | 
			
		||||
  ],
 | 
			
		||||
  providers: [ProjectsService, ProjectsResolver],
 | 
			
		||||
  exports: [ProjectsService],
 | 
			
		||||
})
 | 
			
		||||
 
 | 
			
		||||
@@ -8,12 +8,12 @@ import { ProjectsService } from './projects.service';
 | 
			
		||||
export class ProjectsResolver {
 | 
			
		||||
  constructor(private readonly service: ProjectsService) {}
 | 
			
		||||
  @Query(() => [Project])
 | 
			
		||||
  async findProjects() {
 | 
			
		||||
  async projects() {
 | 
			
		||||
    return await this.service.list();
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Query(() => Project)
 | 
			
		||||
  async findProject(@Args('id', { type: () => String }) id: string) {
 | 
			
		||||
  async project(@Args('id', { type: () => String }) id: string) {
 | 
			
		||||
    return await this.service.findOne(id);
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
@@ -26,18 +26,17 @@ export class ProjectsResolver {
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Mutation(() => Project)
 | 
			
		||||
  async modifyProject(
 | 
			
		||||
    @Args('id', { type: () => String }) id: string,
 | 
			
		||||
  async updateProject(
 | 
			
		||||
    @Args('project', { type: () => UpdateProjectInput })
 | 
			
		||||
    dto: UpdateProjectInput,
 | 
			
		||||
  ) {
 | 
			
		||||
    const tmp = await this.service.update(id, dto);
 | 
			
		||||
    const tmp = await this.service.update(dto);
 | 
			
		||||
    console.log(tmp);
 | 
			
		||||
    return tmp;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @Mutation(() => Number)
 | 
			
		||||
  async deleteProject(@Args('id', { type: () => String }) id: string) {
 | 
			
		||||
  async removeProject(@Args('id', { type: () => String }) id: string) {
 | 
			
		||||
    return await this.service.remove(id);
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -2,6 +2,7 @@ import { Test, TestingModule } from '@nestjs/testing';
 | 
			
		||||
import { ProjectsService } from './projects.service';
 | 
			
		||||
import { getRepositoryToken } from '@nestjs/typeorm';
 | 
			
		||||
import { Project } from './project.entity';
 | 
			
		||||
import { AmqpConnection } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
 | 
			
		||||
describe('ProjectsService', () => {
 | 
			
		||||
  let service: ProjectsService;
 | 
			
		||||
@@ -14,6 +15,10 @@ describe('ProjectsService', () => {
 | 
			
		||||
          provide: getRepositoryToken(Project),
 | 
			
		||||
          useValue: {},
 | 
			
		||||
        },
 | 
			
		||||
        {
 | 
			
		||||
          provide: AmqpConnection,
 | 
			
		||||
          useValue: {},
 | 
			
		||||
        },
 | 
			
		||||
      ],
 | 
			
		||||
    }).compile();
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -5,6 +5,11 @@ import { Repository } from 'typeorm';
 | 
			
		||||
import { CreateProjectInput } from './dtos/create-project.input';
 | 
			
		||||
import { Project } from './project.entity';
 | 
			
		||||
import { UpdateProjectInput } from './dtos/update-project.input';
 | 
			
		||||
import { AmqpConnection } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
import {
 | 
			
		||||
  EXCHANGE_PROJECT_FANOUT,
 | 
			
		||||
  ROUTE_PROJECT_CHANGE,
 | 
			
		||||
} from './projects.constants';
 | 
			
		||||
 | 
			
		||||
@Injectable()
 | 
			
		||||
export class ProjectsService extends BaseDbService<Project> {
 | 
			
		||||
@@ -12,6 +17,7 @@ export class ProjectsService extends BaseDbService<Project> {
 | 
			
		||||
  constructor(
 | 
			
		||||
    @InjectRepository(Project)
 | 
			
		||||
    readonly repository: Repository<Project>,
 | 
			
		||||
    private readonly amqpConnection: AmqpConnection,
 | 
			
		||||
  ) {
 | 
			
		||||
    super(repository);
 | 
			
		||||
  }
 | 
			
		||||
@@ -25,10 +31,15 @@ export class ProjectsService extends BaseDbService<Project> {
 | 
			
		||||
    return await this.repository.save(this.repository.create(dto));
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async update(id: string, dto: UpdateProjectInput) {
 | 
			
		||||
    await this.isDuplicateEntityForUpdate(id, dto);
 | 
			
		||||
    const old = await this.findOne(id);
 | 
			
		||||
    return await this.repository.save(this.repository.merge(old, dto));
 | 
			
		||||
  async update(dto: UpdateProjectInput) {
 | 
			
		||||
    await this.isDuplicateEntityForUpdate(dto.id, dto);
 | 
			
		||||
    const old = await this.findOne(dto.id);
 | 
			
		||||
    const project = await this.repository.save(this.repository.merge(old, dto));
 | 
			
		||||
    this.amqpConnection.publish(EXCHANGE_PROJECT_FANOUT, ROUTE_PROJECT_CHANGE, [
 | 
			
		||||
      project,
 | 
			
		||||
      old,
 | 
			
		||||
    ]);
 | 
			
		||||
    return project;
 | 
			
		||||
  }
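  // Editor's note: the [project, old] pair published above is the payload that
  // ReposService.refreshRepo destructures as `[project]` before it deletes the
  // cached workspace for the changed project.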
 | 
			
		||||
 | 
			
		||||
  async remove(id: string) {
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +1,20 @@
 | 
			
		||||
import { ObjectType, Field } from '@nestjs/graphql';
 | 
			
		||||
import { Type } from 'class-transformer';
 | 
			
		||||
import { LogResult, DefaultLogFields } from 'simple-git';
 | 
			
		||||
import { PipelineTask } from '../../pipeline-tasks/pipeline-task.entity';
 | 
			
		||||
 | 
			
		||||
@ObjectType()
 | 
			
		||||
export class Commit {
 | 
			
		||||
  hash: string;
 | 
			
		||||
  @Type(() => Date)
 | 
			
		||||
  date: Date;
 | 
			
		||||
  message: string;
 | 
			
		||||
  refs: string;
 | 
			
		||||
  body: string;
 | 
			
		||||
  author_name: string;
 | 
			
		||||
  author_email: string;
 | 
			
		||||
  tasks: PipelineTask[];
 | 
			
		||||
}
 | 
			
		||||
@ObjectType()
 | 
			
		||||
export class LogFields {
 | 
			
		||||
  hash: string;
 | 
			
		||||
@@ -10,6 +24,7 @@ export class LogFields {
 | 
			
		||||
  body: string;
 | 
			
		||||
  author_name: string;
 | 
			
		||||
  author_email: string;
 | 
			
		||||
  tasks: PipelineTask[];
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@ObjectType()
 | 
			
		||||
 
 | 
			
		||||
@@ -1,14 +0,0 @@
 | 
			
		||||
import { ReposService } from './repos.service';
 | 
			
		||||
import { Processor, Process } from '@nestjs/bull';
 | 
			
		||||
import { Job } from 'bull';
 | 
			
		||||
import { ListLogsOption } from './models/list-logs.options';
 | 
			
		||||
import { LIST_LOGS_TASK } from './repos.constants';
 | 
			
		||||
@Processor(LIST_LOGS_TASK)
 | 
			
		||||
export class ListLogsConsumer {
 | 
			
		||||
  constructor(private readonly service: ReposService) {}
 | 
			
		||||
  @Process()
 | 
			
		||||
  async listLogs(job: Job<ListLogsOption>) {
 | 
			
		||||
    const logs = await this.service.listLogs(job.data);
 | 
			
		||||
    return logs;
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
@@ -1,3 +1,6 @@
 | 
			
		||||
export const LIST_LOGS_TASK = 'LIST_LOGS_TASK';
 | 
			
		||||
export const LIST_LOGS_PUB_SUB = 'LIST_LOGS_PUB_SUB';
 | 
			
		||||
export const LIST_LOGS_DONE = 'LIST_LOGS_DONE';
 | 
			
		||||
export const EXCHANGE_REPO = 'fennec.repo';
 | 
			
		||||
export const ROUTE_FETCH = 'fetch';
 | 
			
		||||
export const ROUTE_LIST_COMMITS = 'list-commits';
 | 
			
		||||
export const QUEUE_LIST_COMMITS = 'list-commits';
 | 
			
		||||
export const QUEUE_FETCH = 'repo-fetch';
 | 
			
		||||
export const QUEUE_REFRESH_REPO = 'refresh-repo';
 | 
			
		||||
 
 | 
			
		||||
@@ -3,23 +3,37 @@ import { TypeOrmModule } from '@nestjs/typeorm';
 | 
			
		||||
import { Project } from '../projects/project.entity';
 | 
			
		||||
import { ReposResolver } from './repos.resolver';
 | 
			
		||||
import { ReposService } from './repos.service';
 | 
			
		||||
import { ConfigModule } from '@nestjs/config';
 | 
			
		||||
import { ConfigModule, ConfigService } from '@nestjs/config';
 | 
			
		||||
import { ProjectsModule } from '../projects/projects.module';
 | 
			
		||||
import { BullModule } from '@nestjs/bull';
 | 
			
		||||
import { LIST_LOGS_TASK, LIST_LOGS_PUB_SUB } from './repos.constants';
 | 
			
		||||
import { PubSub } from 'graphql-subscriptions';
 | 
			
		||||
import { ListLogsConsumer } from './list-logs.consumer';
 | 
			
		||||
import { EXCHANGE_REPO } from './repos.constants';
 | 
			
		||||
import { RabbitMQModule } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
import { CommonsModule } from '../commons/commons.module';
 | 
			
		||||
 | 
			
		||||
@Module({
 | 
			
		||||
  imports: [
 | 
			
		||||
    TypeOrmModule.forFeature([Project]),
 | 
			
		||||
    ConfigModule,
 | 
			
		||||
    ProjectsModule,
 | 
			
		||||
    BullModule.registerQueue({
 | 
			
		||||
      name: LIST_LOGS_TASK,
 | 
			
		||||
    CommonsModule,
 | 
			
		||||
    RabbitMQModule.forRootAsync(RabbitMQModule, {
 | 
			
		||||
      imports: [ConfigModule],
 | 
			
		||||
      useFactory: (configService: ConfigService) => ({
 | 
			
		||||
        uri: configService.get<string>('db.rabbitmq.uri'),
 | 
			
		||||
        exchanges: [
 | 
			
		||||
          {
 | 
			
		||||
            name: EXCHANGE_REPO,
 | 
			
		||||
            type: 'topic',
 | 
			
		||||
            options: {
 | 
			
		||||
              durable: true,
 | 
			
		||||
              autoDelete: true,
 | 
			
		||||
            },
 | 
			
		||||
          },
 | 
			
		||||
        ],
 | 
			
		||||
      }),
 | 
			
		||||
      inject: [ConfigService],
 | 
			
		||||
    }),
 | 
			
		||||
  ],
 | 
			
		||||
  providers: [ReposResolver, ReposService, ListLogsConsumer],
 | 
			
		||||
  providers: [ReposResolver, ReposService],
 | 
			
		||||
  exports: [ReposService],
 | 
			
		||||
})
 | 
			
		||||
export class ReposModule {}
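// Editor's sketch: the RabbitMQModule.forRootAsync block above is now repeated
// almost verbatim in PipelinesModule, ProjectsModule and ReposModule. A small
// shared helper (hypothetical, not part of this change set) would keep the URI
// lookup in one place:
import { RabbitMQModule } from '@golevelup/nestjs-rabbitmq';
import { ConfigModule, ConfigService } from '@nestjs/config';

export const rabbitMQForFeature = (
  exchanges: Array<{
    name: string;
    type?: string;
    options?: { durable?: boolean; autoDelete?: boolean };
  }> = [],
) =>
  RabbitMQModule.forRootAsync(RabbitMQModule, {
    imports: [ConfigModule],
    useFactory: (configService: ConfigService) => ({
      uri: configService.get<string>('db.rabbitmq.uri'),
      exchanges,
    }),
    inject: [ConfigService],
  });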
 | 
			
		||||
 
 | 
			
		||||
@@ -9,6 +9,9 @@ import configuration from '../commons/config/configuration';
 | 
			
		||||
import { PipelineTask } from '../pipeline-tasks/pipeline-task.entity';
 | 
			
		||||
import { join } from 'path';
 | 
			
		||||
import { readFile } from 'fs/promises';
 | 
			
		||||
import { getLoggerToken, PinoLogger } from 'nestjs-pino';
 | 
			
		||||
import { Nack } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
import { getInstanceName } from '../commons/utils/rabbit-mq';
 | 
			
		||||
 | 
			
		||||
const getTest1Project = () =>
 | 
			
		||||
  ({
 | 
			
		||||
@@ -45,6 +48,10 @@ describe('ReposService', () => {
 | 
			
		||||
          provide: getRepositoryToken(Project),
 | 
			
		||||
          useFactory: repositoryMockFactory,
 | 
			
		||||
        },
 | 
			
		||||
        {
 | 
			
		||||
          provide: getLoggerToken(ReposService.name),
 | 
			
		||||
          useValue: new PinoLogger({}),
 | 
			
		||||
        },
 | 
			
		||||
      ],
 | 
			
		||||
    }).compile();
 | 
			
		||||
 | 
			
		||||
@@ -139,4 +146,57 @@ describe('ReposService', () => {
 | 
			
		||||
      );
 | 
			
		||||
    });
 | 
			
		||||
  });
 | 
			
		||||
 | 
			
		||||
  describe('fetch', () => {
 | 
			
		||||
    it('success', async () => {
 | 
			
		||||
      const project = new Project();
 | 
			
		||||
      const pipeline = new Pipeline();
 | 
			
		||||
      pipeline.branch = 'test';
 | 
			
		||||
      const fetch = jest.fn((_: any) => Promise.resolve());
 | 
			
		||||
      pipeline.project = project;
 | 
			
		||||
      const getGit = jest.spyOn(service, 'getGit').mockImplementation(() =>
 | 
			
		||||
        Promise.resolve({
 | 
			
		||||
          fetch,
 | 
			
		||||
        } as any),
 | 
			
		||||
      );
 | 
			
		||||
      await expect(service.fetch(pipeline)).resolves.toEqual(getInstanceName());
 | 
			
		||||
      expect(getGit).toBeCalledTimes(1);
 | 
			
		||||
      expect(getGit.mock.calls[0]?.[0]).toEqual(project);
 | 
			
		||||
      expect(fetch).toBeCalledTimes(1);
 | 
			
		||||
      expect(fetch.mock.calls[0]?.[0]).toMatchObject([
 | 
			
		||||
        'origin',
 | 
			
		||||
        'test',
 | 
			
		||||
        '--depth=100',
 | 
			
		||||
      ]);
 | 
			
		||||
    });
 | 
			
		||||
    it('failed a', async () => {
 | 
			
		||||
      const project = new Project();
 | 
			
		||||
      const pipeline = new Pipeline();
 | 
			
		||||
      pipeline.branch = 'test';
 | 
			
		||||
      const fetch = jest.fn((_: any) => Promise.resolve());
 | 
			
		||||
      pipeline.project = project;
 | 
			
		||||
      const getGit = jest
 | 
			
		||||
        .spyOn(service, 'getGit')
 | 
			
		||||
        .mockImplementation(() => Promise.reject('error'));
 | 
			
		||||
      await expect(service.fetch(pipeline)).resolves.toMatchObject(new Nack());
 | 
			
		||||
      expect(getGit).toBeCalledTimes(1);
 | 
			
		||||
      expect(getGit.mock.calls[0]?.[0]).toEqual(project);
 | 
			
		||||
      expect(fetch).toBeCalledTimes(0);
 | 
			
		||||
    });
 | 
			
		||||
    it('failed b', async () => {
 | 
			
		||||
      const project = new Project();
 | 
			
		||||
      const pipeline = new Pipeline();
 | 
			
		||||
      pipeline.branch = 'test';
 | 
			
		||||
      const fetch = jest.fn((_: any) => Promise.reject('error'));
 | 
			
		||||
      pipeline.project = project;
 | 
			
		||||
      const getGit = jest.spyOn(service, 'getGit').mockImplementation(() =>
 | 
			
		||||
        Promise.resolve({
 | 
			
		||||
          fetch,
 | 
			
		||||
        } as any),
 | 
			
		||||
      );
 | 
			
		||||
      await expect(service.fetch(pipeline)).resolves.toMatchObject(new Nack());
 | 
			
		||||
      expect(getGit).toBeCalledTimes(1);
 | 
			
		||||
      expect(fetch).toBeCalledTimes(1);
 | 
			
		||||
    });
 | 
			
		||||
  });
 | 
			
		||||
});
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,4 @@
 | 
			
		||||
import { ListLogsOption } from './models/list-logs.options';
 | 
			
		||||
import { Pipeline } from './../pipelines/pipeline.entity';
 | 
			
		||||
import { PipelineTask } from './../pipeline-tasks/pipeline-task.entity';
 | 
			
		||||
import { Injectable, NotFoundException } from '@nestjs/common';
 | 
			
		||||
import { InjectRepository } from '@nestjs/typeorm';
 | 
			
		||||
@@ -10,8 +9,31 @@ import { gitP } from 'simple-git';
 | 
			
		||||
import { Repository } from 'typeorm';
 | 
			
		||||
import { Project } from '../projects/project.entity';
 | 
			
		||||
import { ListBranchesArgs } from './dtos/list-branches.args';
 | 
			
		||||
import { ListLogsArgs } from './dtos/list-logs.args';
 | 
			
		||||
import { ConfigService } from '@nestjs/config';
 | 
			
		||||
import { Commit } from './dtos/log-list.model';
 | 
			
		||||
import { Nack, RabbitRPC, RabbitSubscribe } from '@golevelup/nestjs-rabbitmq';
 | 
			
		||||
import { Pipeline } from '../pipelines/pipeline.entity';
 | 
			
		||||
import { InjectPinoLogger, PinoLogger } from 'nestjs-pino';
 | 
			
		||||
import {
 | 
			
		||||
  EXCHANGE_REPO,
 | 
			
		||||
  QUEUE_FETCH,
 | 
			
		||||
  QUEUE_LIST_COMMITS,
 | 
			
		||||
  QUEUE_REFRESH_REPO,
 | 
			
		||||
  ROUTE_FETCH,
 | 
			
		||||
  ROUTE_LIST_COMMITS,
 | 
			
		||||
} from './repos.constants';
 | 
			
		||||
import { getSelfInstanceQueueKey } from '../commons/utils/rabbit-mq';
 | 
			
		||||
import {
 | 
			
		||||
  getInstanceName,
 | 
			
		||||
  getSelfInstanceRouteKey,
 | 
			
		||||
} from '../commons/utils/rabbit-mq';
 | 
			
		||||
import { ApplicationException } from '../commons/exceptions/application.exception';
 | 
			
		||||
import {
 | 
			
		||||
  EXCHANGE_PROJECT_FANOUT,
 | 
			
		||||
  ROUTE_PROJECT_CHANGE,
 | 
			
		||||
} from '../projects/projects.constants';
 | 
			
		||||
import { RedisMutexService } from '../commons/redis-mutex/redis-mutex.service';
 | 
			
		||||
import { rm } from 'fs/promises';
 | 
			
		||||
 | 
			
		||||
const DEFAULT_REMOTE_NAME = 'origin';
 | 
			
		||||
const INFO_PATH = '@info';
 | 
			
		||||
@@ -21,6 +43,9 @@ export class ReposService {
 | 
			
		||||
    @InjectRepository(Project)
 | 
			
		||||
    private readonly projectRepository: Repository<Project>,
 | 
			
		||||
    private readonly configService: ConfigService,
 | 
			
		||||
    @InjectPinoLogger(ReposService.name)
 | 
			
		||||
    private readonly logger: PinoLogger,
 | 
			
		||||
    private readonly redisMutexService: RedisMutexService,
 | 
			
		||||
  ) {}
 | 
			
		||||
 | 
			
		||||
  getWorkspaceRoot(project: Project): string {
 | 
			
		||||
@@ -31,7 +56,11 @@ export class ReposService {
 | 
			
		||||
    );
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async getGit(project: Project, workspaceRoot?: string) {
 | 
			
		||||
  async getGit(
 | 
			
		||||
    project: Project,
 | 
			
		||||
    workspaceRoot?: string,
 | 
			
		||||
    { fetch = true } = {},
 | 
			
		||||
  ) {
 | 
			
		||||
    if (!workspaceRoot) {
 | 
			
		||||
      workspaceRoot = this.getWorkspaceRoot(project);
 | 
			
		||||
    }
 | 
			
		||||
@@ -44,7 +73,9 @@ export class ReposService {
 | 
			
		||||
      await git.init();
 | 
			
		||||
      await git.addRemote(DEFAULT_REMOTE_NAME, project.sshUrl);
 | 
			
		||||
    }
 | 
			
		||||
    await git.fetch();
 | 
			
		||||
    if (fetch) {
 | 
			
		||||
      await git.fetch();
 | 
			
		||||
    }
 | 
			
		||||
    return git;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
@@ -89,4 +120,113 @@ export class ReposService {
 | 
			
		||||
      encodeURIComponent(`${task.pipeline.name}-${task.commit}`),
 | 
			
		||||
    );
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async checkout4Task(task: PipelineTask): Promise<string> {
 | 
			
		||||
    const path = this.getWorkspaceRootByTask(task);
 | 
			
		||||
    await this.checkout(task, path);
 | 
			
		||||
    return path;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @RabbitRPC({
 | 
			
		||||
    exchange: EXCHANGE_REPO,
 | 
			
		||||
    routingKey: [
 | 
			
		||||
      ROUTE_LIST_COMMITS,
 | 
			
		||||
      getSelfInstanceRouteKey(ROUTE_LIST_COMMITS),
 | 
			
		||||
    ],
 | 
			
		||||
    queue: getSelfInstanceQueueKey(QUEUE_LIST_COMMITS),
 | 
			
		||||
    queueOptions: {
 | 
			
		||||
      autoDelete: true,
 | 
			
		||||
    },
 | 
			
		||||
  })
 | 
			
		||||
  async listCommits(pipeline: Pipeline): Promise<[Error, Commit[]?]> {
 | 
			
		||||
    const git = await this.getGit(pipeline.project, undefined, {
 | 
			
		||||
      fetch: false,
 | 
			
		||||
    });
 | 
			
		||||
    try {
 | 
			
		||||
      const data = await git.log([
 | 
			
		||||
        '-100',
 | 
			
		||||
        '--branches',
 | 
			
		||||
        `remotes/origin/${pipeline.branch}`,
 | 
			
		||||
        '--',
 | 
			
		||||
      ]);
 | 
			
		||||
      return [
 | 
			
		||||
        null,
 | 
			
		||||
        data.all.map(
 | 
			
		||||
          (it) =>
 | 
			
		||||
            ({
 | 
			
		||||
              ...it,
 | 
			
		||||
              date: new Date(it.date),
 | 
			
		||||
            } as Commit),
 | 
			
		||||
        ),
 | 
			
		||||
      ];
 | 
			
		||||
    } catch (error) {
 | 
			
		||||
      this.logger.error(
 | 
			
		||||
        { error, pipeline },
 | 
			
		||||
        '[listCommits] %s',
 | 
			
		||||
        error?.message,
 | 
			
		||||
      );
 | 
			
		||||
      return [new ApplicationException(error)];
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @RabbitRPC({
 | 
			
		||||
    exchange: EXCHANGE_REPO,
 | 
			
		||||
    routingKey: [ROUTE_FETCH, getSelfInstanceRouteKey(ROUTE_FETCH)],
 | 
			
		||||
    queue: getSelfInstanceQueueKey(QUEUE_FETCH),
 | 
			
		||||
    queueOptions: {
 | 
			
		||||
      autoDelete: true,
 | 
			
		||||
    },
 | 
			
		||||
  })
 | 
			
		||||
  async fetch(pipeline: Pipeline): Promise<string | null | Nack> {
 | 
			
		||||
    const unlock = await this.redisMutexService.lock(
 | 
			
		||||
      `repo-project-${pipeline.projectId}`,
 | 
			
		||||
    );
 | 
			
		||||
    try {
 | 
			
		||||
      const git = await this.getGit(pipeline.project, undefined, {
 | 
			
		||||
        fetch: false,
 | 
			
		||||
      });
 | 
			
		||||
      await git.fetch(['origin', pipeline.branch, '--depth=100']);
 | 
			
		||||
      return getInstanceName();
 | 
			
		||||
    } catch (error) {
 | 
			
		||||
      this.logger.error({ error, pipeline }, '[fetch] %s', error?.message);
 | 
			
		||||
      return new Nack();
 | 
			
		||||
    } finally {
 | 
			
		||||
      await unlock();
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  @RabbitSubscribe({
 | 
			
		||||
    exchange: EXCHANGE_PROJECT_FANOUT,
 | 
			
		||||
    routingKey: ROUTE_PROJECT_CHANGE,
 | 
			
		||||
    queue: QUEUE_REFRESH_REPO,
 | 
			
		||||
    queueOptions: {
 | 
			
		||||
      autoDelete: true,
 | 
			
		||||
      durable: true,
 | 
			
		||||
    },
 | 
			
		||||
  })
 | 
			
		||||
  async refreshRepo([project]: [Project]) {
 | 
			
		||||
    this.logger.info({ project }, '[refreshRepo] start');
 | 
			
		||||
    const unlock = await this.redisMutexService.lock(
 | 
			
		||||
      `repo-project-${project.id}`,
 | 
			
		||||
      {
 | 
			
		||||
        timeout: null,
 | 
			
		||||
      },
 | 
			
		||||
    );
 | 
			
		||||
    try {
 | 
			
		||||
      const path = join(
 | 
			
		||||
        this.configService.get<string>('workspaces.root'),
 | 
			
		||||
        encodeURIComponent(project.name),
 | 
			
		||||
      );
 | 
			
		||||
      await rm(path, { recursive: true });
 | 
			
		||||
      this.logger.info({ project }, '[refreshRepo] success');
 | 
			
		||||
    } catch (error) {
 | 
			
		||||
      this.logger.error(
 | 
			
		||||
        { project, error },
 | 
			
		||||
        '[refreshRepo] failed. %s',
 | 
			
		||||
        error.message,
 | 
			
		||||
      );
 | 
			
		||||
    } finally {
 | 
			
		||||
      await unlock();
 | 
			
		||||
    }
 | 
			
		||||
  }
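  // Editor's note: fs/promises rm rejects with ENOENT when the workspace has
  // never been checked out; `rm(path, { recursive: true, force: true })` would
  // make the cleanup a no-op in that case instead of landing in the catch
  // block above.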
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -4,7 +4,6 @@ import { PipelineTasksModule } from '../pipeline-tasks/pipeline-tasks.module';
 | 
			
		||||
import { GiteaWebhooksController } from './gitea-webhooks.controller';
 | 
			
		||||
import { WebhookLog } from './webhook-log.entity';
 | 
			
		||||
import { WebhooksService } from './webhooks.service';
 | 
			
		||||
import { raw } from 'body-parser';
 | 
			
		||||
 | 
			
		||||
@Module({
 | 
			
		||||
  imports: [TypeOrmModule.forFeature([WebhookLog]), PipelineTasksModule],
 | 
			
		||||
@@ -12,9 +11,4 @@ import { raw } from 'body-parser';
 | 
			
		||||
  providers: [WebhooksService],
 | 
			
		||||
})
 | 
			
		||||
export class WebhooksModule {
 | 
			
		||||
  // configure(consumer: MiddlewareConsumer) {
 | 
			
		||||
  //   consumer
 | 
			
		||||
  //     .apply(raw({ type: 'application/json' }))
 | 
			
		||||
  //     .forRoutes(GiteaWebhooksController);
 | 
			
		||||
  // }
 | 
			
		||||
}
 | 
			
		||||
 