feat: create feature log scheduling activity
parent 0b5aeff89d
commit d8a7488f46
@@ -52,4 +52,5 @@ export enum TABLE_NAME {
   DATA_SCHEDULING = 'event_scheduling',
   DATA_SCHEDULING_DEFAULT = 'event_scheduling_default',
+  DATA_SCHEDULING_LOG = 'event_scheduling_log',
 }
@@ -0,0 +1,33 @@
+import { MigrationInterface, QueryRunner } from 'typeorm';
+
+export class DataSchedulingLog1752146975330 implements MigrationInterface {
+  name = 'DataSchedulingLog1752146975330';
+
+  public async up(queryRunner: QueryRunner): Promise<void> {
+    await queryRunner.query(
+      `CREATE TYPE "public"."event_scheduling_log_status_enum" AS ENUM('active', 'cancel', 'confirmed', 'draft', 'expired', 'inactive', 'partial refund', 'pending', 'proses refund', 'refunded', 'rejected', 'settled', 'waiting')`,
+    );
+    await queryRunner.query(
+      `CREATE TYPE "public"."event_scheduling_log_type_enum" AS ENUM('Default Percentage', 'Data Scheduling')`,
+    );
+    await queryRunner.query(
+      `CREATE TYPE "public"."event_scheduling_log_action_enum" AS ENUM('CREATE', 'UPDATE', 'DELETE', 'CHANGE_STATUS')`,
+    );
+    await queryRunner.query(
+      `CREATE TABLE "event_scheduling_log" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "creator_id" character varying(36), "creator_name" character varying(125), "editor_id" character varying(36), "editor_name" character varying(125), "created_at" bigint NOT NULL, "updated_at" bigint NOT NULL, "status" "public"."event_scheduling_log_status_enum" NOT NULL DEFAULT 'draft', "type" "public"."event_scheduling_log_type_enum" NOT NULL, "action" "public"."event_scheduling_log_action_enum" NOT NULL, "log_created_at" bigint NOT NULL, "data_id" character varying, "name" character varying, "indexing_key" character varying, "schedule_date_from" date, "schedule_date_to" date, "default_value" integer, "description" text, CONSTRAINT "PK_984247db566636baacab18f593a" PRIMARY KEY ("id"))`,
+    );
+  }
+
+  public async down(queryRunner: QueryRunner): Promise<void> {
+    await queryRunner.query(`DROP TABLE "event_scheduling_log"`);
+    await queryRunner.query(
+      `DROP TYPE "public"."event_scheduling_log_action_enum"`,
+    );
+    await queryRunner.query(
+      `DROP TYPE "public"."event_scheduling_log_type_enum"`,
+    );
+    await queryRunner.query(
+      `DROP TYPE "public"."event_scheduling_log_status_enum"`,
+    );
+  }
+}
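For orientation, below is a minimal sketch of how a migration class like the one above is typically executed with TypeORM 0.3's DataSource API. The import path, the DATABASE_URL environment variable, and the standalone script shape are assumptions for illustration; the project presumably wires migrations through its own data-source configuration.

import { DataSource } from 'typeorm';
// Hypothetical path to the migration shown in the diff above.
import { DataSchedulingLog1752146975330 } from './migrations/1752146975330-data-scheduling-log';

const dataSource = new DataSource({
  type: 'postgres',
  url: process.env.DATABASE_URL, // assumed connection setting
  migrations: [DataSchedulingLog1752146975330],
});

async function run(): Promise<void> {
  await dataSource.initialize();
  await dataSource.runMigrations();        // executes up(): creates the three enums and the log table
  // await dataSource.undoLastMigration(); // would execute down() and drop them again
  await dataSource.destroy();
}

run().catch(console.error);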
@@ -30,15 +30,21 @@ import { DataSchedulingDefaultModel } from './data/models/data-scheduling-defaul
 import { DataSchedulingManager } from './domain/usecases/managers/data-scheduling-default.manager';
 import { SetupSchedulingGuard } from './infrastructure/guards/setup-scheduling.guard';
 
+import { DataSchedulingChangeStatusHandler } from './domain/usecases/handlers/data-scheduling-change-status.handler';
+import { DataSchedulingCreatedHandler } from './domain/usecases/handlers/data-scheduling-created.handler';
+import { DataSchedulingDeletedHandler } from './domain/usecases/handlers/data-scheduling-deleted.handler';
+import { DataSchedulingUpdatedHandler } from './domain/usecases/handlers/data-scheduling-updated.handler';
+
 import { JwtModule } from '@nestjs/jwt';
 import { JWT_EXPIRED } from 'src/core/sessions/constants';
 import { JWT_SECRET } from 'src/core/sessions/constants';
+import { DataSchedulingLogModel } from './data/models/data-scheduling-log.model';
 
 @Module({
   imports: [
     ConfigModule.forRoot(),
     TypeOrmModule.forFeature(
-      [DataSchedulingModel, DataSchedulingDefaultModel],
+      [DataSchedulingModel, DataSchedulingDefaultModel, DataSchedulingLogModel],
       CONNECTION_NAME.DEFAULT,
     ),
     JwtModule.register({
@@ -75,6 +81,11 @@ import { JWT_SECRET } from 'src/core/sessions/constants';
     DataSchedulingReadOrchestrator,
 
     DataSchedulingManager,
+
+    DataSchedulingChangeStatusHandler,
+    DataSchedulingCreatedHandler,
+    DataSchedulingDeletedHandler,
+    DataSchedulingUpdatedHandler,
   ],
 })
 export class DataSchedulingModule {}
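The four new providers are plain CQRS event handlers, so they only run when the corresponding events are published. As a rough sketch of the call path (not part of this commit): it assumes CqrsModule is already registered in the module tree and that the event classes accept a payload shaped like { data, old?, user? }, which is inferred from how the handlers read event?.data below.

import { Injectable } from '@nestjs/common';
import { EventBus } from '@nestjs/cqrs';
// Hypothetical import path for the event consumed by DataSchedulingCreatedHandler.
import { DataSchedulingCreatedEvent } from './domain/entities/event/data-scheduling-created.event';

@Injectable()
export class DataSchedulingExampleCaller {
  constructor(private readonly eventBus: EventBus) {}

  afterCreate(saved: Record<string, unknown>): void {
    // Publishing the event is what makes DataSchedulingCreatedHandler write a row
    // into event_scheduling_log; the constructor argument shape is an assumption.
    this.eventBus.publish(new DataSchedulingCreatedEvent({ data: saved }));
  }
}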
@@ -0,0 +1,44 @@
+import { TABLE_NAME } from 'src/core/strings/constants/table.constants';
+import {
+  DataSchedulingLogEntity,
+  SCHEDULING_LOG_ACTION_ENUM,
+  SCHEDULING_LOG_TYPE_ENUM,
+} from '../../domain/entities/data-scheduling.entity';
+import { Column, Entity } from 'typeorm';
+import { BaseStatusModel } from 'src/core/modules/data/model/base-status.model';
+
+@Entity(TABLE_NAME.DATA_SCHEDULING_LOG)
+export class DataSchedulingLogModel
+  extends BaseStatusModel<DataSchedulingLogEntity>
+  implements DataSchedulingLogEntity
+{
+  @Column({ type: 'enum', enum: SCHEDULING_LOG_TYPE_ENUM, nullable: false })
+  type: SCHEDULING_LOG_TYPE_ENUM;
+
+  @Column({ type: 'enum', enum: SCHEDULING_LOG_ACTION_ENUM, nullable: false })
+  action: SCHEDULING_LOG_ACTION_ENUM;
+
+  @Column({ type: 'bigint', nullable: false })
+  log_created_at: number;
+
+  @Column('varchar', { name: 'data_id', nullable: true })
+  data_id: string;
+
+  @Column('varchar', { name: 'name', nullable: true })
+  name: string;
+
+  @Column('varchar', { name: 'indexing_key', nullable: true })
+  indexing_key: string;
+
+  @Column('date', { name: 'schedule_date_from', nullable: true })
+  schedule_date_from: Date;
+
+  @Column('date', { name: 'schedule_date_to', nullable: true })
+  schedule_date_to: Date;
+
+  @Column('int', { nullable: true })
+  default_value: number;
+
+  @Column('text', { name: 'description', nullable: true })
+  description: string;
+}
@@ -1,5 +1,6 @@
 import { BaseStatusEntity } from 'src/core/modules/domain/entities/base-status.entity';
 import { BaseEntity } from 'src/core/modules/domain/entities/base.entity';
+import { STATUS } from 'src/core/strings/constants/base.constants';
 
 export interface DataSchedulingEntity extends BaseStatusEntity {
   name: string;
@@ -15,3 +16,32 @@ export interface DataSchedulingDefaultEntity extends BaseEntity {
 export interface DataSchedulingActiveEntity {
   value: number;
 }
+
+export enum SCHEDULING_LOG_TYPE_ENUM {
+  DEFAULT_PERCENTAGE = 'Default Percentage',
+  DATA_SCHEDULING = 'Data Scheduling',
+}
+
+export enum SCHEDULING_LOG_ACTION_ENUM {
+  CREATE = 'CREATE',
+  UPDATE = 'UPDATE',
+  DELETE = 'DELETE',
+  CHANGE_STATUS = 'CHANGE_STATUS',
+}
+
+export interface DataSchedulingLogEntity extends BaseStatusEntity {
+  type: SCHEDULING_LOG_TYPE_ENUM;
+  action: SCHEDULING_LOG_ACTION_ENUM;
+  log_created_at: number;
+
+  data_id?: string;
+
+  name?: string;
+  indexing_key?: number | string;
+  schedule_date_from?: Date;
+  schedule_date_to?: Date;
+
+  default_value?: number;
+
+  description?: string;
+}
@@ -0,0 +1,62 @@
+import { EventsHandler, IEventHandler } from '@nestjs/cqrs';
+import { DataSchedulingChangeStatusEvent } from '../../entities/event/data-scheduling-change-status.event';
+import { capitalizeEachWord } from 'src/modules/reports/shared/helpers';
+import {
+  DataSchedulingLogEntity,
+  SCHEDULING_LOG_ACTION_ENUM,
+  SCHEDULING_LOG_TYPE_ENUM,
+} from '../../entities/data-scheduling.entity';
+import { InjectRepository } from '@nestjs/typeorm';
+import { DataSchedulingLogModel } from '../../../data/models/data-scheduling-log.model';
+import { Repository } from 'typeorm';
+import { Logger } from '@nestjs/common';
+
+@EventsHandler(DataSchedulingChangeStatusEvent)
+export class DataSchedulingChangeStatusHandler
+  implements IEventHandler<DataSchedulingChangeStatusEvent>
+{
+  private readonly logger = new Logger(DataSchedulingChangeStatusHandler.name);
+
+  constructor(
+    @InjectRepository(DataSchedulingLogModel)
+    private repository: Repository<DataSchedulingLogModel>,
+  ) {}
+
+  async handle(event: DataSchedulingChangeStatusEvent) {
+    const oldData = event?.data?.old;
+    const newData = event?.data?.data;
+
+    const oldStatus = capitalizeEachWord(oldData?.status);
+    const newStatus = capitalizeEachWord(newData.status);
+
+    const scheduleName = newData?.name || 'an item';
+    const editorName = newData.editor_name || 'System';
+    const description = `<p><b>${editorName}</b> changed the status of <b>${scheduleName}</b> from <b><i>${oldStatus}</i></b> to <b><i>${newStatus}</i></b>.</p>`;
+
+    const payload: DataSchedulingLogEntity = {
+      type: SCHEDULING_LOG_TYPE_ENUM.DATA_SCHEDULING,
+      action: SCHEDULING_LOG_ACTION_ENUM.CHANGE_STATUS,
+      log_created_at: new Date().getTime(),
+
+      data_id: newData?.id,
+      name: newData?.name,
+      indexing_key: newData?.indexing_key,
+      schedule_date_from: newData?.schedule_date_from,
+      schedule_date_to: newData?.schedule_date_to,
+
+      status: newData?.status,
+      creator_id: newData?.creator_id,
+      creator_name: newData?.creator_name,
+      editor_id: newData?.editor_id,
+      editor_name: newData?.editor_name,
+      created_at: newData?.created_at,
+      updated_at: newData?.updated_at,
+      description: description,
+    };
+
+    await this.repository.save(payload as any);
+    this.logger.verbose(
+      `[SCHEDULING LOG] Change status data for ID: ${payload.data_id}`,
+    );
+  }
+}
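Since each handler only maps an event payload onto a log row and saves it, it can be exercised in isolation with a stubbed repository. A minimal Jest-style sketch (test framework, relative import path, and event shape are assumptions, not part of this commit):

// Hypothetical test file next to the handler.
import { DataSchedulingChangeStatusHandler } from './data-scheduling-change-status.handler';

describe('DataSchedulingChangeStatusHandler', () => {
  it('writes a CHANGE_STATUS row to the scheduling log', async () => {
    const repository = { save: jest.fn().mockResolvedValue(undefined) };
    const handler = new DataSchedulingChangeStatusHandler(repository as any);

    await handler.handle({
      data: {
        old: { status: 'draft' },
        data: { id: 'abc-123', name: 'July Batch', status: 'active', editor_name: 'Admin' },
      },
    } as any);

    // The payload saved by the handler carries the enum action and the source record id.
    expect(repository.save).toHaveBeenCalledWith(
      expect.objectContaining({ action: 'CHANGE_STATUS', data_id: 'abc-123' }),
    );
  });
});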
@@ -0,0 +1,58 @@
+import { EventsHandler, IEventHandler } from '@nestjs/cqrs';
+import { DataSchedulingCreatedEvent } from '../../entities/event/data-scheduling-created.event';
+import {
+  DataSchedulingLogEntity,
+  SCHEDULING_LOG_ACTION_ENUM,
+  SCHEDULING_LOG_TYPE_ENUM,
+} from '../../entities/data-scheduling.entity';
+import { decryptionTotal } from '../../../infrastructure/helpers';
+import { DataSchedulingLogModel } from '../../../data/models/data-scheduling-log.model';
+import { InjectRepository } from '@nestjs/typeorm';
+import { Repository } from 'typeorm';
+import { Logger } from '@nestjs/common';
+
+@EventsHandler(DataSchedulingCreatedEvent)
+export class DataSchedulingCreatedHandler
+  implements IEventHandler<DataSchedulingCreatedEvent>
+{
+  private readonly logger = new Logger(DataSchedulingCreatedHandler.name);
+
+  constructor(
+    @InjectRepository(DataSchedulingLogModel)
+    private repository: Repository<DataSchedulingLogModel>,
+  ) {}
+
+  async handle(event: DataSchedulingCreatedEvent) {
+    const data = event?.data?.data;
+    const totalPercentage = decryptionTotal(data?.indexing_key);
+
+    const scheduleName = data?.name || 'a new schedule';
+    const description = `<p><b>${data.creator_name}</b> created <b>${scheduleName}</b> schedule from <b><i>${data?.schedule_date_from}</i></b> to <b><i>${data.schedule_date_to}</i></b> with a total percentage of <b><i>${totalPercentage}%</i></b>.</p>`;
+
+    const payload: DataSchedulingLogEntity = {
+      type: SCHEDULING_LOG_TYPE_ENUM.DATA_SCHEDULING,
+      action: SCHEDULING_LOG_ACTION_ENUM.CREATE,
+      log_created_at: new Date().getTime(),
+
+      data_id: data?.id,
+      name: data?.name,
+      indexing_key: data?.indexing_key,
+      schedule_date_from: data?.schedule_date_from,
+      schedule_date_to: data?.schedule_date_to,
+
+      status: data?.status,
+      creator_id: data?.creator_id,
+      creator_name: data?.creator_name,
+      editor_id: data?.editor_id,
+      editor_name: data?.editor_name,
+      created_at: data?.created_at,
+      updated_at: data?.updated_at,
+      description: description,
+    };
+
+    await this.repository.save(payload as any);
+    this.logger.verbose(
+      `[SCHEDULING LOG] Create data for ID: ${payload.data_id}`,
+    );
+  }
+}
@@ -0,0 +1,64 @@
+import { EventsHandler, IEventHandler } from '@nestjs/cqrs';
+import { DataSchedulingDeletedEvent } from '../../entities/event/data-scheduling-deleted.event';
+import {
+  DataSchedulingLogEntity,
+  SCHEDULING_LOG_ACTION_ENUM,
+  SCHEDULING_LOG_TYPE_ENUM,
+} from '../../entities/data-scheduling.entity';
+import { InjectRepository } from '@nestjs/typeorm';
+import { DataSchedulingLogModel } from '../../../data/models/data-scheduling-log.model';
+import { Repository } from 'typeorm';
+import { Logger } from '@nestjs/common';
+
+@EventsHandler(DataSchedulingDeletedEvent)
+export class DataSchedulingDeletedHandler
+  implements IEventHandler<DataSchedulingDeletedEvent>
+{
+  private readonly logger = new Logger(DataSchedulingDeletedHandler.name);
+
+  constructor(
+    @InjectRepository(DataSchedulingLogModel)
+    private repository: Repository<DataSchedulingLogModel>,
+  ) {}
+
+  async handle(event: DataSchedulingDeletedEvent) {
+    const deletedData = event?.data?.data;
+    const user = event?.data?.user;
+
+    const deleterName =
+      user?.name ||
+      deletedData?.editor_name ||
+      deletedData?.creator_name ||
+      'System';
+
+    const scheduleName = deletedData?.name || 'an item';
+
+    const description = `<p><b>${deleterName}</b> deleted schedule: <b>${scheduleName}</b>.</p>`;
+
+    const payload: DataSchedulingLogEntity = {
+      type: SCHEDULING_LOG_TYPE_ENUM.DATA_SCHEDULING,
+      action: SCHEDULING_LOG_ACTION_ENUM.DELETE,
+      log_created_at: new Date().getTime(),
+
+      data_id: deletedData?.id,
+      name: deletedData?.name,
+      indexing_key: deletedData?.indexing_key,
+      schedule_date_from: deletedData?.schedule_date_from,
+      schedule_date_to: deletedData?.schedule_date_to,
+      status: deletedData?.status,
+
+      creator_id: deletedData?.creator_id,
+      creator_name: deletedData?.creator_name,
+      editor_id: deletedData?.editor_id,
+      editor_name: deletedData?.editor_name,
+      created_at: deletedData?.created_at,
+      updated_at: deletedData?.updated_at,
+      description: description,
+    };
+
+    await this.repository.save(payload as any);
+    this.logger.verbose(
+      `[SCHEDULING LOG] Delete data for ID: ${payload.data_id}`,
+    );
+  }
+}
@@ -0,0 +1,153 @@
+import { EventsHandler, IEventHandler } from '@nestjs/cqrs';
+import { DataSchedulingUpdatedEvent } from '../../entities/event/data-scheduling-updated.event';
+import {
+  DataSchedulingLogEntity,
+  SCHEDULING_LOG_ACTION_ENUM,
+  SCHEDULING_LOG_TYPE_ENUM,
+} from '../../entities/data-scheduling.entity';
+import {
+  decryptionTotal,
+  encryptionTotal,
+} from '../../../infrastructure/helpers';
+import { InjectRepository } from '@nestjs/typeorm';
+import { DataSchedulingLogModel } from '../../../data/models/data-scheduling-log.model';
+import { Repository } from 'typeorm';
+import { Logger } from '@nestjs/common';
+
+@EventsHandler(DataSchedulingUpdatedEvent)
+export class DataSchedulingUpdatedHandler
+  implements IEventHandler<DataSchedulingUpdatedEvent>
+{
+  private readonly logger = new Logger(DataSchedulingUpdatedHandler.name);
+
+  constructor(
+    @InjectRepository(DataSchedulingLogModel)
+    private repository: Repository<DataSchedulingLogModel>,
+  ) {}
+
+  // Map for readable labels
+  private readonly labelMap: { [key: string]: string } = {
+    name: 'Name',
+    indexing_key: 'Total Data',
+    schedule_date_from: 'Start Date',
+    schedule_date_to: 'End Date',
+  };
+
+  // Relevant keys for comparing changes
+  private readonly keysToCompare: string[] = [
+    'name',
+    'indexing_key',
+    'schedule_date_from',
+    'schedule_date_to',
+  ];
+
+  async handle(event: DataSchedulingUpdatedEvent) {
+    const oldData = event?.data?.old;
+    // Decrypt oldData.indexing_key here before comparison
+    if (oldData?.indexing_key !== undefined && oldData?.indexing_key !== null) {
+      oldData.indexing_key = decryptionTotal(oldData.indexing_key);
+    }
+
+    const newData = event?.data?.data;
+    // Decrypt newData.indexing_key here before comparison
+    if (newData?.indexing_key !== undefined && newData?.indexing_key !== null) {
+      newData.indexing_key = decryptionTotal(newData.indexing_key);
+    }
+
+    const changingData = this.getChangingData(oldData, newData);
+    const description = this.generateDescription(
+      oldData,
+      newData,
+      changingData,
+    );
+
+    const payload: DataSchedulingLogEntity = {
+      type: SCHEDULING_LOG_TYPE_ENUM.DATA_SCHEDULING,
+      action: SCHEDULING_LOG_ACTION_ENUM.UPDATE,
+      log_created_at: new Date().getTime(),
+
+      data_id: newData?.id,
+      name: newData?.name,
+      indexing_key: encryptionTotal(newData?.indexing_key),
+      schedule_date_from: newData?.schedule_date_from,
+      schedule_date_to: newData?.schedule_date_to,
+
+      status: newData?.status,
+      creator_id: newData?.creator_id,
+      creator_name: newData?.creator_name,
+      editor_id: newData?.editor_id,
+      editor_name: newData?.editor_name,
+      created_at: newData?.created_at,
+      updated_at: newData?.updated_at,
+      description: description,
+    };
+
+    await this.repository.save(payload as any);
+    this.logger.verbose(
+      `[SCHEDULING LOG] Update data for ID: ${payload.data_id}`,
+    );
+  }
+
+  /**
+   * Compares old and new data to find changes.
+   * @param oldData Data before the change.
+   * @param newData Data after the change.
+   * @returns An object containing the old and new changed data.
+   */
+  private getChangingData(oldData: any, newData: any): { old: any; new: any } {
+    const changingData: { old: any; new: any } = { old: {}, new: {} };
+
+    this.keysToCompare.forEach((key) => {
+      // Ensure comparisons are made on decrypted values if decryption happens before this
+      if (oldData?.[key] !== newData?.[key]) {
+        changingData.old[key] = oldData?.[key];
+        changingData.new[key] = newData?.[key];
+      }
+    });
+
+    return changingData;
+  }
+
+  /**
+   * Generates an HTML description string based on data changes.
+   * Includes the name from oldData for identification.
+   * @param oldData Old data, used to get the name of the item.
+   * @param newData New data containing editor information.
+   * @param changingData An object containing the changed data.
+   * @returns The HTML string of the change description.
+   */
+  private generateDescription(
+    oldData: any,
+    newData: any,
+    changingData: { old: any; new: any },
+  ): string {
+    const editorName = newData.editor_name || 'System';
+    const itemName = oldData?.name || 'an item';
+
+    let description = `<p><b>${editorName}</b> has updated schedule for <b>${itemName}</b>.`;
+
+    if (Object.keys(changingData.old).length > 0) {
+      description += ` Change details:<ul>`;
+      for (const key in changingData.old) {
+        if (Object.prototype.hasOwnProperty.call(changingData.old, key)) {
+          const label = this.labelMap[key] || key;
+          let oldValue = changingData.old[key] || 'empty';
+          let newValue = changingData.new[key] || 'empty';
+
+          // Add '%' suffix if the key is 'indexing_key'
+          if (key === 'indexing_key') {
+            oldValue = `${oldValue}%`;
+            newValue = `${newValue}%`;
+          }
+
+          description += `<li><b>${label}</b> changed from <i>${oldValue}</i> to <i>${newValue}</i>.</li>`;
+        }
+      }
+      description += `</ul></p>`;
+    } else {
+      description += ` No significant data detail changes.</p>`;
+    }
+
+    return description;
+  }
+}
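To make the update log concrete, here is a rough illustration of the description string generateDescription builds for a single changed field. The values and the relative import path are invented, the repository is stubbed, and the private method is called through a cast purely for demonstration.

import { DataSchedulingUpdatedHandler } from './data-scheduling-updated.handler'; // assumed relative path

const handler = new DataSchedulingUpdatedHandler({ save: async () => undefined } as any);

const description = (handler as any).generateDescription(
  { name: 'July Batch' },                                   // oldData: supplies the item name
  { editor_name: 'Admin' },                                 // newData: supplies the editor
  { old: { indexing_key: 40 }, new: { indexing_key: 55 } }, // shape returned by getChangingData
);
// description ===
// '<p><b>Admin</b> has updated schedule for <b>July Batch</b>. Change details:<ul>' +
// '<li><b>Total Data</b> changed from <i>40%</i> to <i>55%</i>.</li></ul></p>'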