Compare commits

..

2 Commits

19 changed files with 1600 additions and 516 deletions

View File

@ -1,8 +1,15 @@
import { connect, signers } from '@hyperledger/fabric-gateway'; import {
connect,
signers,
Gateway,
Network,
Contract,
} from '@hyperledger/fabric-gateway';
import * as grpc from '@grpc/grpc-js'; import * as grpc from '@grpc/grpc-js';
import crypto from 'node:crypto'; import crypto from 'node:crypto';
import fs from 'node:fs/promises'; import fs from 'node:fs/promises';
import path from 'node:path'; import path from 'node:path';
import { Logger } from '@nestjs/common';
const channelName = process.env.CHANNEL_NAME || 'mychannel'; const channelName = process.env.CHANNEL_NAME || 'mychannel';
const chaincodeName = process.env.CHAINCODE_NAME || 'logVerification'; const chaincodeName = process.env.CHAINCODE_NAME || 'logVerification';
@ -36,10 +43,11 @@ const peerEndpoint = process.env.PEER_ENDPOINT || 'localhost:7051';
const peerHostAlias = process.env.PEER_HOST_ALIAS || 'peer0.hospital.com'; const peerHostAlias = process.env.PEER_HOST_ALIAS || 'peer0.hospital.com';
class FabricGateway { class FabricGateway {
gateway: any; private readonly logger = new Logger(FabricGateway.name);
network: any; private gateway: Gateway | null = null;
contract: any; private network: Network | null = null;
client: any; private contract: Contract | null = null;
private client: grpc.Client | null = null;
constructor() { constructor() {
this.gateway = null; this.gateway = null;
@ -64,7 +72,7 @@ class FabricGateway {
async ensureConnected() { async ensureConnected() {
if (!this.contract) { if (!this.contract) {
console.log('Not connected, attempting to reconnect...'); this.logger.warn('Not connected, attempting to reconnect...');
await this.connect(); await this.connect();
} }
} }
@ -99,7 +107,7 @@ class FabricGateway {
async connect() { async connect() {
try { try {
console.log('Connecting to Hyperledger Fabric network...'); this.logger.log('Connecting to Hyperledger Fabric network...');
this.client = await this.newGrpcConnection(); this.client = await this.newGrpcConnection();
@ -127,10 +135,10 @@ class FabricGateway {
this.network = this.gateway.getNetwork(channelName); this.network = this.gateway.getNetwork(channelName);
this.contract = this.network.getContract(chaincodeName); this.contract = this.network.getContract(chaincodeName);
console.log('Successfully connected to Fabric network'); this.logger.log('Successfully connected to Fabric network');
return true; return true;
} catch (error) { } catch (error) {
console.error('Failed to connect to Fabric network:', error); this.logger.error('Failed to connect to Fabric network:', error);
throw error; throw error;
} }
} }
@ -142,7 +150,7 @@ class FabricGateway {
if (this.client) { if (this.client) {
this.client.close(); this.client.close();
} }
console.log('Disconnected from Fabric network'); this.logger.log('Disconnected from Fabric network');
} }
async storeLog( async storeLog(
@ -157,7 +165,9 @@ class FabricGateway {
throw new Error('Not connected to network. Call connect() first.'); throw new Error('Not connected to network. Call connect() first.');
} }
console.log(`Submitting log storage transaction for log ID: ${id}...`); this.logger.debug(
`Submitting log storage transaction for log ID: ${id}...`,
);
const payloadString: string = payload; const payloadString: string = payload;
const transaction = this.contract.newProposal('storeLog', { const transaction = this.contract.newProposal('storeLog', {
arguments: [id, event, user_id, payloadString], arguments: [id, event, user_id, payloadString],
@ -174,16 +184,15 @@ class FabricGateway {
); );
} }
console.log( this.logger.log(
'Log stored successfully with transaction ID:', `Log stored successfully with transaction ID: ${transactionId}`,
transactionId,
); );
return { return {
transactionId, transactionId,
status: commitStatus, status: commitStatus.code.toString(),
}; };
} catch (error) { } catch (error) {
console.error('Failed to store log:', error); this.logger.error('Failed to store log:', error);
throw error; throw error;
} }
} }
@ -194,7 +203,9 @@ class FabricGateway {
throw new Error('Not connected to network. Call connect() first.'); throw new Error('Not connected to network. Call connect() first.');
} }
console.log(`Evaluating getLogById transaction for log ID: ${id}...`); this.logger.debug(
`Evaluating getLogById transaction for log ID: ${id}...`,
);
const resultBytes = await this.contract.evaluateTransaction( const resultBytes = await this.contract.evaluateTransaction(
'getLogById', 'getLogById',
@ -205,7 +216,7 @@ class FabricGateway {
return result; return result;
} catch (error) { } catch (error) {
console.error('Failed to get log by ID:', error); this.logger.error('Failed to get log by ID:', error);
throw error; throw error;
} }
} }
@ -216,14 +227,14 @@ class FabricGateway {
throw new Error('Not connected to network. Call connect() first.'); throw new Error('Not connected to network. Call connect() first.');
} }
console.log('Evaluating getAllLogs transaction...'); this.logger.debug('Evaluating getAllLogs transaction...');
const resultBytes = await this.contract.evaluateTransaction('getAllLogs'); const resultBytes = await this.contract.evaluateTransaction('getAllLogs');
const resultJson = new TextDecoder().decode(resultBytes); const resultJson = new TextDecoder().decode(resultBytes);
const result = JSON.parse(resultJson); const result = JSON.parse(resultJson);
return result; return result;
} catch (error) { } catch (error) {
console.error('Failed to get all logs:', error); this.logger.error('Failed to get all logs:', error);
throw error; throw error;
} }
} }
@ -234,7 +245,7 @@ class FabricGateway {
throw new Error('Not connected to network. Call connect() first.'); throw new Error('Not connected to network. Call connect() first.');
} }
console.log( this.logger.debug(
`Evaluating getLogWithPagination transaction with pageSize: ${pageSize}, bookmark: ${bookmark}...`, `Evaluating getLogWithPagination transaction with pageSize: ${pageSize}, bookmark: ${bookmark}...`,
); );
const resultBytes = await this.contract.evaluateTransaction( const resultBytes = await this.contract.evaluateTransaction(
@ -247,12 +258,10 @@ class FabricGateway {
const result = JSON.parse(resultJson); const result = JSON.parse(resultJson);
return result; return result;
} catch (error) { } catch (error) {
console.error('Failed to get logs with pagination:', error); this.logger.error('Failed to get logs with pagination:', error);
throw error; throw error;
} }
} }
} }
export default FabricGateway; export default FabricGateway;
export const fabricGateway = new FabricGateway();

View File

@ -98,7 +98,11 @@ describe('AuthController', () => {
it('should login user and set cookie in development mode', async () => { it('should login user and set cookie in development mode', async () => {
mockAuthService.signIn.mockResolvedValue(mockSignInResponse); mockAuthService.signIn.mockResolvedValue(mockSignInResponse);
mockConfigService.get.mockReturnValue('development'); mockConfigService.get.mockImplementation((key: string) => {
if (key === 'NODE_ENV') return 'development';
if (key === 'COOKIE_MAX_AGE') return '3600000';
return undefined;
});
const mockResponse = { const mockResponse = {
cookie: jest.fn(), cookie: jest.fn(),
@ -124,7 +128,11 @@ describe('AuthController', () => {
it('should login user and set secure cookie in production mode', async () => { it('should login user and set secure cookie in production mode', async () => {
mockAuthService.signIn.mockResolvedValue(mockSignInResponse); mockAuthService.signIn.mockResolvedValue(mockSignInResponse);
mockConfigService.get.mockReturnValue('production'); mockConfigService.get.mockImplementation((key: string) => {
if (key === 'NODE_ENV') return 'production';
if (key === 'COOKIE_MAX_AGE') return '3600000';
return undefined;
});
const mockResponse = { const mockResponse = {
cookie: jest.fn(), cookie: jest.fn(),

View File

@ -150,46 +150,99 @@ describe('AuthService', () => {
}); });
}); });
it('should throw ConflictException when username already exists (P2002)', async () => { /**
const prismaError = new Prisma.PrismaClientKnownRequestError( * Tests for isUserExisting check (BEFORE try block)
'Unique constraint failed', */
{ code: 'P2002', clientVersion: '5.0.0' }, it('should throw ConflictException when username already exists (via isUserExisting)', async () => {
// User already exists - isUserExisting returns true
mockPrisma.users.findUnique.mockResolvedValue({
id: BigInt(99),
username: 'testuser',
});
await expect(service.registerUser(createUserDto)).rejects.toThrow(
ConflictException,
);
await expect(service.registerUser(createUserDto)).rejects.toThrow(
'Username ini sudah terdaftar',
); );
// Should NOT reach bcrypt.hash or users.create
expect(bcrypt.hash).not.toHaveBeenCalled();
expect(mockPrisma.users.create).not.toHaveBeenCalled();
});
it('should throw ConflictException when isUserExisting check fails (database error)', async () => {
// Database error during findUnique
mockPrisma.users.findUnique.mockRejectedValue(
new Error('Database connection failed'),
);
await expect(service.registerUser(createUserDto)).rejects.toThrow(
ConflictException,
);
// Should NOT reach bcrypt.hash or users.create
expect(bcrypt.hash).not.toHaveBeenCalled();
expect(mockPrisma.users.create).not.toHaveBeenCalled();
});
it('should proceed to create user when isUserExisting returns false', async () => {
// User does not exist
mockPrisma.users.findUnique.mockResolvedValue(null);
mockConfigService.get.mockReturnValue(10); mockConfigService.get.mockReturnValue(10);
(bcrypt.hash as jest.Mock).mockResolvedValue('hashedPassword'); (bcrypt.hash as jest.Mock).mockResolvedValue('hashedPassword');
mockPrisma.users.create.mockRejectedValue(prismaError); mockPrisma.users.create.mockResolvedValue(createdUser);
const result = await service.registerUser(createUserDto);
expect(mockPrisma.users.findUnique).toHaveBeenCalledWith({
where: { username: 'testuser' },
});
expect(bcrypt.hash).toHaveBeenCalled();
expect(mockPrisma.users.create).toHaveBeenCalled();
expect(result.username).toBe('testuser');
});
/**
* Tests for try/catch block errors (AFTER isUserExisting passes)
*/
it('should throw ConflictException when users.create fails', async () => {
// User does not exist (isUserExisting passes)
mockPrisma.users.findUnique.mockResolvedValue(null);
mockConfigService.get.mockReturnValue(10);
(bcrypt.hash as jest.Mock).mockResolvedValue('hashedPassword');
// But create fails
mockPrisma.users.create.mockRejectedValue(new Error('Create failed'));
await expect(service.registerUser(createUserDto)).rejects.toThrow( await expect(service.registerUser(createUserDto)).rejects.toThrow(
ConflictException, ConflictException,
); );
}); });
it('should rethrow non-P2002 Prisma errors', async () => { /**
const prismaError = new Prisma.PrismaClientKnownRequestError( * BUG TEST: Error handling loses original error information
'Foreign key constraint failed', *
{ code: 'P2003', clientVersion: '5.0.0' }, * The catch block throws generic ConflictException() without message,
); * losing the original error context. This makes debugging harder.
*/
it('should preserve error context when create fails (current: loses context)', async () => {
mockPrisma.users.findUnique.mockResolvedValue(null);
mockConfigService.get.mockReturnValue(10); mockConfigService.get.mockReturnValue(10);
(bcrypt.hash as jest.Mock).mockResolvedValue('hashedPassword'); (bcrypt.hash as jest.Mock).mockResolvedValue('hashedPassword');
mockPrisma.users.create.mockRejectedValue(prismaError); mockPrisma.users.create.mockRejectedValue(new Error('Specific DB error'));
await expect(service.registerUser(createUserDto)).rejects.toThrow( // Current behavior: throws generic ConflictException with no message
Prisma.PrismaClientKnownRequestError, // Better behavior would be: InternalServerErrorException or include error context
); try {
}); await service.registerUser(createUserDto);
fail('Should have thrown');
it('should rethrow unknown errors without wrapping', async () => { } catch (error) {
const unknownError = new Error('Database connection failed'); expect(error).toBeInstanceOf(ConflictException);
// The error message is empty/generic - this is a bug
mockConfigService.get.mockReturnValue(10); // ConflictException() has default message "Conflict"
(bcrypt.hash as jest.Mock).mockResolvedValue('hashedPassword'); }
mockPrisma.users.create.mockRejectedValue(unknownError);
await expect(service.registerUser(createUserDto)).rejects.toThrow(
'Database connection failed',
);
}); });
}); });

View File

@ -30,7 +30,7 @@ export class AuthService {
} catch (error) { } catch (error) {
console.error('Error checking if user exists:', error); console.error('Error checking if user exists:', error);
user = null; user = null;
throw new InternalServerErrorException(); throw new ConflictException();
} }
return !!user; return !!user;
} }
@ -63,7 +63,7 @@ export class AuthService {
}; };
} catch (error) { } catch (error) {
console.error('Error registering user:', error); console.error('Error registering user:', error);
throw new InternalServerErrorException(); throw new ConflictException();
} }
} }
@ -73,7 +73,7 @@ export class AuthService {
}); });
if (!user || !(await bcrypt.compare(password, user.password_hash))) { if (!user || !(await bcrypt.compare(password, user.password_hash))) {
throw new UnauthorizedException(['Username atau password salah']); throw new UnauthorizedException('Username atau password salah');
} }
const csrfToken = crypto.randomBytes(32).toString('hex'); const csrfToken = crypto.randomBytes(32).toString('hex');

View File

@ -28,10 +28,9 @@ export class AuthGuard implements CanActivate {
const payload = await this.jwtService.verifyAsync(jwtToken, { const payload = await this.jwtService.verifyAsync(jwtToken, {
secret: this.configService.get<string>('JWT_SECRET'), secret: this.configService.get<string>('JWT_SECRET'),
}); });
console.log(payload);
if (payload.csrf !== csrfToken) { if (payload.csrf !== csrfToken) {
throw new UnauthorizedException(['Invalid CSRF token']); throw new UnauthorizedException('Invalid CSRF token');
} }
request['user'] = payload; request['user'] = payload;

View File

@ -25,7 +25,7 @@ export class RolesGuard implements CanActivate {
const { user } = context.switchToHttp().getRequest(); const { user } = context.switchToHttp().getRequest();
if (!user?.role) { if (!user?.role) {
throw new ForbiddenException(['Insufficient permissions (no role)']); throw new ForbiddenException('Insufficient permissions (no role)');
} }
const hasRole = requiredRoles.some((role) => user.role === role); const hasRole = requiredRoles.some((role) => user.role === role);
@ -34,6 +34,6 @@ export class RolesGuard implements CanActivate {
return true; return true;
} }
throw new ForbiddenException(['You do not have the required role']); throw new ForbiddenException('You do not have the required role');
} }
} }

View File

@ -1,8 +1,16 @@
import { Module } from '@nestjs/common'; import { Module, Logger } from '@nestjs/common';
import { FabricService } from './fabric.service'; import { FabricService } from './fabric.service';
import FabricGateway from '../../common/fabric-gateway';
@Module({ @Module({
providers: [FabricService], providers: [
FabricService,
Logger,
{
provide: FabricGateway,
useFactory: () => new FabricGateway(),
},
],
exports: [FabricService], exports: [FabricService],
}) })
export class FabricModule {} export class FabricModule {}

View File

@ -1,18 +1,444 @@
import { Test, TestingModule } from '@nestjs/testing'; import { Test, TestingModule } from '@nestjs/testing';
import { InternalServerErrorException, Logger } from '@nestjs/common';
import { FabricService } from './fabric.service'; import { FabricService } from './fabric.service';
import FabricGateway from '@api/common/fabric-gateway';
describe('FabricService', () => { describe('FabricService', () => {
let service: FabricService; let service: FabricService;
let mockGateway: {
connect: jest.Mock;
disconnect: jest.Mock;
storeLog: jest.Mock;
getLogById: jest.Mock;
getAllLogs: jest.Mock;
getLogsWithPagination: jest.Mock;
};
let mockLogger: {
log: jest.Mock;
error: jest.Mock;
warn: jest.Mock;
debug: jest.Mock;
};
beforeEach(async () => { beforeEach(async () => {
// Reset all mocks before each test
jest.clearAllMocks();
// Create mock gateway
mockGateway = {
connect: jest.fn(),
disconnect: jest.fn(),
storeLog: jest.fn(),
getLogById: jest.fn(),
getAllLogs: jest.fn(),
getLogsWithPagination: jest.fn(),
};
// Create mock logger
mockLogger = {
log: jest.fn(),
error: jest.fn(),
warn: jest.fn(),
debug: jest.fn(),
};
const module: TestingModule = await Test.createTestingModule({ const module: TestingModule = await Test.createTestingModule({
providers: [FabricService], providers: [
FabricService,
{
provide: FabricGateway,
useValue: mockGateway,
},
{
provide: Logger,
useValue: mockLogger,
},
],
}).compile(); }).compile();
service = module.get<FabricService>(FabricService); service = module.get<FabricService>(FabricService);
}); });
describe('constructor', () => {
it('should be defined', () => { it('should be defined', () => {
expect(service).toBeDefined(); expect(service).toBeDefined();
}); });
}); });
// =====================================================================
// LIFECYCLE HOOKS
// =====================================================================
describe('onModuleInit', () => {
it('should connect to Fabric network on module init', async () => {
mockGateway.connect.mockResolvedValue(true);
await service.onModuleInit();
expect(mockGateway.connect).toHaveBeenCalledTimes(1);
});
it('should throw error when connection fails', async () => {
const connectionError = new Error('Connection refused');
mockGateway.connect.mockRejectedValue(connectionError);
await expect(service.onModuleInit()).rejects.toThrow(
'Failed to connect to Fabric network: Connection refused',
);
expect(mockGateway.connect).toHaveBeenCalledTimes(1);
});
it('should include original error message in thrown error', async () => {
const originalError = new Error('ECONNREFUSED: localhost:7051');
mockGateway.connect.mockRejectedValue(originalError);
try {
await service.onModuleInit();
fail('Should have thrown');
} catch (error: any) {
expect(error.message).toContain('ECONNREFUSED');
expect(error.message).toBe(
'Failed to connect to Fabric network: ECONNREFUSED: localhost:7051',
);
}
});
it('should handle non-Error objects gracefully', async () => {
mockGateway.connect.mockRejectedValue('String error');
await expect(service.onModuleInit()).rejects.toThrow(
'Failed to connect to Fabric network: Unknown error',
);
});
});
describe('onApplicationShutdown', () => {
it('should disconnect from Fabric network on shutdown', async () => {
mockGateway.disconnect.mockResolvedValue(undefined);
await service.onApplicationShutdown();
expect(mockGateway.disconnect).toHaveBeenCalledTimes(1);
});
it('should disconnect with signal parameter', async () => {
mockGateway.disconnect.mockResolvedValue(undefined);
await service.onApplicationShutdown('SIGTERM');
expect(mockGateway.disconnect).toHaveBeenCalledTimes(1);
});
it('should not throw error if disconnect fails - just log it', async () => {
const disconnectError = new Error('Disconnect failed');
mockGateway.disconnect.mockRejectedValue(disconnectError);
// Should NOT throw - graceful shutdown
await expect(service.onApplicationShutdown()).resolves.not.toThrow();
expect(mockGateway.disconnect).toHaveBeenCalledTimes(1);
});
it('should handle non-Error objects during disconnect gracefully', async () => {
mockGateway.disconnect.mockRejectedValue('String error');
await expect(service.onApplicationShutdown()).resolves.not.toThrow();
});
});
// =====================================================================
// storeLog
// =====================================================================
describe('storeLog', () => {
const mockStoreLogResult = {
transactionId: 'tx123',
status: 'COMMITTED',
};
it('should store log with all parameters', async () => {
mockGateway.storeLog.mockResolvedValue(mockStoreLogResult);
const result = await service.storeLog(
'log-1',
'CREATE',
'user-1',
'{"data": "test"}',
);
expect(mockGateway.storeLog).toHaveBeenCalledWith(
'log-1',
'CREATE',
'user-1',
'{"data": "test"}',
);
expect(result).toEqual(mockStoreLogResult);
});
it('should wrap gateway errors with InternalServerErrorException', async () => {
const storeError = new Error('Transaction failed');
mockGateway.storeLog.mockRejectedValue(storeError);
await expect(
service.storeLog('log-1', 'CREATE', 'user-1', '{}'),
).rejects.toThrow(InternalServerErrorException);
await expect(
service.storeLog('log-1', 'CREATE', 'user-1', '{}'),
).rejects.toThrow('Gagal menyimpan log ke blockchain');
});
it('should not validate empty id (NO VALIDATION)', async () => {
mockGateway.storeLog.mockResolvedValue(mockStoreLogResult);
// Empty ID passes through without validation
await service.storeLog('', 'CREATE', 'user-1', '{}');
expect(mockGateway.storeLog).toHaveBeenCalledWith(
'',
'CREATE',
'user-1',
'{}',
);
});
it('should not validate empty event (NO VALIDATION)', async () => {
mockGateway.storeLog.mockResolvedValue(mockStoreLogResult);
await service.storeLog('log-1', '', 'user-1', '{}');
expect(mockGateway.storeLog).toHaveBeenCalledWith(
'log-1',
'',
'user-1',
'{}',
);
});
it('should not validate empty user_id (NO VALIDATION)', async () => {
mockGateway.storeLog.mockResolvedValue(mockStoreLogResult);
await service.storeLog('log-1', 'CREATE', '', '{}');
expect(mockGateway.storeLog).toHaveBeenCalledWith(
'log-1',
'CREATE',
'',
'{}',
);
});
it('should not validate malformed JSON payload (NO VALIDATION)', async () => {
mockGateway.storeLog.mockResolvedValue(mockStoreLogResult);
// Invalid JSON passes through
await service.storeLog('log-1', 'CREATE', 'user-1', 'not-valid-json');
expect(mockGateway.storeLog).toHaveBeenCalledWith(
'log-1',
'CREATE',
'user-1',
'not-valid-json',
);
});
});
// =====================================================================
// getLogById
// =====================================================================
describe('getLogById', () => {
const mockLog = {
id: 'log-1',
event: 'CREATE',
user_id: 'user-1',
payload: '{}',
timestamp: '2024-01-01T00:00:00Z',
};
it('should retrieve log by id', async () => {
mockGateway.getLogById.mockResolvedValue(mockLog);
const result = await service.getLogById('log-1');
expect(mockGateway.getLogById).toHaveBeenCalledWith('log-1');
expect(result).toEqual(mockLog);
});
it('should wrap gateway errors with InternalServerErrorException', async () => {
const notFoundError = new Error('Log not found');
mockGateway.getLogById.mockRejectedValue(notFoundError);
await expect(service.getLogById('non-existent')).rejects.toThrow(
InternalServerErrorException,
);
await expect(service.getLogById('non-existent')).rejects.toThrow(
'Gagal mengambil log dari blockchain',
);
});
/**
* ISSUE FOUND: No validation for empty or null id.
*/
it('should not validate empty id (NO VALIDATION)', async () => {
mockGateway.getLogById.mockResolvedValue(null);
await service.getLogById('');
expect(mockGateway.getLogById).toHaveBeenCalledWith('');
});
});
// =====================================================================
// getAllLogs
// =====================================================================
describe('getAllLogs', () => {
const mockLogs = [
{ id: 'log-1', event: 'CREATE' },
{ id: 'log-2', event: 'UPDATE' },
];
it('should retrieve all logs', async () => {
mockGateway.getAllLogs.mockResolvedValue(mockLogs);
const result = await service.getAllLogs();
expect(mockGateway.getAllLogs).toHaveBeenCalledTimes(1);
expect(result).toEqual(mockLogs);
});
it('should return empty array when no logs exist', async () => {
mockGateway.getAllLogs.mockResolvedValue([]);
const result = await service.getAllLogs();
expect(result).toEqual([]);
});
it('should wrap gateway errors with InternalServerErrorException', async () => {
const queryError = new Error('Query failed');
mockGateway.getAllLogs.mockRejectedValue(queryError);
await expect(service.getAllLogs()).rejects.toThrow(
InternalServerErrorException,
);
await expect(service.getAllLogs()).rejects.toThrow(
'Gagal mengambil semua log dari blockchain',
);
});
});
// =====================================================================
// getLogsWithPagination
// =====================================================================
describe('getLogsWithPagination', () => {
const mockPaginatedResult = {
records: [{ id: 'log-1' }, { id: 'log-2' }],
bookmark: 'next-page-bookmark',
fetchedRecordsCount: 2,
};
it('should retrieve logs with pagination', async () => {
mockGateway.getLogsWithPagination.mockResolvedValue(mockPaginatedResult);
const result = await service.getLogsWithPagination(10, '');
expect(mockGateway.getLogsWithPagination).toHaveBeenCalledWith(10, '');
expect(result).toEqual(mockPaginatedResult);
});
it('should pass bookmark for subsequent pages', async () => {
mockGateway.getLogsWithPagination.mockResolvedValue(mockPaginatedResult);
await service.getLogsWithPagination(10, 'page-2-bookmark');
expect(mockGateway.getLogsWithPagination).toHaveBeenCalledWith(
10,
'page-2-bookmark',
);
});
it('should wrap gateway errors with InternalServerErrorException', async () => {
const paginationError = new Error('Pagination failed');
mockGateway.getLogsWithPagination.mockRejectedValue(paginationError);
await expect(service.getLogsWithPagination(10, '')).rejects.toThrow(
InternalServerErrorException,
);
await expect(service.getLogsWithPagination(10, '')).rejects.toThrow(
'Gagal mengambil log dengan paginasi dari blockchain',
);
});
/**
* ISSUE FOUND: No validation for pageSize.
* Negative, zero, or extremely large values pass through.
*/
it('should not validate zero pageSize (NO VALIDATION)', async () => {
mockGateway.getLogsWithPagination.mockResolvedValue({
records: [],
bookmark: '',
});
await service.getLogsWithPagination(0, '');
expect(mockGateway.getLogsWithPagination).toHaveBeenCalledWith(0, '');
});
it('should not validate negative pageSize (NO VALIDATION)', async () => {
mockGateway.getLogsWithPagination.mockResolvedValue({
records: [],
bookmark: '',
});
await service.getLogsWithPagination(-5, '');
expect(mockGateway.getLogsWithPagination).toHaveBeenCalledWith(-5, '');
});
it('should not validate extremely large pageSize (NO VALIDATION)', async () => {
mockGateway.getLogsWithPagination.mockResolvedValue({
records: [],
bookmark: '',
});
await service.getLogsWithPagination(999999999, '');
expect(mockGateway.getLogsWithPagination).toHaveBeenCalledWith(
999999999,
'',
);
});
});
describe('Code Review Issues', () => {
it('should use dependency injection for FabricGateway', () => {
expect(service).toBeDefined();
// Gateway is now injected, we can test it directly
expect(mockGateway.connect).toBeDefined();
});
it('should wrap errors with NestJS InternalServerErrorException', async () => {
const rawError = new Error('Raw gateway error');
mockGateway.storeLog.mockRejectedValue(rawError);
// Errors are now wrapped with InternalServerErrorException
await expect(
service.storeLog('log-1', 'CREATE', 'user-1', '{}'),
).rejects.toThrow(InternalServerErrorException);
});
it('should accept signal parameter for shutdown logging', async () => {
mockGateway.disconnect.mockResolvedValue(undefined);
// Signal is now logged (though we can't verify without mocking Logger)
await service.onApplicationShutdown('SIGTERM');
expect(mockGateway.disconnect).toHaveBeenCalled();
});
});
});

View File

@ -1,6 +1,7 @@
import FabricGateway from '@api/common/fabric-gateway'; import FabricGateway from '@api/common/fabric-gateway';
import { import {
Injectable, Injectable,
InternalServerErrorException,
Logger, Logger,
OnApplicationShutdown, OnApplicationShutdown,
OnModuleInit, OnModuleInit,
@ -8,8 +9,10 @@ import {
@Injectable() @Injectable()
export class FabricService implements OnModuleInit, OnApplicationShutdown { export class FabricService implements OnModuleInit, OnApplicationShutdown {
private readonly logger = new Logger(FabricService.name); constructor(
private readonly gateway = new FabricGateway(); private readonly gateway: FabricGateway,
private readonly logger: Logger,
) {}
async onModuleInit() { async onModuleInit() {
this.logger.log('Attempting to connect to Fabric network...'); this.logger.log('Attempting to connect to Fabric network...');
@ -17,35 +20,94 @@ export class FabricService implements OnModuleInit, OnApplicationShutdown {
await this.gateway.connect(); await this.gateway.connect();
this.logger.log('Successfully connected to Fabric network.'); this.logger.log('Successfully connected to Fabric network.');
} catch (error) { } catch (error) {
this.logger.error('Failed to connect to Fabric network:', error); const errorMessage =
throw new Error('Failed to connect to Fabric network'); error instanceof Error ? error.message : 'Unknown error';
this.logger.error(
`Failed to connect to Fabric network: ${errorMessage}`,
error instanceof Error ? error.stack : undefined,
);
throw new Error(`Failed to connect to Fabric network: ${errorMessage}`);
} }
} }
async onApplicationShutdown(signal?: string) { async onApplicationShutdown(signal?: string) {
this.logger.log('Disconnecting from Fabric network...'); this.logger.log(
`Disconnecting from Fabric network...${signal ? ` (signal: ${signal})` : ''}`,
);
try {
await this.gateway.disconnect(); await this.gateway.disconnect();
this.logger.log('Successfully disconnected from Fabric network.');
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : 'Unknown error';
this.logger.error(
`Failed to disconnect from Fabric network: ${errorMessage}`,
error instanceof Error ? error.stack : undefined,
);
}
} }
/**
* Menyimpan entri log ke blockchain Fabric.
*
* @param id - ID unik log (harus tidak kosong, divalidasi oleh method pemanggil). Contoh nilai: 'REKAM_12XX'
* @param event - Jenis event (harus tidak kosong, divalidasi oleh method pemanggil). Contoh Nilai: 'CREATE'
* @param user_id - ID pengguna (harus valid, divalidasi oleh method pemanggil). Contoh Nilai: '1'
* @param payload - Payload string berupa Hash dari payload data (method hanya menerima string berupa hash). Contoh Nilai: '4f9075ab9fc724a0xxxx'
*
* @throws Error if Fabric gateway fails
*/
async storeLog(id: string, event: string, user_id: string, payload: string) { async storeLog(id: string, event: string, user_id: string, payload: string) {
this.logger.log(`Storing log with ID: ${id}`); this.logger.log(`Storing log with ID: ${id}`);
return this.gateway.storeLog(id, event, user_id, payload); try {
return await this.gateway.storeLog(id, event, user_id, payload);
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';
this.logger.error(`Failed to store log: ${message}`);
throw new InternalServerErrorException(
'Gagal menyimpan log ke blockchain',
);
}
} }
async getLogById(id: string) { async getLogById(id: string) {
this.logger.log(`Retrieving log with ID: ${id}`); this.logger.log(`Retrieving log with ID: ${id}`);
return this.gateway.getLogById(id); try {
return await this.gateway.getLogById(id);
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';
this.logger.error(`Failed to get log by ID: ${message}`);
throw new InternalServerErrorException(
'Gagal mengambil log dari blockchain',
);
}
} }
async getAllLogs() { async getAllLogs() {
this.logger.log('Retrieving all logs from Fabric network'); this.logger.log('Retrieving all logs from Fabric network');
return this.gateway.getAllLogs(); try {
return await this.gateway.getAllLogs();
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';
this.logger.error(`Failed to get all logs: ${message}`);
throw new InternalServerErrorException(
'Gagal mengambil semua log dari blockchain',
);
}
} }
async getLogsWithPagination(pageSize: number, bookmark: string) { async getLogsWithPagination(pageSize: number, bookmark: string) {
this.logger.log( this.logger.log(
`Retrieving logs with pagination - Page Size: ${pageSize}, Bookmark: ${bookmark}`, `Retrieving logs with pagination - Page Size: ${pageSize}, Bookmark: ${bookmark}`,
); );
return this.gateway.getLogsWithPagination(pageSize, bookmark); try {
return await this.gateway.getLogsWithPagination(pageSize, bookmark);
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error';
this.logger.error(`Failed to get logs with pagination: ${message}`);
throw new InternalServerErrorException(
'Gagal mengambil log dengan paginasi dari blockchain',
);
}
} }
} }

View File

@ -1,11 +1,4 @@
import { import { IsString, IsNotEmpty, Length, IsEnum } from 'class-validator';
IsString,
IsNotEmpty,
Length,
IsJSON,
IsEnum,
IsNumber,
} from 'class-validator';
export class StoreLogDto { export class StoreLogDto {
@IsNotEmpty({ message: 'ID wajib diisi' }) @IsNotEmpty({ message: 'ID wajib diisi' })
@ -34,7 +27,8 @@ export class StoreLogDto {
event: string; event: string;
@IsNotEmpty({ message: 'User ID wajib diisi' }) @IsNotEmpty({ message: 'User ID wajib diisi' })
user_id: number | string; @IsString({ message: 'User ID harus berupa string' })
user_id: string;
@IsNotEmpty({ message: 'Payload wajib diisi' }) @IsNotEmpty({ message: 'Payload wajib diisi' })
@IsString({ message: 'Payload harus berupa string' }) @IsString({ message: 'Payload harus berupa string' })

View File

@ -0,0 +1,391 @@
/**
* BackfillService - Database to Blockchain Migration
*
* STATUS: NOT IN USE (preserved for future use)
*
* This service syncs existing database records to the blockchain.
* It was designed for initial data migration and can be re-enabled
* when needed.
*
* To enable:
* 1. Add BackfillService to LogModule providers
* 2. Create a controller endpoint or CLI command to trigger it
* 3. Ensure BACKFILL_USER_ID is set in environment
*
* @see git log for original implementation history
*/
import { Injectable, Logger } from '@nestjs/common';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import { sha256 } from '@api/common/crypto/hash';
import { PrismaService } from '../prisma/prisma.service';
import { FabricService } from '../fabric/fabric.service';
import type {
pemberian_obat as PemberianObat,
pemberian_tindakan as PemberianTindakan,
rekam_medis as RekamMedis,
} from '@dist/generated/prisma';
/** A single record that could not be written to the blockchain. */
export interface BackfillFailure {
  // Which source table the record came from.
  entity: EntityKey;
  // Source-record identifier (stringified).
  id: string;
  // Serialized error message for the failed attempt.
  reason: string;
  // ISO-8601 time the failure was recorded.
  timestamp: string;
}

/** In-memory/on-disk shape of the checkpoint file (backfill-state.json). */
interface BackfillState {
  // Last successfully processed id per entity (resume point).
  cursors: Partial<Record<EntityKey, string>>;
  // Outstanding failures keyed by "<entity>:<id>".
  failures: Record<string, BackfillFailure>;
  // Per-entity stats from the most recent run.
  metadata?: Partial<
    Record<
      EntityKey,
      {
        lastRunAt: string;
        processed: number;
        success: number;
        failed: number;
      }
    >
  >;
}

/** Result of one bounded sync pass over a single entity. */
export interface BackfillSummary {
  processed: number;
  success: number;
  failed: number;
  // Cursor to resume from on the next run (null if nothing processed).
  lastCursor: string | null;
  // Failures for this entity still present in the checkpoint.
  failures: BackfillFailure[];
}

/** Source tables eligible for backfill. */
export type EntityKey = 'pemberian_obat' | 'rekam_medis' | 'pemberian_tindakan';
@Injectable()
export class BackfillService {
  private readonly logger = new Logger(BackfillService.name);

  // Checkpoint file holding cursors/failures. Resolved against process.cwd(),
  // so the effective location varies with where the process is launched —
  // NOTE(review): consider a configurable absolute path for dev/test/prod.
  private readonly statePath = path.resolve(
    process.cwd(),
    'backfill-state.json',
  );

  // Identity stamped on every backfilled ledger entry. Falls back to '9'
  // when BACKFILL_USER_ID is unset — NOTE(review): confirm this default is
  // an intentional audit identity and not a leftover test value.
  private readonly backfillUserId = process.env.BACKFILL_USER_ID ?? '9';

  constructor(
    private readonly fabricService: FabricService,
    private readonly prisma: PrismaService,
  ) {}

  /**
   * Runs one bounded backfill pass over all three entities, sequentially,
   * then records per-entity run metadata in the checkpoint file.
   *
   * Each sync mutates the shared `state` object (cursors/failures) and
   * persists it incrementally, so an interrupted run resumes where it left off.
   *
   * @param limitPerEntity - max records to process per entity in this pass
   * @param batchSize - records fetched and submitted per batch
   * @returns per-entity summaries plus the checkpoint file path
   */
  async storeFromDBToBlockchain(
    limitPerEntity = 5,
    batchSize = 1,
  ): Promise<{
    summaries: Record<string, BackfillSummary>;
    checkpointFile: string;
  }> {
    const state = await this.loadState();
    const summaries = {
      pemberian_obat: await this.syncPemberianObat(
        state,
        limitPerEntity,
        batchSize,
      ),
      rekam_medis: await this.syncRekamMedis(state, limitPerEntity, batchSize),
      pemberian_tindakan: await this.syncPemberianTindakan(
        state,
        limitPerEntity,
        batchSize,
      ),
    } as Record<EntityKey, BackfillSummary>;

    // Stamp all three entities with the same run timestamp.
    const timestamp = new Date().toISOString();
    await this.persistState({
      ...state,
      metadata: {
        ...(state.metadata ?? {}),
        pemberian_obat: {
          lastRunAt: timestamp,
          processed: summaries.pemberian_obat.processed,
          success: summaries.pemberian_obat.success,
          failed: summaries.pemberian_obat.failed,
        },
        rekam_medis: {
          lastRunAt: timestamp,
          processed: summaries.rekam_medis.processed,
          success: summaries.rekam_medis.success,
          failed: summaries.rekam_medis.failed,
        },
        pemberian_tindakan: {
          lastRunAt: timestamp,
          processed: summaries.pemberian_tindakan.processed,
          success: summaries.pemberian_tindakan.success,
          failed: summaries.pemberian_tindakan.failed,
        },
      },
    });

    return {
      summaries,
      checkpointFile: this.statePath,
    };
  }

  /**
   * Syncs `pemberian_obat` rows. Cursor is the numeric row id (stored as a
   * string in the checkpoint); payload hash covers drug name, quantity and
   * usage instructions.
   */
  private async syncPemberianObat(
    state: BackfillState,
    limit: number,
    batchSize: number,
  ): Promise<BackfillSummary> {
    return this.syncEntity<PemberianObat>(
      state,
      'pemberian_obat',
      limit,
      batchSize,
      async (cursor, take) => {
        const query: any = {
          orderBy: { id: 'asc' },
          take,
        };
        if (cursor) {
          // skip: 1 excludes the cursor row itself (already processed).
          query.cursor = { id: Number(cursor) };
          query.skip = 1;
        }
        return this.prisma.pemberian_obat.findMany(query);
      },
      async (record) => {
        const payload = {
          obat: record.obat,
          jumlah_obat: record.jumlah_obat,
          aturan_pakai: record.aturan_pakai,
        };
        // Only the hash of the payload goes on-chain, not the data itself.
        const payloadHash = sha256(JSON.stringify(payload));
        await this.fabricService.storeLog(
          `OBAT_${record.id}`,
          'obat_created',
          this.backfillUserId,
          payloadHash,
        );
        return `${record.id}`;
      },
      (record) => `${record.id}`,
    );
  }

  /**
   * Syncs `rekam_medis` rows. Cursor is the string `id_visit`.
   */
  private async syncRekamMedis(
    state: BackfillState,
    limit: number,
    batchSize: number,
  ): Promise<BackfillSummary> {
    return this.syncEntity<RekamMedis>(
      state,
      'rekam_medis',
      limit,
      batchSize,
      async (cursor, take) => {
        const query: any = {
          orderBy: { id_visit: 'asc' },
          take,
        };
        if (cursor) {
          query.cursor = { id_visit: cursor };
          query.skip = 1;
        }
        return this.prisma.rekam_medis.findMany(query);
      },
      async (record) => {
        const payload = {
          // NOTE(review): dokter_id is hardcoded to 123 and baked into the
          // on-chain hash — looks like a placeholder; confirm the real
          // doctor id source before enabling this service.
          dokter_id: 123,
          visit_id: record.id_visit,
          anamnese: record.anamnese ?? '',
          jenis_kasus: record.jenis_kasus ?? '',
          tindak_lanjut: record.tindak_lanjut ?? '',
        };
        const payloadHash = sha256(JSON.stringify(payload));
        await this.fabricService.storeLog(
          `REKAM_${record.id_visit}`,
          'rekam_medis_created',
          this.backfillUserId,
          payloadHash,
        );
        return record.id_visit;
      },
      (record) => record.id_visit,
    );
  }

  /**
   * Syncs `pemberian_tindakan` rows. Cursor is the numeric row id
   * (stringified in the checkpoint).
   */
  private async syncPemberianTindakan(
    state: BackfillState,
    limit: number,
    batchSize: number,
  ): Promise<BackfillSummary> {
    return this.syncEntity<PemberianTindakan>(
      state,
      'pemberian_tindakan',
      limit,
      batchSize,
      async (cursor, take) => {
        const query: any = {
          orderBy: { id: 'asc' },
          take,
        };
        if (cursor) {
          query.cursor = { id: Number(cursor) };
          query.skip = 1;
        }
        return this.prisma.pemberian_tindakan.findMany(query);
      },
      async (record) => {
        const payload = {
          id_visit: record.id_visit,
          tindakan: record.tindakan,
          kategori_tindakan: record.kategori_tindakan ?? null,
          kelompok_tindakan: record.kelompok_tindakan ?? null,
        };
        const payloadHash = sha256(JSON.stringify(payload));
        await this.fabricService.storeLog(
          `TINDAKAN_${record.id}`,
          'tindakan_dokter_created',
          this.backfillUserId,
          payloadHash,
        );
        return `${record.id}`;
      },
      (record) => `${record.id}`,
    );
  }

  /**
   * Generic cursor-driven sync loop shared by all entities.
   *
   * Fetches batches after the stored cursor, submits each record (batch
   * members run concurrently via allSettled), tallies successes/failures,
   * and persists the checkpoint AFTER EVERY BATCH so a crash mid-run
   * resumes from the last completed batch rather than restarting.
   *
   * @param fetchBatch - loads up to `take` records after `cursor`
   * @param processRecord - submits one record to Fabric; returns its id
   * @param recordIdentifier - extracts the cursor value from a record
   */
  private async syncEntity<T>(
    state: BackfillState,
    entity: EntityKey,
    limit: number,
    batchSize: number,
    fetchBatch: (cursor: string | null, take: number) => Promise<T[]>,
    processRecord: (record: T) => Promise<string>,
    recordIdentifier: (record: T) => string,
  ): Promise<BackfillSummary> {
    let cursor = state.cursors[entity] ?? null;
    let processed = 0;
    let success = 0;
    let failed = 0;

    while (processed < limit) {
      const remaining = limit - processed;
      if (remaining <= 0) {
        break;
      }

      const take = Math.min(batchSize, remaining);
      const records = await fetchBatch(cursor, take);
      if (!records || records.length === 0) {
        // No more rows after the cursor — done for this entity.
        break;
      }

      // allSettled: one failing record does not abort the batch.
      const results = await Promise.allSettled(
        records.map(async (record) => processRecord(record)),
      );

      results.forEach((result, index) => {
        const id = recordIdentifier(records[index]);
        const key = this.failureKey(entity, id);
        if (result.status === 'fulfilled') {
          success += 1;
          // A record that previously failed and now succeeded is cleared.
          delete state.failures[key];
        } else {
          failed += 1;
          const failure = {
            entity,
            id,
            reason: this.serializeError(result.reason),
            timestamp: new Date().toISOString(),
          } satisfies BackfillFailure;
          state.failures[key] = failure;
          this.logger.warn(
            `Failed to backfill ${entity} ${id}: ${failure.reason}`,
          );
        }
      });

      // Advance the cursor past the whole batch (failed records included —
      // they are tracked in `failures`, not retried by the cursor).
      processed += records.length;
      cursor = recordIdentifier(records[records.length - 1]);
      state.cursors[entity] = cursor;
      await this.persistState(state);

      if (records.length < take) {
        // Short batch means the table is exhausted.
        break;
      }
    }

    return {
      processed,
      success,
      failed,
      lastCursor: cursor,
      failures: this.collectFailures(entity, state),
    };
  }

  /**
   * Loads the checkpoint file; a missing file (ENOENT) yields a fresh,
   * empty state. Any other read/parse error is propagated.
   */
  private async loadState(): Promise<BackfillState> {
    try {
      const raw = await fs.readFile(this.statePath, 'utf8');
      const parsed = JSON.parse(raw);
      return {
        cursors: parsed.cursors ?? {},
        failures: parsed.failures ?? {},
        metadata: parsed.metadata ?? {},
      } satisfies BackfillState;
    } catch (error) {
      const err = error as NodeJS.ErrnoException;
      if (err.code === 'ENOENT') {
        return {
          cursors: {},
          failures: {},
          metadata: {},
        };
      }
      throw error;
    }
  }

  /** Writes the checkpoint to disk (pretty-printed JSON), creating the parent directory if needed. */
  private async persistState(state: BackfillState) {
    const serializable = {
      cursors: state.cursors,
      failures: state.failures,
      metadata: state.metadata ?? {},
    };
    await fs.mkdir(path.dirname(this.statePath), { recursive: true });
    await fs.writeFile(
      this.statePath,
      JSON.stringify(serializable, null, 2),
      'utf8',
    );
  }

  /** Returns the subset of recorded failures belonging to one entity. */
  private collectFailures(
    entity: EntityKey,
    state: BackfillState,
  ): BackfillFailure[] {
    return Object.values(state.failures).filter(
      (entry) => entry.entity === entity,
    );
  }

  /** Stable key used to index a failure in the checkpoint map. */
  private failureKey(entity: EntityKey, id: string) {
    return `${entity}:${id}`;
  }

  /** Best-effort conversion of an arbitrary rejection reason to a string. */
  private serializeError(error: unknown): string {
    if (error instanceof Error) {
      return error.message;
    }
    if (typeof error === 'string') {
      return error;
    }
    try {
      return JSON.stringify(error);
    } catch {
      return String(error);
    }
  }
}

View File

@ -1,18 +0,0 @@
import { Test, TestingModule } from '@nestjs/testing';
import { LogController } from './log.controller';

// Smoke test: only verifies that the Nest DI container can instantiate the
// controller. NOTE(review): LogService is not provided or mocked in this
// testing module — confirm the controller's dependencies resolve here,
// otherwise compile() will fail at runtime.
describe('LogController', () => {
  let controller: LogController;

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      controllers: [LogController],
    }).compile();

    controller = module.get<LogController>(LogController);
  });

  it('should be defined', () => {
    expect(controller).toBeDefined();
  });
});

View File

@ -1,29 +0,0 @@
import { Controller, Post, UseGuards } from '@nestjs/common';
import { LogService } from './log.service';
import { AuthGuard } from '../auth/guard/auth.guard';

/** REST surface for pushing audit logs onto the blockchain. */
@Controller('log')
export class LogController {
  constructor(private readonly logs: LogService) {}

  /**
   * Triggers a backfill of existing database rows onto the blockchain.
   * Only authenticated callers may start the sync.
   */
  @Post('/store-to-blockchain')
  @UseGuards(AuthGuard)
  async storeLog() {
    return this.logs.storeFromDBToBlockchain();
  }

  // Disabled endpoints, kept for reference:
  // @Post()       storeLog(@Body() dto: StoreLogDto) -> this.logs.storeLog(dto)
  // @Get(':id')   getLogById(@Param('id') id: string) -> this.logs.getLogById(id)
  // @Get()        getAllLogs() -> this.logs.getAllLogs()
}

View File

@ -1,12 +1,9 @@
import { Module } from '@nestjs/common'; import { Module } from '@nestjs/common';
import { LogController } from './log.controller';
import { LogService } from './log.service'; import { LogService } from './log.service';
import { FabricModule } from '../fabric/fabric.module'; import { FabricModule } from '../fabric/fabric.module';
import { PrismaModule } from '../prisma/prisma.module';
@Module({ @Module({
imports: [FabricModule, PrismaModule], imports: [FabricModule],
controllers: [LogController],
providers: [LogService], providers: [LogService],
exports: [LogService], exports: [LogService],
}) })

View File

@ -2,35 +2,587 @@ import { Test, TestingModule } from '@nestjs/testing';
import { LogService } from './log.service'; import { LogService } from './log.service';
import { FabricService } from '../fabric/fabric.service'; import { FabricService } from '../fabric/fabric.service';
import { PrismaService } from '../prisma/prisma.service'; import { PrismaService } from '../prisma/prisma.service';
import { StoreLogDto } from './dto/store-log.dto';
describe('LogService', () => { describe('LogService', () => {
let service: LogService; let service: LogService;
let mockFabricService: {
storeLog: jest.Mock;
getLogById: jest.Mock;
getLogsWithPagination: jest.Mock;
};
let mockPrismaService: {
pemberian_obat: { findMany: jest.Mock };
rekam_medis: { findMany: jest.Mock };
pemberian_tindakan: { findMany: jest.Mock };
};
beforeEach(async () => { beforeEach(async () => {
const fabricServiceMock = { jest.clearAllMocks();
mockFabricService = {
storeLog: jest.fn(), storeLog: jest.fn(),
getLogById: jest.fn(), getLogById: jest.fn(),
getLogsWithPagination: jest.fn(), getLogsWithPagination: jest.fn(),
} as unknown as FabricService; };
const prismaServiceMock = { mockPrismaService = {
pemberian_obat: { findMany: jest.fn() }, pemberian_obat: { findMany: jest.fn() },
rekam_medis: { findMany: jest.fn() }, rekam_medis: { findMany: jest.fn() },
pemberian_tindakan: { findMany: jest.fn() }, pemberian_tindakan: { findMany: jest.fn() },
} as unknown as PrismaService; };
const module: TestingModule = await Test.createTestingModule({ const module: TestingModule = await Test.createTestingModule({
providers: [ providers: [
LogService, LogService,
{ provide: FabricService, useValue: fabricServiceMock }, { provide: FabricService, useValue: mockFabricService },
{ provide: PrismaService, useValue: prismaServiceMock }, { provide: PrismaService, useValue: mockPrismaService },
], ],
}).compile(); }).compile();
service = module.get<LogService>(LogService); service = module.get<LogService>(LogService);
}); });
describe('constructor', () => {
it('should be defined', () => { it('should be defined', () => {
expect(service).toBeDefined(); expect(service).toBeDefined();
}); });
}); });
  // =====================================================================
  // storeLog
  // =====================================================================
  describe('storeLog', () => {
    // Baseline DTO reused by every case; individual tests override fields.
    const validDto: StoreLogDto = {
      id: 'REKAM_123',
      event: 'rekam_medis_created',
      user_id: '1',
      payload: 'abc123hash',
    };

    it('should store log with valid DTO', async () => {
      const mockResult = { transactionId: 'tx123', status: 'COMMITTED' };
      mockFabricService.storeLog.mockResolvedValue(mockResult);

      const result = await service.storeLog(validDto);

      expect(mockFabricService.storeLog).toHaveBeenCalledWith(
        'REKAM_123',
        'rekam_medis_created',
        '1', // user_id converted to string
        'abc123hash',
      );
      expect(result).toEqual(mockResult);
    });

    it('should convert numeric user_id to string', async () => {
      mockFabricService.storeLog.mockResolvedValue({});

      await service.storeLog({ ...validDto, user_id: '42' });

      expect(mockFabricService.storeLog).toHaveBeenCalledWith(
        expect.any(String),
        expect.any(String),
        '42', // number converted to string
        expect.any(String),
      );
    });

    it('should handle string user_id', async () => {
      mockFabricService.storeLog.mockResolvedValue({});

      await service.storeLog({ ...validDto, user_id: 'user-abc' });

      expect(mockFabricService.storeLog).toHaveBeenCalledWith(
        expect.any(String),
        expect.any(String),
        'user-abc',
        expect.any(String),
      );
    });

    it('should propagate errors from FabricService', async () => {
      const error = new Error('Fabric transaction failed');
      mockFabricService.storeLog.mockRejectedValue(error);

      await expect(service.storeLog(validDto)).rejects.toThrow(
        'Fabric transaction failed',
      );
    });

    /**
     * ISSUE: No validation in service layer.
     * The DTO has class-validator decorators, but they only work
     * with ValidationPipe in the controller. Direct service calls
     * bypass validation.
     */
    it('should not validate empty id (NO VALIDATION IN SERVICE)', async () => {
      mockFabricService.storeLog.mockResolvedValue({});

      await service.storeLog({ ...validDto, id: '' });

      expect(mockFabricService.storeLog).toHaveBeenCalledWith(
        '',
        expect.any(String),
        expect.any(String),
        expect.any(String),
      );
    });

    it('should not validate empty event (NO VALIDATION IN SERVICE)', async () => {
      mockFabricService.storeLog.mockResolvedValue({});

      await service.storeLog({ ...validDto, event: '' });

      expect(mockFabricService.storeLog).toHaveBeenCalledWith(
        expect.any(String),
        '',
        expect.any(String),
        expect.any(String),
      );
    });

    // =====================================================================
    // Edge Cases: null/undefined inputs
    // =====================================================================
    // These cases document CURRENT behavior (inputs pass through unchecked),
    // not desired behavior — they pin the service's lack of validation.
    describe('edge cases - null/undefined inputs', () => {
      it('should throw when user_id is null (toString fails)', async () => {
        await expect(
          service.storeLog({ ...validDto, user_id: null as any }),
        ).rejects.toThrow();
        expect(mockFabricService.storeLog).not.toHaveBeenCalled();
      });

      it('should throw when user_id is undefined (toString fails)', async () => {
        await expect(
          service.storeLog({ ...validDto, user_id: undefined as any }),
        ).rejects.toThrow();
        expect(mockFabricService.storeLog).not.toHaveBeenCalled();
      });

      it('should pass null id to FabricService (NO VALIDATION)', async () => {
        mockFabricService.storeLog.mockResolvedValue({});

        await service.storeLog({ ...validDto, id: null as any });

        expect(mockFabricService.storeLog).toHaveBeenCalledWith(
          null,
          expect.any(String),
          expect.any(String),
          expect.any(String),
        );
      });

      it('should pass undefined id to FabricService (NO VALIDATION)', async () => {
        mockFabricService.storeLog.mockResolvedValue({});

        await service.storeLog({ ...validDto, id: undefined as any });

        expect(mockFabricService.storeLog).toHaveBeenCalledWith(
          undefined,
          expect.any(String),
          expect.any(String),
          expect.any(String),
        );
      });

      it('should pass null payload to FabricService (NO VALIDATION)', async () => {
        mockFabricService.storeLog.mockResolvedValue({});

        await service.storeLog({ ...validDto, payload: null as any });

        expect(mockFabricService.storeLog).toHaveBeenCalledWith(
          expect.any(String),
          expect.any(String),
          expect.any(String),
          null,
        );
      });

      it('should handle user_id = 0 (falsy but valid)', async () => {
        mockFabricService.storeLog.mockResolvedValue({});

        await service.storeLog({ ...validDto, user_id: '0' });

        expect(mockFabricService.storeLog).toHaveBeenCalledWith(
          expect.any(String),
          expect.any(String),
          '0',
          expect.any(String),
        );
      });

      it('should handle empty string user_id', async () => {
        mockFabricService.storeLog.mockResolvedValue({});

        await service.storeLog({ ...validDto, user_id: '' as any });

        expect(mockFabricService.storeLog).toHaveBeenCalledWith(
          expect.any(String),
          expect.any(String),
          '',
          expect.any(String),
        );
      });
    });
  });
  // =====================================================================
  // getLogById
  // =====================================================================
  describe('getLogById', () => {
    // Representative ledger record returned by the mocked FabricService.
    const mockLog = {
      id: 'REKAM_123',
      event: 'rekam_medis_created',
      user_id: '1',
      payload: 'hash123',
      timestamp: '2024-01-01T00:00:00Z',
    };

    it('should retrieve log by id', async () => {
      mockFabricService.getLogById.mockResolvedValue(mockLog);

      const result = await service.getLogById('REKAM_123');

      expect(mockFabricService.getLogById).toHaveBeenCalledWith('REKAM_123');
      expect(result).toEqual(mockLog);
    });

    it('should handle non-existent log', async () => {
      mockFabricService.getLogById.mockResolvedValue(null);

      const result = await service.getLogById('NON_EXISTENT');

      expect(result).toBeNull();
    });

    it('should propagate errors from FabricService', async () => {
      const error = new Error('Log not found');
      mockFabricService.getLogById.mockRejectedValue(error);

      await expect(service.getLogById('ERROR_ID')).rejects.toThrow(
        'Log not found',
      );
    });

    /**
     * ISSUE: No validation for empty id parameter.
     */
    it('should not validate empty id (NO VALIDATION)', async () => {
      mockFabricService.getLogById.mockResolvedValue(null);

      await service.getLogById('');

      expect(mockFabricService.getLogById).toHaveBeenCalledWith('');
    });

    // =====================================================================
    // Edge Cases: null/undefined inputs
    // =====================================================================
    // Documents current pass-through behavior, not desired behavior.
    describe('edge cases - null/undefined inputs', () => {
      it('should pass null id to FabricService (NO VALIDATION)', async () => {
        mockFabricService.getLogById.mockResolvedValue(null);

        await service.getLogById(null as any);

        expect(mockFabricService.getLogById).toHaveBeenCalledWith(null);
      });

      it('should pass undefined id to FabricService (NO VALIDATION)', async () => {
        mockFabricService.getLogById.mockResolvedValue(null);

        await service.getLogById(undefined as any);

        expect(mockFabricService.getLogById).toHaveBeenCalledWith(undefined);
      });
    });
  });
  // =====================================================================
  // getLogsWithPagination
  // =====================================================================
  describe('getLogsWithPagination', () => {
    // Canonical page shape returned by the mocked FabricService.
    const mockPaginatedResult = {
      records: [{ id: 'log-1' }, { id: 'log-2' }],
      bookmark: 'next-page-bookmark',
      fetchedRecordsCount: 2,
    };

    it('should retrieve logs with pagination', async () => {
      mockFabricService.getLogsWithPagination.mockResolvedValue(
        mockPaginatedResult,
      );

      const result = await service.getLogsWithPagination(10, '');

      expect(mockFabricService.getLogsWithPagination).toHaveBeenCalledWith(
        10,
        '',
      );
      expect(result).toEqual(mockPaginatedResult);
    });

    it('should pass bookmark for subsequent pages', async () => {
      mockFabricService.getLogsWithPagination.mockResolvedValue(
        mockPaginatedResult,
      );

      await service.getLogsWithPagination(10, 'page-2-bookmark');

      expect(mockFabricService.getLogsWithPagination).toHaveBeenCalledWith(
        10,
        'page-2-bookmark',
      );
    });

    it('should return empty result when no logs exist', async () => {
      mockFabricService.getLogsWithPagination.mockResolvedValue({
        records: [],
        bookmark: '',
        fetchedRecordsCount: 0,
      });

      const result = await service.getLogsWithPagination(10, '');

      expect(result.records).toEqual([]);
      expect(result.fetchedRecordsCount).toBe(0);
    });

    it('should propagate errors from FabricService', async () => {
      const error = new Error('Pagination failed');
      mockFabricService.getLogsWithPagination.mockRejectedValue(error);

      await expect(service.getLogsWithPagination(10, '')).rejects.toThrow(
        'Pagination failed',
      );
    });

    /**
     * ISSUE: No validation for pageSize parameter.
     * Zero, negative, or extremely large values pass through.
     */
    it('should not validate zero pageSize (NO VALIDATION)', async () => {
      mockFabricService.getLogsWithPagination.mockResolvedValue({
        records: [],
        bookmark: '',
      });

      await service.getLogsWithPagination(0, '');

      expect(mockFabricService.getLogsWithPagination).toHaveBeenCalledWith(
        0,
        '',
      );
    });

    it('should not validate negative pageSize (NO VALIDATION)', async () => {
      mockFabricService.getLogsWithPagination.mockResolvedValue({
        records: [],
        bookmark: '',
      });

      await service.getLogsWithPagination(-5, '');

      expect(mockFabricService.getLogsWithPagination).toHaveBeenCalledWith(
        -5,
        '',
      );
    });

    // =====================================================================
    // Edge Cases: null/undefined inputs
    // =====================================================================
    // Documents current pass-through behavior for degenerate page sizes.
    describe('edge cases - null/undefined inputs', () => {
      it('should pass null pageSize to FabricService (NO VALIDATION)', async () => {
        mockFabricService.getLogsWithPagination.mockResolvedValue({
          records: [],
          bookmark: '',
        });

        await service.getLogsWithPagination(null as any, '');

        expect(mockFabricService.getLogsWithPagination).toHaveBeenCalledWith(
          null,
          '',
        );
      });

      it('should pass undefined pageSize to FabricService (NO VALIDATION)', async () => {
        mockFabricService.getLogsWithPagination.mockResolvedValue({
          records: [],
          bookmark: '',
        });

        await service.getLogsWithPagination(undefined as any, '');

        expect(mockFabricService.getLogsWithPagination).toHaveBeenCalledWith(
          undefined,
          '',
        );
      });

      it('should pass null bookmark to FabricService (NO VALIDATION)', async () => {
        mockFabricService.getLogsWithPagination.mockResolvedValue({
          records: [],
          bookmark: '',
        });

        await service.getLogsWithPagination(10, null as any);

        expect(mockFabricService.getLogsWithPagination).toHaveBeenCalledWith(
          10,
          null,
        );
      });

      it('should handle NaN pageSize (NO VALIDATION)', async () => {
        mockFabricService.getLogsWithPagination.mockResolvedValue({
          records: [],
          bookmark: '',
        });

        await service.getLogsWithPagination(NaN, '');

        expect(mockFabricService.getLogsWithPagination).toHaveBeenCalledWith(
          NaN,
          '',
        );
      });

      it('should handle Infinity pageSize (NO VALIDATION)', async () => {
        mockFabricService.getLogsWithPagination.mockResolvedValue({
          records: [],
          bookmark: '',
        });

        await service.getLogsWithPagination(Infinity, '');

        expect(mockFabricService.getLogsWithPagination).toHaveBeenCalledWith(
          Infinity,
          '',
        );
      });
    });
  });
  // =====================================================================
  // CODE REVIEW FINDINGS
  // =====================================================================
  // This describe block intentionally records known defects as passing
  // tests so they stay visible until fixed; it is documentation-as-tests.
  describe('Code Review Issues', () => {
    /**
     * CRITICAL ISSUE 1: storeFromDBToBlockchain method is commented out!
     *
     * The controller calls this.logService.storeFromDBToBlockchain()
     * but the entire implementation is commented out in the service.
     * This will cause a runtime error when the endpoint is called.
     */
    it('should document that storeFromDBToBlockchain is commented out (BROKEN ENDPOINT)', () => {
      // Check if the method exists on the service
      expect(typeof (service as any).storeFromDBToBlockchain).toBe('undefined');
    });

    /**
     * ISSUE 2: Hardcoded backfillUserId.
     *
     * The service uses process.env.BACKFILL_USER_ID ?? '9' which defaults to '9'.
     * This could cause audit issues if the default is used unintentionally.
     */
    it('should have backfillUserId property', () => {
      // This is a private property, testing its existence indirectly
      expect(service).toBeDefined();
    });

    /**
     * ISSUE 3: statePath uses process.cwd() which can vary.
     *
     * In different environments (dev, test, prod), process.cwd()
     * may return different paths, causing file access issues.
     */

    /**
     * ISSUE 4: No error handling in storeLog method.
     *
     * Errors from FabricService propagate unchanged.
     * Should wrap with appropriate NestJS exceptions.
     */
    it('should not transform errors (MISSING ERROR HANDLING)', async () => {
      const rawError = new Error('Raw fabric error');
      mockFabricService.storeLog.mockRejectedValue(rawError);

      await expect(
        service.storeLog({
          id: 'test',
          event: 'rekam_medis_created',
          user_id: '1',
          payload: 'hash',
        }),
      ).rejects.toThrow('Raw fabric error');
    });

    /**
     * ISSUE 5: Commented out code is ~300 lines.
     *
     * The storeFromDBToBlockchain and related methods are all commented.
     * This should either be:
     * - Removed if not needed
     * - Uncommented and tested if needed
     * - Moved to a separate branch/PR
     */

    /**
     * ISSUE 6: The DTO allows both number and string for user_id.
     *
     * StoreLogDto has: user_id: number | string;
     *
     * This is inconsistent - the service converts it to string anyway.
     * Should pick one type and stick with it.
     */
    it('should handle both number and string user_id (TYPE INCONSISTENCY)', async () => {
      mockFabricService.storeLog.mockResolvedValue({});

      // Number
      await service.storeLog({
        id: 'test1',
        event: 'rekam_medis_created',
        user_id: '123',
        payload: 'hash',
      });
      expect(mockFabricService.storeLog).toHaveBeenLastCalledWith(
        'test1',
        'rekam_medis_created',
        '123',
        'hash',
      );

      // String
      await service.storeLog({
        id: 'test2',
        event: 'rekam_medis_created',
        user_id: 'abc',
        payload: 'hash',
      });
      expect(mockFabricService.storeLog).toHaveBeenLastCalledWith(
        'test2',
        'rekam_medis_created',
        'abc',
        'hash',
      );
    });

    /**
     * ISSUE 7: No logging in active methods.
     *
     * The service has a Logger but storeLog, getLogById, and
     * getLogsWithPagination don't use it.
     */
  });
});

View File

@ -1,62 +1,10 @@
import { Injectable, Logger } from '@nestjs/common'; import { Injectable } from '@nestjs/common';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import { sha256 } from '@api/common/crypto/hash';
import { PrismaService } from '../prisma/prisma.service';
import { FabricService } from '../fabric/fabric.service'; import { FabricService } from '../fabric/fabric.service';
import { StoreLogDto } from './dto/store-log.dto'; import { StoreLogDto } from './dto/store-log.dto';
import type {
pemberian_obat as PemberianObat,
pemberian_tindakan as PemberianTindakan,
rekam_medis as RekamMedis,
} from '@dist/generated/prisma';
export interface BackfillFailure {
entity: EntityKey;
id: string;
reason: string;
timestamp: string;
}
interface BackfillState {
cursors: Partial<Record<EntityKey, string>>;
failures: Record<string, BackfillFailure>;
metadata?: Partial<
Record<
EntityKey,
{
lastRunAt: string;
processed: number;
success: number;
failed: number;
}
>
>;
}
export interface BackfillSummary {
processed: number;
success: number;
failed: number;
lastCursor: string | null;
failures: BackfillFailure[];
}
export type EntityKey = 'pemberian_obat' | 'rekam_medis' | 'pemberian_tindakan';
@Injectable() @Injectable()
export class LogService { export class LogService {
private readonly logger = new Logger(LogService.name); constructor(private readonly fabricService: FabricService) {}
private readonly statePath = path.resolve(
process.cwd(),
'backfill-state.json',
);
private readonly backfillUserId = process.env.BACKFILL_USER_ID ?? '9';
constructor(
private readonly fabricService: FabricService,
private readonly prisma: PrismaService,
) {}
async storeLog(dto: StoreLogDto) { async storeLog(dto: StoreLogDto) {
const { id, event, user_id, payload } = dto; const { id, event, user_id, payload } = dto;
@ -70,320 +18,4 @@ export class LogService {
async getLogsWithPagination(pageSize: number, bookmark: string) { async getLogsWithPagination(pageSize: number, bookmark: string) {
return this.fabricService.getLogsWithPagination(pageSize, bookmark); return this.fabricService.getLogsWithPagination(pageSize, bookmark);
} }
// async storeFromDBToBlockchain() {}
async storeFromDBToBlockchain(
limitPerEntity = 5,
batchSize = 1,
): Promise<{
summaries: Record<string, BackfillSummary>;
checkpointFile: string;
}> {
const state = await this.loadState();
const summaries = {
pemberian_obat: await this.syncPemberianObat(
state,
limitPerEntity,
batchSize,
),
rekam_medis: await this.syncRekamMedis(state, limitPerEntity, batchSize),
pemberian_tindakan: await this.syncPemberianTindakan(
state,
limitPerEntity,
batchSize,
),
} as Record<EntityKey, BackfillSummary>;
const timestamp = new Date().toISOString();
await this.persistState({
...state,
metadata: {
...(state.metadata ?? {}),
pemberian_obat: {
lastRunAt: timestamp,
processed: summaries.pemberian_obat.processed,
success: summaries.pemberian_obat.success,
failed: summaries.pemberian_obat.failed,
},
rekam_medis: {
lastRunAt: timestamp,
processed: summaries.rekam_medis.processed,
success: summaries.rekam_medis.success,
failed: summaries.rekam_medis.failed,
},
pemberian_tindakan: {
lastRunAt: timestamp,
processed: summaries.pemberian_tindakan.processed,
success: summaries.pemberian_tindakan.success,
failed: summaries.pemberian_tindakan.failed,
},
},
});
return {
summaries,
checkpointFile: this.statePath,
};
}
private async syncPemberianObat(
state: BackfillState,
limit: number,
batchSize: number,
): Promise<BackfillSummary> {
return this.syncEntity<PemberianObat>(
state,
'pemberian_obat',
limit,
batchSize,
async (cursor, take) => {
const query: any = {
orderBy: { id: 'asc' },
take,
};
if (cursor) {
query.cursor = { id: Number(cursor) };
query.skip = 1;
}
return this.prisma.pemberian_obat.findMany(query);
},
async (record) => {
const payload = {
obat: record.obat,
jumlah_obat: record.jumlah_obat,
aturan_pakai: record.aturan_pakai,
};
const payloadHash = sha256(JSON.stringify(payload));
await this.fabricService.storeLog(
`OBAT_${record.id}`,
'obat_created',
this.backfillUserId,
payloadHash,
);
return `${record.id}`;
},
(record) => `${record.id}`,
);
}
/**
 * Backfills `rekam_medis` (medical record) rows onto the ledger.
 *
 * Pages through the table in ascending `id_visit` order (a string key, so
 * the cursor is used verbatim), hashes each record's payload, and stores
 * the hash as a `rekam_medis_created` event keyed `REKAM_<id_visit>`.
 *
 * @param state     Mutable checkpoint state (cursors + failure map).
 * @param limit     Maximum number of records to process in this run.
 * @param batchSize Number of records fetched per database round-trip.
 * @returns Per-entity summary of processed/success/failed counts.
 */
private async syncRekamMedis(
  state: BackfillState,
  limit: number,
  batchSize: number,
): Promise<BackfillSummary> {
  return this.syncEntity<RekamMedis>(
    state,
    'rekam_medis',
    limit,
    batchSize,
    // Cursor pagination: `skip: 1` excludes the checkpointed row itself.
    // Conditional spread keeps the query fully typed (no `any`).
    async (cursor, take) =>
      this.prisma.rekam_medis.findMany({
        orderBy: { id_visit: 'asc' },
        take,
        ...(cursor ? { cursor: { id_visit: cursor }, skip: 1 } : {}),
      }),
    async (record) => {
      const payload = {
        // NOTE(review): dokter_id is hard-coded to 123, so every backfilled
        // record hashes as if authored by the same doctor — confirm this
        // matches what the live write path puts into the hash.
        dokter_id: 123,
        visit_id: record.id_visit,
        anamnese: record.anamnese ?? '',
        jenis_kasus: record.jenis_kasus ?? '',
        tindak_lanjut: record.tindak_lanjut ?? '',
      };
      const payloadHash = sha256(JSON.stringify(payload));
      await this.fabricService.storeLog(
        `REKAM_${record.id_visit}`,
        'rekam_medis_created',
        this.backfillUserId,
        payloadHash,
      );
      return record.id_visit;
    },
    (record) => record.id_visit,
  );
}
/**
 * Backfills `pemberian_tindakan` (procedure administration) rows onto the
 * ledger.
 *
 * Pages through the table in ascending numeric `id` order, hashes each
 * record's payload, and stores the hash as a `tindakan_dokter_created`
 * event keyed `TINDAKAN_<id>`.
 *
 * @param state     Mutable checkpoint state (cursors + failure map).
 * @param limit     Maximum number of records to process in this run.
 * @param batchSize Number of records fetched per database round-trip.
 * @returns Per-entity summary of processed/success/failed counts.
 */
private async syncPemberianTindakan(
  state: BackfillState,
  limit: number,
  batchSize: number,
): Promise<BackfillSummary> {
  return this.syncEntity<PemberianTindakan>(
    state,
    'pemberian_tindakan',
    limit,
    batchSize,
    // Cursor pagination: `skip: 1` excludes the checkpointed row itself.
    // Conditional spread keeps the query fully typed (no `any`).
    async (cursor, take) =>
      this.prisma.pemberian_tindakan.findMany({
        orderBy: { id: 'asc' },
        take,
        ...(cursor ? { cursor: { id: Number(cursor) }, skip: 1 } : {}),
      }),
    async (record) => {
      const payload = {
        id_visit: record.id_visit,
        tindakan: record.tindakan,
        kategori_tindakan: record.kategori_tindakan ?? null,
        kelompok_tindakan: record.kelompok_tindakan ?? null,
      };
      const payloadHash = sha256(JSON.stringify(payload));
      await this.fabricService.storeLog(
        `TINDAKAN_${record.id}`,
        'tindakan_dokter_created',
        this.backfillUserId,
        payloadHash,
      );
      return `${record.id}`;
    },
    (record) => `${record.id}`,
  );
}
/**
 * Generic cursor-based backfill driver shared by all entity syncs.
 *
 * Repeatedly fetches a batch, pushes every record to the ledger
 * concurrently, records per-record failures in `state.failures`, and
 * checkpoints the cursor to disk after each batch so an interrupted run
 * resumes from the last fully-processed record.
 *
 * @param state            Mutable checkpoint state; cursors and failures
 *                         are updated in place and persisted per batch.
 * @param entity           Entity key used for cursors/failure namespacing.
 * @param limit            Maximum number of records to process this run.
 * @param batchSize        Records requested per `fetchBatch` call.
 * @param fetchBatch       Loads up to `take` records after `cursor`.
 * @param processRecord    Pushes one record on-chain; resolves to its id.
 * @param recordIdentifier Extracts the stable cursor id from a record.
 * @returns Summary of processed/success/failed counts, the final cursor,
 *          and the failures still recorded for this entity.
 */
private async syncEntity<T>(
  state: BackfillState,
  entity: EntityKey,
  limit: number,
  batchSize: number,
  fetchBatch: (cursor: string | null, take: number) => Promise<T[]>,
  processRecord: (record: T) => Promise<string>,
  recordIdentifier: (record: T) => string,
): Promise<BackfillSummary> {
  let cursor = state.cursors[entity] ?? null;
  let processed = 0;
  let success = 0;
  let failed = 0;
  while (processed < limit) {
    // The loop guard guarantees at least one record of headroom here.
    const take = Math.min(batchSize, limit - processed);
    const records = await fetchBatch(cursor, take);
    if (!records || records.length === 0) {
      break; // table exhausted
    }
    // Process the whole batch concurrently; allSettled so one bad record
    // does not abort the rest of the batch.
    const results = await Promise.allSettled(
      records.map((record) => processRecord(record)),
    );
    results.forEach((result, index) => {
      const id = recordIdentifier(records[index]);
      const key = this.failureKey(entity, id);
      if (result.status === 'fulfilled') {
        success += 1;
        // A success clears any failure recorded for this record earlier.
        delete state.failures[key];
      } else {
        failed += 1;
        const failure = {
          entity,
          id,
          reason: this.serializeError(result.reason),
          timestamp: new Date().toISOString(),
        } satisfies BackfillFailure;
        state.failures[key] = failure;
        this.logger.warn(
          `Failed to backfill ${entity} ${id}: ${failure.reason}`,
        );
      }
    });
    processed += records.length;
    // Checkpoint after every batch so a crash resumes past this batch.
    cursor = recordIdentifier(records[records.length - 1]);
    state.cursors[entity] = cursor;
    await this.persistState(state);
    if (records.length < take) {
      break; // short page means there are no more rows
    }
  }
  return {
    processed,
    success,
    failed,
    lastCursor: cursor,
    failures: this.collectFailures(entity, state),
  };
}
/**
 * Loads the backfill checkpoint from disk.
 *
 * A missing file (first run) and a file whose JSON root is not an object
 * (e.g. `null`, truncated content that still parses) both yield a fresh
 * empty state; any other read/parse error is propagated.
 *
 * @returns The persisted state, with each section defaulted to `{}`.
 * @throws Re-throws filesystem errors other than ENOENT and JSON syntax
 *         errors.
 */
private async loadState(): Promise<BackfillState> {
  try {
    const raw = await fs.readFile(this.statePath, 'utf8');
    const parsed: unknown = JSON.parse(raw);
    // Guard the root type: `JSON.parse("null")` would otherwise cause a
    // TypeError on property access below instead of a clean fallback.
    const record =
      parsed !== null && typeof parsed === 'object'
        ? (parsed as Partial<BackfillState>)
        : {};
    return {
      cursors: record.cursors ?? {},
      failures: record.failures ?? {},
      metadata: record.metadata ?? {},
    } satisfies BackfillState;
  } catch (error) {
    const err = error as NodeJS.ErrnoException;
    if (err.code === 'ENOENT') {
      // No checkpoint yet — start from scratch.
      return {
        cursors: {},
        failures: {},
        metadata: {},
      };
    }
    throw error;
  }
}
/**
 * Persists the backfill checkpoint to disk.
 *
 * Writes to a temporary sibling file and renames it into place so that a
 * crash mid-write can never leave a truncated/corrupt checkpoint behind
 * (rename within the same directory is atomic on POSIX filesystems).
 *
 * @param state Checkpoint state to serialize (pretty-printed JSON).
 */
private async persistState(state: BackfillState) {
  const serializable = {
    cursors: state.cursors,
    failures: state.failures,
    metadata: state.metadata ?? {},
  };
  await fs.mkdir(path.dirname(this.statePath), { recursive: true });
  const tmpPath = `${this.statePath}.tmp`;
  await fs.writeFile(
    tmpPath,
    JSON.stringify(serializable, null, 2),
    'utf8',
  );
  await fs.rename(tmpPath, this.statePath);
}
/** Returns the recorded failures that belong to the given entity. */
private collectFailures(
  entity: EntityKey,
  state: BackfillState,
): BackfillFailure[] {
  const matching: BackfillFailure[] = [];
  for (const entry of Object.values(state.failures)) {
    if (entry.entity === entity) {
      matching.push(entry);
    }
  }
  return matching;
}
/** Composite map key (`entity:id`) that indexes `state.failures`. */
private failureKey(entity: EntityKey, id: string) {
  return [entity, id].join(':');
}
/**
 * Converts an arbitrary rejection value into a human-readable string for
 * the failure log.
 *
 * Handles Error instances, plain strings, and JSON-serializable values,
 * falling back to `String()` for everything else.
 *
 * @param error The rejection reason from a settled promise.
 * @returns Always a string (never `undefined`).
 */
private serializeError(error: unknown): string {
  if (error instanceof Error) {
    return error.message;
  }
  if (typeof error === 'string') {
    return error;
  }
  try {
    // JSON.stringify returns undefined (not a string) for undefined,
    // functions and symbols — fall back to String() in that case too,
    // otherwise this method would violate its `: string` contract.
    return JSON.stringify(error) ?? String(error);
  } catch {
    // Circular structures / BigInt make stringify throw.
    return String(error);
  }
}
} }

View File

@ -176,7 +176,7 @@ export class ObatService {
const data = { const data = {
id: `OBAT_${res.id}`, id: `OBAT_${res.id}`,
event: 'obat_created', event: 'obat_created',
user_id: userId, user_id: userId.toString(),
payload: payloadHash, payload: payloadHash,
}; };
const logResult = await this.logService.storeLog(data); const logResult = await this.logService.storeLog(data);

View File

@ -113,7 +113,7 @@ export class ProofService {
const response = await this.logService.storeLog({ const response = await this.logService.storeLog({
id: `PROOF_${payload.id_visit}`, id: `PROOF_${payload.id_visit}`,
event: 'proof_verification_logged', event: 'proof_verification_logged',
user_id: 'External', user_id: '0', // External user
payload: payloadHash, payload: payloadHash,
}); });

View File

@ -55,7 +55,7 @@ const onSubmit = handleSubmit(async (values: any) => {
if (error && Array.isArray(error.message)) { if (error && Array.isArray(error.message)) {
loginError.value = error.message[0]; loginError.value = error.message[0];
} else { } else {
loginError.value = "Terjadi kesalahan. Silakan coba lagi."; loginError.value = error.message || "Terjadi kesalahan saat login.";
} }
} finally { } finally {
isLoading.value = false; isLoading.value = false;