"use strict";

// CC_VERSION=1.3 CC_SEQUENCE=4 ./deployChaincode.sh

const stringify = require("json-stringify-deterministic");
const sortKeysRecursive = require("sort-keys-recursive");
const { Contract } = require("fabric-contract-api");

/**
 * Convert a Fabric protobuf timestamp ({ seconds, nanos }) into an
 * ISO-8601 string. `seconds` may be a Long-like object (with .toInt())
 * or a plain number/string.
 *
 * @param {object|undefined} timestamp - Transaction timestamp from ctx.stub.getTxTimestamp().
 * @returns {string|undefined} ISO-8601 timestamp, or undefined when no timestamp was given.
 */
const timestampToISOString = (timestamp) => {
  if (!timestamp) {
    return undefined;
  }
  const seconds =
    typeof timestamp.seconds?.toInt === "function"
      ? timestamp.seconds.toInt()
      : Number(timestamp.seconds || 0);
  const millis = seconds * 1000 + Math.floor((timestamp.nanos || 0) / 1e6);
  return new Date(millis).toISOString();
};

/**
 * Chaincode contract for storing and retrieving tamper-evident log entries.
 * Entries are serialized deterministically (stable key order) so that the
 * same logical log always produces the same ledger bytes across peers.
 */
class LogVerification extends Contract {
  /**
   * Hook invoked before every transaction; logs the transaction ID.
   *
   * FIX: the previous implementation stored the tx ID on `this.TxId`.
   * A single contract instance is shared across concurrent transactions in
   * Fabric, so instance state is a cross-transaction data race. The ID is
   * now read from the per-transaction context only.
   */
  async beforeTransaction(ctx) {
    console.log(`Transaction ID: ${ctx.stub.getTxID()}`);
  }

  /**
   * Seed the ledger with two sample log entries (demo/bootstrap data).
   *
   * @param {Context} ctx - Transaction context.
   */
  async InitLedger(ctx) {
    const logs = [
      {
        id: "TINDAKAN_01",
        event: "Tindakan Dokter",
        user_id: "1",
        payload: "hash1",
        timestamp: "2023-10-01T09:00:00Z",
      },
      {
        id: "OBAT_01",
        event: "Pemberian Obat",
        user_id: "1",
        payload: "hash1",
        timestamp: "2023-10-01T09:00:00Z",
      },
    ];
    for (const log of logs) {
      // Deterministic serialization: sort keys so all peers write identical bytes.
      await ctx.stub.putState(
        log.id,
        Buffer.from(stringify(sortKeysRecursive(log)))
      );
    }
  }

  /**
   * Store a new log entry (or overwrite an existing one with the same id).
   * The timestamp is taken from the transaction itself, not the wall clock,
   * so every endorsing peer computes the same state.
   *
   * @param {Context} ctx - Transaction context.
   * @param {string} id - Ledger key for the log entry.
   * @param {string} event - Event name/category.
   * @param {string} user_id - Identifier of the acting user.
   * @param {string} payload - Payload hash for the event.
   * @returns {Promise<string>} Deterministic JSON `{ success, message }`.
   * @throws {Error} When any parameter is empty or the write fails.
   */
  async storeLog(ctx, id, event, user_id, payload) {
    if (!id || !event || !user_id || !payload) {
      throw new Error("All parameters must be provided and non-empty");
    }
    const txTimestamp = ctx.stub.getTxTimestamp();
    const timestamp = timestampToISOString(txTimestamp);
    const log = { id, event, user_id, payload, timestamp };
    try {
      await ctx.stub.putState(
        id,
        Buffer.from(stringify(sortKeysRecursive(log)))
      );
      return stringify({
        success: true,
        message: `Log ${id} stored successfully`,
      });
    } catch (error) {
      throw new Error(`Failed to store log: ${error}`);
    }
  }

  /**
   * Return the full history of a key: one `{ txId, value }` entry per
   * transaction that wrote a non-empty value (delete markers have empty
   * values and are skipped by the length check).
   *
   * FIX: JSON.parse failures now raise a descriptive error, consistent with
   * getLogsWithPagination, instead of leaking a raw SyntaxError.
   *
   * @param {Context} ctx - Transaction context.
   * @param {string} id - Ledger key whose history to fetch.
   * @returns {Promise<Array<{txId: string, value: object}>>}
   * @throws {Error} When the key has no history or a record cannot be parsed.
   */
  async getLogById(ctx, id) {
    const iterator = await ctx.stub.getHistoryForKey(id);
    const logs = [];
    try {
      while (true) {
        const res = await iterator.next();
        if (res.value && res.value.value.length > 0) {
          const payload = res.value.value.toString("utf8");
          try {
            logs.push({
              txId: res.value.txId,
              value: JSON.parse(payload),
            });
          } catch (err) {
            throw new Error(`Failed to parse log data: ${err}`);
          }
        }
        if (res.done) {
          break;
        }
      }
    } finally {
      // Always release the iterator, even when parsing throws.
      await iterator.close();
    }
    if (logs.length === 0) {
      throw new Error(`Log ${id} does not exist`);
    }
    return logs;
  }

  /**
   * Delete a log entry from world state (its history remains on the chain).
   *
   * @param {Context} ctx - Transaction context.
   * @param {string} id - Ledger key to delete.
   * @throws {Error} When the key does not exist.
   */
  async deleteLogById(ctx, id) {
    const exists = await this.logExists(ctx, id);
    if (!exists) {
      throw new Error(`Log ${id} does not exist`);
    }
    await ctx.stub.deleteState(id);
  }

  /**
   * Return every entry in world state. Records that fail to parse are
   * returned as raw strings (best-effort, logged) rather than aborting
   * the whole query.
   *
   * @param {Context} ctx - Transaction context.
   * @returns {Promise<Array<object|string>>}
   */
  async getAllLogs(ctx) {
    const allResults = [];
    // Empty start/end keys = unbounded range scan over all of world state.
    const iterator = await ctx.stub.getStateByRange("", "");
    try {
      while (true) {
        const result = await iterator.next();
        if (result.value && result.value.value.length > 0) {
          const strValue = result.value.value.toString("utf8");
          try {
            allResults.push(JSON.parse(strValue));
          } catch (err) {
            console.log(err);
            allResults.push(strValue);
          }
        }
        if (result.done) {
          break;
        }
      }
    } finally {
      await iterator.close();
    }
    return allResults;
  }

  /**
   * Return a page of log entries plus pagination metadata.
   *
   * FIX: chaincode arguments arrive as strings; pageSize is now converted
   * with Number() and validated with Number.isInteger so the "positive
   * integer" contract stated by the error message is actually enforced
   * (previously "2.5" passed), and the numeric value — not the raw
   * string — is handed to getStateByRangeWithPagination.
   *
   * @param {Context} ctx - Transaction context.
   * @param {string|number} pageSize - Positive integer page size, max 100.
   * @param {string} bookmark - Pagination bookmark from a prior call ("" for the first page).
   * @returns {Promise<{logs: Array, fetchedRecordsCount: number, bookmark: string, metadata: object}>}
   * @throws {Error} On invalid page size or unparseable record data.
   */
  async getLogsWithPagination(ctx, pageSize, bookmark) {
    const size = Number(pageSize);
    if (!pageSize || Number.isNaN(size) || !Number.isInteger(size) || size <= 0) {
      throw new Error("Page size must be a positive integer");
    }
    if (!bookmark) {
      bookmark = "";
    }
    if (size > 100) {
      throw new Error("Page size must not exceed 100");
    }
    const { iterator, metadata } = await ctx.stub.getStateByRangeWithPagination(
      "",
      "",
      size,
      bookmark
    );
    const logs = [];
    try {
      while (true) {
        const res = await iterator.next();
        if (res.value && res.value.value.length > 0) {
          const payload = res.value.value.toString("utf8");
          try {
            logs.push({
              txId: res.value.txId,
              value: JSON.parse(payload),
            });
          } catch (err) {
            throw new Error(`Failed to parse log data: ${err}`);
          }
        }
        if (res.done) {
          break;
        }
      }
    } finally {
      await iterator.close();
    }
    return {
      logs: logs,
      fetchedRecordsCount: metadata.fetchedRecordsCount,
      bookmark: metadata.bookmark,
      metadata: metadata,
    };
  }

  /**
   * Check whether a key currently exists in world state.
   *
   * @param {Context} ctx - Transaction context.
   * @param {string} id - Ledger key to check.
   * @returns {Promise<boolean>} True when the key holds a non-empty value.
   */
  async logExists(ctx, id) {
    const logJSON = await ctx.stub.getState(id);
    return logJSON && logJSON.length > 0;
  }

  /**
   * Hook invoked after every successful transaction; logs the transaction ID.
   * Reads the ID from the per-transaction context (see beforeTransaction for
   * why instance state must not be used).
   */
  async afterTransaction(ctx, result) {
    console.log(`Transaction ${ctx.stub.getTxID()} has been committed.`);
  }
}

module.exports = LogVerification;