diff --git a/package-lock.json b/package-lock.json
index 1c2c3f6530..2fae598307 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
     "name": "origintrail_node",
-    "version": "8.2.2",
+    "version": "8.2.5",
     "lockfileVersion": 2,
     "requires": true,
     "packages": {
         "": {
             "name": "origintrail_node",
-            "version": "8.2.2",
+            "version": "8.2.5",
             "license": "ISC",
             "dependencies": {
                 "@comunica/query-sparql": "^4.0.2",
diff --git a/package.json b/package.json
index 8ea0c3e0a1..36aafc45eb 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
     "name": "origintrail_node",
-    "version": "8.2.2",
+    "version": "8.2.5",
     "description": "OTNode V8",
     "main": "index.js",
     "type": "module",
diff --git a/src/commands/cleaners/operation-id-cleaner-command.js b/src/commands/cleaners/operation-id-cleaner-command.js
index a604c6430e..9935321bf5 100644
--- a/src/commands/cleaners/operation-id-cleaner-command.js
+++ b/src/commands/cleaners/operation-id-cleaner-command.js
@@ -3,6 +3,7 @@ import {
     BYTES_IN_KILOBYTE,
     OPERATION_ID_FILES_FOR_REMOVAL_MAX_NUMBER,
     OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS,
+    OPERATION_ID_MEMORY_CLEANUP_TIME_MILLS,
     OPERATION_ID_STATUS,
     COMMAND_PRIORITY,
 } from '../../constants/constants.js';
@@ -23,6 +24,25 @@ class OperationIdCleanerCommand extends Command {
      * @param command
      */
     async execute() {
+        let memoryBytes = 0;
+        let fileBytes = 0;
+        try {
+            memoryBytes = this.operationIdService.getOperationIdMemoryCacheSizeBytes();
+        } catch (error) {
+            this.logger.warn(`Unable to read memory cache footprint: ${error.message}`);
+        }
+        try {
+            fileBytes = await this.operationIdService.getOperationIdFileCacheSizeBytes();
+        } catch (error) {
+            this.logger.warn(`Unable to read file cache footprint: ${error.message}`);
+        }
+        const bytesInMegabyte = 1024 * 1024;
+        this.logger.debug(
+            `Operation cache footprint before cleanup: memory=${(
+                memoryBytes / bytesInMegabyte
+            ).toFixed(2)}MB, files=${(fileBytes / bytesInMegabyte).toFixed(2)}MB`,
+        );
+
         this.logger.debug('Starting command for removal of expired cache files');
         const timeToBeDeleted = Date.now() - OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS;
         await this.repositoryModuleManager.removeOperationIdRecord(timeToBeDeleted, [
@@ -30,7 +50,7 @@
             OPERATION_ID_STATUS.FAILED,
         ]);
         let removed = await this.operationIdService.removeExpiredOperationIdMemoryCache(
-            OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS,
+            OPERATION_ID_MEMORY_CLEANUP_TIME_MILLS,
        );
         if (removed) {
             this.logger.debug(
@@ -68,7 +88,7 @@
     default(map) {
         const command = {
             name: 'operationIdCleanerCommand',
-            period: OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS,
+            period: OPERATION_ID_MEMORY_CLEANUP_TIME_MILLS,
             data: {},
             transactional: false,
             priority: COMMAND_PRIORITY.LOWEST,
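Note (not part of the patch): the cleaner's period and the memory-cache TTL now both follow OPERATION_ID_MEMORY_CLEANUP_TIME_MILLS (1h), while database records and cache files keep the 24h OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS cutoff. A minimal standalone sketch of that interaction; the helper names and Map-based stores below are illustrative, not from the repo:

    // One periodic job, two independent TTLs: running every hour does not
    // shorten how long file-backed entries live, because each sweep compares
    // timestamps against its own cutoff.
    const HOUR = 60 * 60 * 1000;
    const MEMORY_TTL = HOUR; // mirrors OPERATION_ID_MEMORY_CLEANUP_TIME_MILLS
    const FILE_TTL = 24 * HOUR; // mirrors OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS

    const memoryCache = new Map(); // operationId -> { timestamp, data }
    const fileCache = new Map(); // fileName -> mtimeMs

    function sweep(now = Date.now()) {
        for (const [id, entry] of memoryCache) {
            if (entry.timestamp < now - MEMORY_TTL) memoryCache.delete(id);
        }
        for (const [name, mtimeMs] of fileCache) {
            if (mtimeMs < now - FILE_TTL) fileCache.delete(name);
        }
    }

    setInterval(sweep, MEMORY_TTL); // the period tracks the shorter, memory TTL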
diff --git a/src/commands/protocols/publish/publish-finalization-command.js b/src/commands/protocols/publish/publish-finalization-command.js
index b314b5c53b..78db13bd01 100644
--- a/src/commands/protocols/publish/publish-finalization-command.js
+++ b/src/commands/protocols/publish/publish-finalization-command.js
@@ -33,6 +33,8 @@ class PublishFinalizationCommand extends Command {
         const { id, publishOperationId, merkleRoot, byteSize } = eventData;
         const { blockchain, contractAddress } = event;
         const operationId = this.operationIdService.generateId();
+        const ual = this.ualService.deriveUAL(blockchain, contractAddress, id);
+
         this.operationIdService.emitChangeEvent(
             OPERATION_ID_STATUS.PUBLISH_FINALIZATION.PUBLISH_FINALIZATION_START,
             operationId,
@@ -70,8 +72,10 @@
             cachedMerkleRoot = result.merkleRoot;
             assertion = result.assertion;
             publisherPeerId = result.remotePeerId;
-        } catch (error) {
-            this.logger.error(`Failed to read cached publish data: ${error.message}`); // TODO: Make this log more descriptive
+        } catch (_error) {
+            this.logger.warn(
+                `[Cache] Failed to read cached publish data for UAL ${ual} (publishOperationId: ${publishOperationId}, txHash: ${txHash}, operationId: ${operationId})`,
+            );
             this.operationIdService.emitChangeEvent(
                 OPERATION_ID_STATUS.FAILED,
                 operationId,
@@ -81,8 +85,6 @@
             return Command.empty();
         }
 
-        const ual = this.ualService.deriveUAL(blockchain, contractAddress, id);
-
         try {
             await this.validatePublishData(merkleRoot, cachedMerkleRoot, byteSize, assertion, ual);
         } catch (e) {
@@ -185,23 +187,24 @@
     async readWithRetries(publishOperationId) {
         let attempt = 0;
+        const datasetPath = this.fileService.getPendingStorageDocumentPath(publishOperationId);
         while (attempt < MAX_RETRIES_READ_CACHED_PUBLISH_DATA) {
             try {
-                const datasetPath =
-                    this.fileService.getPendingStorageDocumentPath(publishOperationId);
                 // eslint-disable-next-line no-await-in-loop
                 const cachedData = await this.fileService.readFile(datasetPath, true);
                 return cachedData;
             } catch (error) {
                 attempt += 1;
-
                 // eslint-disable-next-line no-await-in-loop
                 await new Promise((resolve) => {
                     setTimeout(resolve, RETRY_DELAY_READ_CACHED_PUBLISH_DATA);
                 });
             }
         }
+        this.logger.warn(
+            `[Cache] Exhausted retries reading cached publish data (publishOperationId: ${publishOperationId}, path: ${datasetPath}).`,
+        );
         // TODO: Mark this operation as failed
         throw new Error('Failed to read cached publish data');
     }
 }
diff --git a/src/constants/constants.js b/src/constants/constants.js
index e96c96b647..560bd08735 100644
--- a/src/constants/constants.js
+++ b/src/constants/constants.js
@@ -722,6 +722,11 @@ export const EXPECTED_TRANSACTION_ERRORS = {
  * operation id command cleanup interval time 24h
  */
 export const OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS = 24 * 60 * 60 * 1000;
+/**
+ * @constant {number} OPERATION_ID_MEMORY_CLEANUP_TIME_MILLS -
+ * operation id memory cleanup interval time 1h
+ */
+export const OPERATION_ID_MEMORY_CLEANUP_TIME_MILLS = 60 * 60 * 1000;
 /**
  * @constant {number} FINALIZED_COMMAND_CLEANUP_TIME_MILLS - Command cleanup interval time
  * finalized commands command cleanup interval time 24h
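Note (not part of the patch): hoisting datasetPath out of the retry loop above lets the final warning include the path, and the loop itself is a standard bounded retry with a fixed delay. The same shape, standalone; the function name and parameters here are illustrative only:

    async function readWithBoundedRetries(readFn, maxRetries, delayMs) {
        for (let attempt = 0; attempt < maxRetries; attempt += 1) {
            try {
                // eslint-disable-next-line no-await-in-loop
                return await readFn();
            } catch {
                // Swallow the error and wait before the next attempt.
                // eslint-disable-next-line no-await-in-loop
                await new Promise((resolve) => {
                    setTimeout(resolve, delayMs);
                });
            }
        }
        throw new Error('Retries exhausted');
    }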
diff --git a/src/controllers/http-api/v1/publish-http-api-controller-v1.js b/src/controllers/http-api/v1/publish-http-api-controller-v1.js
index 4d7d19667b..1c72ac6385 100644
--- a/src/controllers/http-api/v1/publish-http-api-controller-v1.js
+++ b/src/controllers/http-api/v1/publish-http-api-controller-v1.js
@@ -5,6 +5,7 @@ import {
     OPERATION_STATUS,
     LOCAL_STORE_TYPES,
     COMMAND_PRIORITY,
+    PUBLISH_MIN_NUM_OF_NODE_REPLICATIONS,
 } from '../../../constants/constants.js';
 
 class PublishController extends BaseController {
@@ -16,6 +17,7 @@
         this.repositoryModuleManager = ctx.repositoryModuleManager;
         this.pendingStorageService = ctx.pendingStorageService;
         this.networkModuleManager = ctx.networkModuleManager;
+        this.blockchainModuleManager = ctx.blockchainModuleManager;
     }
 
     async handleRequest(req, res) {
@@ -62,6 +64,37 @@
             datasetRoot,
         });
 
+        let effectiveMinReplications = minimumNumberOfNodeReplications;
+        let chainMinNumber = null;
+        try {
+            const chainMin = await this.blockchainModuleManager.getMinimumRequiredSignatures(
+                blockchain,
+            );
+            chainMinNumber = Number(chainMin);
+        } catch (err) {
+            this.logger.warn(
+                `Failed to fetch on-chain minimumRequiredSignatures for ${blockchain}: ${err.message}`,
+            );
+        }
+
+        const userMinNumber = Number(effectiveMinReplications);
+        const resolvedUserMin =
+            !Number.isNaN(userMinNumber) && userMinNumber > 0
+                ? userMinNumber
+                : PUBLISH_MIN_NUM_OF_NODE_REPLICATIONS;
+
+        if (!Number.isNaN(chainMinNumber) && chainMinNumber > 0) {
+            effectiveMinReplications = Math.max(chainMinNumber, resolvedUserMin);
+        } else {
+            effectiveMinReplications = resolvedUserMin;
+        }
+
+        if (effectiveMinReplications === 0) {
+            this.logger.error(
+                `Effective minimum replications resolved to 0 for operationId: ${operationId}, blockchain: ${blockchain}. This should never happen.`,
+            );
+        }
+
         const publisherNodePeerId = this.networkModuleManager.getPeerId().toB58String();
         await this.pendingStorageService.cacheDataset(
             operationId,
@@ -80,7 +113,7 @@
                 blockchain,
                 operationId,
                 storeType: LOCAL_STORE_TYPES.TRIPLE,
-                minimumNumberOfNodeReplications,
+                minimumNumberOfNodeReplications: effectiveMinReplications,
             },
             transactional: false,
             priority: COMMAND_PRIORITY.HIGHEST,
diff --git a/src/modules/blockchain/blockchain-module-manager.js b/src/modules/blockchain/blockchain-module-manager.js
index 11313d3473..d128cf616d 100644
--- a/src/modules/blockchain/blockchain-module-manager.js
+++ b/src/modules/blockchain/blockchain-module-manager.js
@@ -211,6 +211,10 @@ class BlockchainModuleManager extends BaseModuleManager {
         return this.callImplementationFunction(blockchain, 'getMaximumStake');
     }
 
+    async getMinimumRequiredSignatures(blockchain) {
+        return this.callImplementationFunction(blockchain, 'getMinimumRequiredSignatures');
+    }
+
     async getLatestBlock(blockchain) {
         return this.callImplementationFunction(blockchain, 'getLatestBlock');
     }
diff --git a/src/modules/blockchain/implementation/web3-service.js b/src/modules/blockchain/implementation/web3-service.js
index 4b1149cce0..1f3157f515 100644
--- a/src/modules/blockchain/implementation/web3-service.js
+++ b/src/modules/blockchain/implementation/web3-service.js
@@ -1024,6 +1024,15 @@ class Web3Service {
         return Number(ethers.utils.formatEther(maximumStake));
     }
 
+    async getMinimumRequiredSignatures() {
+        return this.callContractFunction(
+            this.contracts.ParametersStorage,
+            'minimumRequiredSignatures',
+            [],
+            CONTRACTS.PARAMETERS_STORAGE,
+        );
+    }
+
     async getShardingTableHead() {
         return this.callContractFunction(this.contracts.ShardingTableStorage, 'head', []);
     }
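Note (not part of the patch): the controller change above resolves the effective replication count as max(on-chain minimum, user value), with PUBLISH_MIN_NUM_OF_NODE_REPLICATIONS as the fallback when the user value is missing, zero, or non-numeric. The same rule condensed into a pure function, with worked examples; this helper is illustrative, not repo code:

    function resolveMinReplications(userValue, chainMin, defaultMin) {
        // Invalid or non-positive user input falls back to the default.
        const user = Number(userValue) > 0 ? Number(userValue) : defaultMin;
        // A usable on-chain minimum acts as a floor; otherwise keep the user value.
        return Number(chainMin) > 0 ? Math.max(Number(chainMin), user) : user;
    }

    // resolveMinReplications(2, 5, 1) === 5  (user below chain minimum: clamped up)
    // resolveMinReplications(7, 3, 1) === 7  (user above chain minimum: kept)
    // resolveMinReplications(0, 4, 1) === 4  (invalid user value: default, then clamped)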
diff --git a/src/modules/triple-store/implementation/ot-blazegraph/ot-blazegraph.js b/src/modules/triple-store/implementation/ot-blazegraph/ot-blazegraph.js
index 16a4d9af36..ee4c478b1a 100644
--- a/src/modules/triple-store/implementation/ot-blazegraph/ot-blazegraph.js
+++ b/src/modules/triple-store/implementation/ot-blazegraph/ot-blazegraph.js
@@ -177,12 +177,24 @@ class OtBlazegraph extends OtTripleStore {
     }
 
     async queryVoid(repository, query, timeout) {
-        return axios.post(this.repositories[repository].sparqlEndpoint, query, {
-            headers: {
-                'Content-Type': 'application/sparql-update; charset=UTF-8',
-                'X-BIGDATA-MAX-QUERY-MILLIS': timeout,
-            },
-        });
+        try {
+            return await axios.post(this.repositories[repository].sparqlEndpoint, query, {
+                headers: {
+                    'Content-Type': 'application/sparql-update; charset=UTF-8',
+                    'X-BIGDATA-MAX-QUERY-MILLIS': timeout,
+                },
+            });
+        } catch (error) {
+            const status = error?.response?.status;
+            const dataSnippet =
+                typeof error?.response?.data === 'string' ? error.response.data.slice(0, 200) : '';
+            this.logger.error(
+                `[OtBlazegraph.queryVoid] Update failed for ${repository} (status: ${status}): ${
+                    error.message
+                }${dataSnippet ? ` | data: ${dataSnippet}` : ''}`,
+            );
+            throw error;
+        }
     }
 
     async deleteRepository(repository) {
diff --git a/src/service/operation-id-service.js b/src/service/operation-id-service.js
index b959699e21..1f91780c65 100644
--- a/src/service/operation-id-service.js
+++ b/src/service/operation-id-service.js
@@ -150,6 +150,33 @@ class OperationIdService {
         delete this.memoryCachedHandlersData[operationId];
     }
 
+    getOperationIdMemoryCacheSizeBytes() {
+        let total = 0;
+        for (const operationId in this.memoryCachedHandlersData) {
+            const { data } = this.memoryCachedHandlersData[operationId];
+            total += Buffer.from(JSON.stringify(data)).byteLength;
+        }
+        return total;
+    }
+
+    async getOperationIdFileCacheSizeBytes() {
+        const cacheFolderPath = this.fileService.getOperationIdCachePath();
+        const cacheFolderExists = await this.fileService.pathExists(cacheFolderPath);
+        if (!cacheFolderExists) return 0;
+
+        const fileList = await this.fileService.readDirectory(cacheFolderPath);
+        const sizeResults = await Promise.allSettled(
+            fileList.map((fileName) =>
+                this.fileService
+                    .stat(path.join(cacheFolderPath, fileName))
+                    .then((stats) => stats.size),
+            ),
+        );
+        return sizeResults
+            .filter((res) => res.status === 'fulfilled')
+            .reduce((acc, res) => acc + res.value, 0);
+    }
+
     async removeExpiredOperationIdMemoryCache(expiredTimeout) {
         const now = Date.now();
         let deleted = 0;
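Note (not part of the patch): getOperationIdFileCacheSizeBytes uses Promise.allSettled so a file deleted between the directory listing and its stat call simply drops out of the sum instead of failing the whole measurement. The same pattern against fs directly, as a self-contained sketch (the function name is illustrative):

    import { readdir, stat } from 'fs/promises';
    import path from 'path';

    async function directorySizeBytes(dir) {
        const names = await readdir(dir);
        const results = await Promise.allSettled(
            names.map((name) => stat(path.join(dir, name)).then((s) => s.size)),
        );
        // Rejected entries (e.g. ENOENT for a just-deleted file) are skipped.
        return results
            .filter((r) => r.status === 'fulfilled')
            .reduce((acc, r) => acc + r.value, 0);
    }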
diff --git a/src/service/operation-service.js b/src/service/operation-service.js
index 64d7a0b314..10d84b2f48 100644
--- a/src/service/operation-service.js
+++ b/src/service/operation-service.js
@@ -58,21 +58,31 @@ class OperationService {
         return operationIdStatuses;
     }
 
-    async markOperationAsCompleted(operationId, blockchain, responseData, endStatuses) {
+    async markOperationAsCompleted(
+        operationId,
+        blockchain,
+        responseData,
+        endStatuses,
+        options = {},
+    ) {
+        const { reuseExistingCache = false } = options;
         this.logger.info(`Finalizing ${this.operationName} for operationId: ${operationId}`);
 
+        await this.repositoryModuleManager.updateOperationStatus(
+            this.operationName,
+            operationId,
+            OPERATION_STATUS.COMPLETED,
+        );
+
         if (responseData === null) {
             await this.operationIdService.removeOperationIdCache(operationId);
         } else {
             await this.operationIdService.cacheOperationIdDataToMemory(operationId, responseData);
-            await this.operationIdService.cacheOperationIdDataToFile(operationId, responseData);
+            if (!reuseExistingCache) {
+                await this.operationIdService.cacheOperationIdDataToFile(operationId, responseData);
+            }
         }
 
-        await this.repositoryModuleManager.updateOperationStatus(
-            this.operationName,
-            operationId,
-            OPERATION_STATUS.COMPLETED,
-        );
         for (let i = 0; i < endStatuses.length; i += 1) {
             const status = endStatuses[i];
             const response = {
diff --git a/src/service/publish-service.js b/src/service/publish-service.js
index 6f043c1a52..1e6046240d 100644
--- a/src/service/publish-service.js
+++ b/src/service/publish-service.js
@@ -69,32 +69,45 @@ class PublishService extends OperationService {
         //     }
 
         // 2. Check if all responses have been received
-        if (totalResponses === numberOfFoundNodes) {
-            // 2.1 If minimum replication is reached, mark the operation as completed
-            if (completedNumber >= minAckResponses) {
-                await this.markOperationAsCompleted(
-                    operationId,
-                    blockchain,
-                    null,
-                    this.completedStatuses,
-                );
-                await this.repositoryModuleManager.updateMinAcksReached(operationId, true);
-                this.logResponsesSummary(completedNumber, failedNumber);
-            }
-            // 2.2 Otherwise, mark as failed
-            else {
-                await this.markOperationAsFailed(
-                    operationId,
-                    blockchain,
-                    'Not replicated to enough nodes!',
-                    this.errorType,
-                );
-                this.operationIdService.emitChangeEvent(
-                    OPERATION_ID_STATUS.PUBLISH.PUBLISH_FAILED,
-                    operationId,
-                );
-                this.logResponsesSummary(completedNumber, failedNumber);
-            }
+        // 2.1 If minimum replication is reached, mark the operation as completed
+
+        const record = await this.operationIdService.getOperationIdRecord(operationId);
+        if (record?.minAcksReached) return;
+
+        if (completedNumber >= minAckResponses) {
+            this.logger.info(
+                `[PUBLISH] Minimum replication reached for operationId: ${operationId}, ` +
+                    `datasetRoot: ${datasetRoot}, completed: ${completedNumber}/${minAckResponses}`,
+            );
+            const cachedData =
+                (await this.operationIdService.getCachedOperationIdData(operationId)) || null;
+            await this.markOperationAsCompleted(
+                operationId,
+                blockchain,
+                cachedData,
+                this.completedStatuses,
+                { reuseExistingCache: true },
+            );
+            await this.repositoryModuleManager.updateMinAcksReached(operationId, true);
+            this.logResponsesSummary(completedNumber, failedNumber);
+        }
+        // 2.2 Otherwise, mark as failed
+        else if (totalResponses === numberOfFoundNodes) {
+            this.logger.warn(
+                `[PUBLISH] Failed for operationId: ${operationId}, ` +
+                    `only ${completedNumber}/${minAckResponses} nodes responded successfully`,
+            );
+            await this.markOperationAsFailed(
+                operationId,
+                blockchain,
+                'Not replicated to enough nodes!',
+                this.errorType,
+            );
+            this.operationIdService.emitChangeEvent(
+                OPERATION_ID_STATUS.PUBLISH.PUBLISH_FAILED,
+                operationId,
+            );
+            this.logResponsesSummary(completedNumber, failedNumber);
         }
         // else {
         //     // 3. Not all responses have arrived yet.
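Note (not part of the patch): the reworked response handling completes the operation as soon as minAckResponses is reached instead of waiting for every contacted node, re-serves the already-cached response data rather than rewriting it (reuseExistingCache), and relies on the persisted minAcksReached flag so responses arriving after completion become no-ops. The control flow reduced to its skeleton; the state object and method names below are assumed from the diff, not repo APIs:

    async function onPublishResponse(state) {
        if (state.minAcksReached) return; // late response after completion: no-op
        if (state.completedNumber >= state.minAckResponses) {
            state.minAcksReached = true;
            await state.markCompleted(); // early exit: don't wait for remaining nodes
        } else if (state.totalResponses === state.numberOfFoundNodes) {
            await state.markFailed('Not replicated to enough nodes!'); // all in, too few acks
        }
    }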
diff --git a/test/unit/commands/operation-id-cleaner-command.test.js b/test/unit/commands/operation-id-cleaner-command.test.js
new file mode 100644
index 0000000000..c62734034c
--- /dev/null
+++ b/test/unit/commands/operation-id-cleaner-command.test.js
@@ -0,0 +1,100 @@
+import { describe, it, beforeEach, afterEach } from 'mocha';
+import { expect } from 'chai';
+import sinon from 'sinon';
+
+import OperationIdCleanerCommand from '../../../src/commands/cleaners/operation-id-cleaner-command.js';
+import {
+    OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS,
+    OPERATION_ID_FILES_FOR_REMOVAL_MAX_NUMBER,
+    OPERATION_ID_MEMORY_CLEANUP_TIME_MILLS,
+    OPERATION_ID_STATUS,
+} from '../../../src/constants/constants.js';
+
+describe('OperationIdCleanerCommand', () => {
+    let clock;
+    let operationIdService;
+    let repositoryModuleManager;
+    let logger;
+    let command;
+
+    beforeEach(() => {
+        clock = sinon.useFakeTimers(new Date('2023-01-01T00:00:00Z').getTime());
+
+        operationIdService = {
+            getOperationIdMemoryCacheSizeBytes: sinon.stub().returns(1024),
+            getOperationIdFileCacheSizeBytes: sinon.stub().resolves(2048),
+            removeExpiredOperationIdMemoryCache: sinon.stub().resolves(512),
+            removeExpiredOperationIdFileCache: sinon.stub().resolves(3),
+        };
+
+        repositoryModuleManager = {
+            removeOperationIdRecord: sinon.stub().resolves(),
+        };
+
+        logger = {
+            debug: sinon.spy(),
+            info: sinon.spy(),
+            warn: sinon.spy(),
+            error: sinon.spy(),
+        };
+
+        command = new OperationIdCleanerCommand({
+            logger,
+            repositoryModuleManager,
+            operationIdService,
+            fileService: {},
+        });
+    });
+
+    afterEach(() => {
+        clock.restore();
+    });
+
+    it('cleans memory with 1h TTL and files with 24h TTL while reporting footprint', async () => {
+        await command.execute();
+
+        expect(operationIdService.getOperationIdMemoryCacheSizeBytes.calledOnce).to.be.true;
+        expect(operationIdService.getOperationIdFileCacheSizeBytes.calledOnce).to.be.true;
+
+        expect(
+            repositoryModuleManager.removeOperationIdRecord.calledWith(
+                Date.now() - OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS,
+                [OPERATION_ID_STATUS.COMPLETED, OPERATION_ID_STATUS.FAILED],
+            ),
+        ).to.be.true;
+
+        expect(
+            operationIdService.removeExpiredOperationIdMemoryCache.calledWith(
+                OPERATION_ID_MEMORY_CLEANUP_TIME_MILLS,
+            ),
+        ).to.be.true;
+
+        expect(
+            operationIdService.removeExpiredOperationIdFileCache.calledWith(
+                OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS,
+                OPERATION_ID_FILES_FOR_REMOVAL_MAX_NUMBER,
+            ),
+        ).to.be.true;
+
+        expect(logger.debug.called).to.be.true;
+    });
+
+    it('handles missing memory cache gracefully', async () => {
+        operationIdService.getOperationIdMemoryCacheSizeBytes.throws(new Error('no memory cache'));
+        await command.execute();
+
+        expect(
+            repositoryModuleManager.removeOperationIdRecord.calledWith(
+                Date.now() - OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS,
+                [OPERATION_ID_STATUS.COMPLETED, OPERATION_ID_STATUS.FAILED],
+            ),
+        ).to.be.true;
+
+        expect(
+            operationIdService.removeExpiredOperationIdFileCache.calledWith(
+                OPERATION_ID_COMMAND_CLEANUP_TIME_MILLS,
+                OPERATION_ID_FILES_FOR_REMOVAL_MAX_NUMBER,
+            ),
+        ).to.be.true;
+    });
+});
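Note (not part of the patch): the calledWith assertions above can compute Date.now() - TTL themselves because sinon's fake timers freeze the clock, so the command and the test derive the identical cutoff timestamp. A minimal standalone demonstration of that property:

    import sinon from 'sinon';

    const clock = sinon.useFakeTimers(1000);
    const before = Date.now();
    const after = Date.now(); // no time passes unless clock.tick() is called
    console.assert(before === after && before === 1000);
    clock.restore();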
diff --git a/test/unit/controllers/publish-http-api-controller-v1.test.js b/test/unit/controllers/publish-http-api-controller-v1.test.js
new file mode 100644
index 0000000000..3f25856b28
--- /dev/null
+++ b/test/unit/controllers/publish-http-api-controller-v1.test.js
@@ -0,0 +1,132 @@
+import { describe, it } from 'mocha';
+import { expect } from 'chai';
+
+import PublishController from '../../../src/controllers/http-api/v1/publish-http-api-controller-v1.js';
+import { PUBLISH_MIN_NUM_OF_NODE_REPLICATIONS } from '../../../src/constants/constants.js';
+
+const createRes = () => {
+    const res = {
+        statusCode: null,
+        body: null,
+        status(code) {
+            this.statusCode = code;
+            return this;
+        },
+        json(payload) {
+            this.body = payload;
+            return this;
+        },
+        send(payload) {
+            this.body = payload;
+            return this;
+        },
+    };
+    return res;
+};
+
+describe('publish-http-api-controller-v1', () => {
+    const baseCtx = () => {
+        const addedCommands = [];
+        return {
+            commandExecutor: {
+                add: async (cmd) => {
+                    addedCommands.push(cmd);
+                },
+                _added: addedCommands,
+            },
+            publishService: {
+                getOperationName: () => 'publish',
+            },
+            operationIdService: {
+                generateOperationId: async () => 'op-id-123',
+                emitChangeEvent: () => {},
+                updateOperationIdStatus: async () => {},
+                cacheOperationIdDataToMemory: async () => {},
+                cacheOperationIdDataToFile: async () => {},
+            },
+            repositoryModuleManager: {
+                createOperationRecord: async () => {},
+            },
+            pendingStorageService: {
+                cacheDataset: async () => {},
+            },
+            networkModuleManager: {
+                getPeerId: () => ({ toB58String: () => 'peer-self' }),
+            },
+            blockchainModuleManager: {
+                getMinimumRequiredSignatures: async () => PUBLISH_MIN_NUM_OF_NODE_REPLICATIONS,
+            },
+            logger: {
+                info: () => {},
+                warn: () => {},
+                error: () => {},
+            },
+        };
+    };
+
+    it('clamps minimumNumberOfNodeReplications to on-chain minimum', async () => {
+        const ctx = baseCtx();
+        ctx.blockchainModuleManager.getMinimumRequiredSignatures = async () => 5; // on-chain min
+        const controller = new PublishController(ctx);
+
+        const req = {
+            body: {
+                dataset: { public: {} },
+                datasetRoot: '0xroot',
+                blockchain: 'hardhat',
+                minimumNumberOfNodeReplications: 2, // below chain min
+            },
+        };
+        const res = createRes();
+
+        await controller.handleRequest(req, res);
+
+        expect(res.statusCode).to.equal(202);
+        const added = ctx.commandExecutor._added[0];
+        expect(added.data.minimumNumberOfNodeReplications).to.equal(5);
+    });
+
+    it('allows higher user override than on-chain minimum', async () => {
+        const ctx = baseCtx();
+        ctx.blockchainModuleManager.getMinimumRequiredSignatures = async () => 3; // on-chain min
+        const controller = new PublishController(ctx);
+
+        const req = {
+            body: {
+                dataset: { public: {} },
+                datasetRoot: '0xroot',
+                blockchain: 'hardhat',
+                minimumNumberOfNodeReplications: 7, // above chain min
+            },
+        };
+        const res = createRes();
+
+        await controller.handleRequest(req, res);
+
+        expect(res.statusCode).to.equal(202);
+        const added = ctx.commandExecutor._added[0];
+        expect(added.data.minimumNumberOfNodeReplications).to.equal(7);
+    });
+
+    it('falls back to on-chain minimum when user value is zero or invalid', async () => {
+        const ctx = baseCtx();
+        ctx.blockchainModuleManager.getMinimumRequiredSignatures = async () => 4; // on-chain min
+        const controller = new PublishController(ctx);
+
+        const req = {
+            body: {
+                dataset: { public: {} },
+                datasetRoot: '0xroot',
+                blockchain: 'hardhat',
+                minimumNumberOfNodeReplications: 0, // invalid/zero
+            },
+        };
+        const res = createRes();
+
+        await controller.handleRequest(req, res);
+
+        expect(res.statusCode).to.equal(202);
+        const added = ctx.commandExecutor._added[0];
+        expect(added.data.minimumNumberOfNodeReplications).to.equal(4);
+    });
+});
diff --git a/test/unit/service/operation-id-service-cache.test.js b/test/unit/service/operation-id-service-cache.test.js
new file mode 100644
index 0000000000..43b40c219f
--- /dev/null
+++ b/test/unit/service/operation-id-service-cache.test.js
@@ -0,0 +1,64 @@
+import { describe, it, beforeEach, afterEach } from 'mocha';
+import { expect } from 'chai';
+import fs from 'fs/promises';
+import path from 'path';
+import os from 'os';
+import OperationIdService from '../../../src/service/operation-id-service.js';
+
+describe('OperationIdService file cache cleanup', () => {
+    let tmpDir;
+    let service;
+
+    beforeEach(async () => {
+        tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'opid-cache-'));
+        const now = Date.now();
+
+        // Older than TTL (2 hours)
+        const oldFile = path.join(tmpDir, 'old.json');
+        await fs.writeFile(oldFile, '{}');
+        await fs.utimes(
+            oldFile,
+            new Date(now - 2 * 60 * 60 * 1000),
+            new Date(now - 2 * 60 * 60 * 1000),
+        );
+
+        // Newer than TTL (10 minutes)
+        const newFile = path.join(tmpDir, 'new.json');
+        await fs.writeFile(newFile, '{}');
+        await fs.utimes(newFile, new Date(now - 10 * 60 * 1000), new Date(now - 10 * 60 * 1000));
+
+        const fileService = {
+            getOperationIdCachePath: () => tmpDir,
+            async pathExists(p) {
+                try {
+                    await fs.stat(p);
+                    return true;
+                } catch {
+                    return false;
+                }
+            },
+            readDirectory: (p) => fs.readdir(p),
+            stat: (p) => fs.stat(p),
+            removeFile: (p) => fs.rm(p, { force: true }),
+        };
+
+        service = new OperationIdService({
+            logger: { debug: () => {}, warn: () => {}, error: () => {} },
+            fileService,
+            repositoryModuleManager: {},
+            eventEmitter: { emit: () => {} },
+        });
+    });
+
+    afterEach(async () => {
+        await fs.rm(tmpDir, { recursive: true, force: true });
+    });
+
+    it('removes only files older than TTL', async () => {
+        const deleted = await service.removeExpiredOperationIdFileCache(60 * 60 * 1000, 10);
+        const remainingFiles = await fs.readdir(tmpDir);
+
+        expect(deleted).to.equal(1);
+        expect(remainingFiles).to.deep.equal(['new.json']);
+    });
+});