diff --git a/CHANGES.txt b/CHANGES.txt
index 63a22deb..7447fcdb 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,3 +1,7 @@
+2.11.0 (January XX, 2026)
+ - Added metadata to SDK_UPDATE events to indicate the type of update (FLAGS_UPDATE or SEGMENTS_UPDATE) and the names of updated flags or segments.
+ - Added metadata to SDK_READY and SDK_READY_FROM_CACHE events, including `initialCacheLoad` (boolean: `true` when no usable cache exists, i.e., a fresh install or first app launch; `false` when loading from a warm cache on subsequent launches) and `lastUpdateTimestamp` (milliseconds since epoch of the last cache update, or `undefined` when there is no cache).
+
 2.10.1 (December 18, 2025)
  - Bugfix - Handle `null` prerequisites properly.

diff --git a/package-lock.json b/package-lock.json
index 6e4ab5b1..8b14e3a9 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@splitsoftware/splitio-commons",
-  "version": "2.10.1",
+  "version": "2.10.2-rc.6",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@splitsoftware/splitio-commons",
-      "version": "2.10.1",
+      "version": "2.10.2-rc.6",
       "license": "Apache-2.0",
       "dependencies": {
         "@types/ioredis": "^4.28.0",
diff --git a/package.json b/package.json
index b003a6c4..2aa27c8a 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@splitsoftware/splitio-commons",
-  "version": "2.10.1",
+  "version": "2.10.2-rc.6",
   "description": "Split JavaScript SDK common components",
   "main": "cjs/index.js",
   "module": "esm/index.js",
diff --git a/src/readiness/__tests__/readinessManager.spec.ts b/src/readiness/__tests__/readinessManager.spec.ts
index 34eaf9a3..2ece8380 100644
--- a/src/readiness/__tests__/readinessManager.spec.ts
+++ b/src/readiness/__tests__/readinessManager.spec.ts
@@ -1,8 +1,9 @@
 import { readinessManagerFactory } from '../readinessManager';
 import { EventEmitter } from '../../utils/MinEvents';
 import { IReadinessManager } from '../types';
-import { SDK_READY, SDK_UPDATE, SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_READY_FROM_CACHE, SDK_SPLITS_CACHE_LOADED, SDK_READY_TIMED_OUT } from '../constants';
+import { SDK_READY, SDK_UPDATE, SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_READY_FROM_CACHE, SDK_SPLITS_CACHE_LOADED, SDK_READY_TIMED_OUT, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../constants';
 import { ISettings } from '../../types';
+import { SdkUpdateMetadata, SdkReadyMetadata } from '../../../types/splitio';

 const settings = {
   startup: {
@@ -99,15 +100,13 @@ test('READINESS MANAGER / Ready from cache event should be fired once', (done) =
     counter++;
   });

-  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED);
-  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED);
+  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined });
+  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined });
   setTimeout(() => {
-    readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED);
+    readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined });
   }, 0);
-  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED);
-  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED);
-  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED);
-  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED);
+  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined });
+  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: undefined });

   setTimeout(() => {
     expect(counter).toBe(1); // should be
called only once
@@ -300,3 +299,139 @@ test('READINESS MANAGER / Destroy before it was ready and timedout', (done) => {
   }, settingsWithTimeout.startup.readyTimeout * 1.5);
 });
+
+test('READINESS MANAGER / SDK_UPDATE should emit with metadata', () => {
+  const readinessManager = readinessManagerFactory(EventEmitter, settings);
+
+  // SDK_READY
+  readinessManager.splits.emit(SDK_SPLITS_ARRIVED);
+  readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED);
+
+  const metadata: SdkUpdateMetadata = {
+    type: FLAGS_UPDATE,
+    names: ['flag1', 'flag2']
+  };
+
+  let receivedMetadata: SdkUpdateMetadata | undefined;
+  readinessManager.gate.on(SDK_UPDATE, (meta: SdkUpdateMetadata) => {
+    receivedMetadata = meta;
+  });
+
+  readinessManager.splits.emit(SDK_SPLITS_ARRIVED, metadata);
+
+  expect(receivedMetadata).toEqual(metadata);
+});
+
+test('READINESS MANAGER / SDK_UPDATE should handle undefined metadata', () => {
+  const readinessManager = readinessManagerFactory(EventEmitter, settings);
+
+  // SDK_READY
+  readinessManager.splits.emit(SDK_SPLITS_ARRIVED);
+  readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED);
+
+  let receivedMetadata: any;
+  readinessManager.gate.on(SDK_UPDATE, (meta: SdkUpdateMetadata) => {
+    receivedMetadata = meta;
+  });
+
+  readinessManager.splits.emit(SDK_SPLITS_ARRIVED);
+
+  expect(receivedMetadata).toBeUndefined();
+});
+
+test('READINESS MANAGER / SDK_UPDATE should forward metadata from segments', () => {
+  const readinessManager = readinessManagerFactory(EventEmitter, settings);
+
+  // SDK_READY
+  readinessManager.splits.emit(SDK_SPLITS_ARRIVED);
+  readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED);
+
+  const metadata: SdkUpdateMetadata = {
+    type: SEGMENTS_UPDATE,
+    names: []
+  };
+
+  let receivedMetadata: SdkUpdateMetadata | undefined;
+  readinessManager.gate.on(SDK_UPDATE, (meta: SdkUpdateMetadata) => {
+    receivedMetadata = meta;
+  });
+
+  readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED, metadata);
+
+  expect(receivedMetadata).toEqual(metadata);
+});
+
+test('READINESS MANAGER / SDK_READY_FROM_CACHE should emit with metadata when cache is loaded', () => {
+  const readinessManager = readinessManagerFactory(EventEmitter, settings);
+
+  const cacheTimestamp = Date.now() - 1000 * 60 * 60; // 1 hour ago
+  let receivedMetadata: SdkReadyMetadata | undefined;
+  readinessManager.gate.on(SDK_READY_FROM_CACHE, (meta: SdkReadyMetadata) => {
+    receivedMetadata = meta;
+  });
+
+  // Emit cache loaded event with timestamp
+  readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, {
+    initialCacheLoad: false,
+    lastUpdateTimestamp: cacheTimestamp
+  });
+
+  expect(receivedMetadata).toBeDefined();
+  expect(receivedMetadata!.initialCacheLoad).toBe(false);
+  expect(receivedMetadata!.lastUpdateTimestamp).toBe(cacheTimestamp);
+});
+
+test('READINESS MANAGER / SDK_READY_FROM_CACHE should emit with metadata when SDK becomes ready without cache', () => {
+  const readinessManager = readinessManagerFactory(EventEmitter, settings);
+
+  let receivedMetadata: SdkReadyMetadata | undefined;
+  readinessManager.gate.on(SDK_READY_FROM_CACHE, (meta: SdkReadyMetadata) => {
+    receivedMetadata = meta;
+  });
+
+  // Make SDK ready without cache first
+  readinessManager.splits.emit(SDK_SPLITS_ARRIVED);
+  readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED);
+
+  expect(receivedMetadata).toBeDefined();
+  expect(receivedMetadata!.initialCacheLoad).toBe(true);
+  expect(receivedMetadata!.lastUpdateTimestamp).toBeUndefined();
+});
+
+test('READINESS MANAGER / SDK_READY should emit with metadata when ready from cache', () 
=> { + const readinessManager = readinessManagerFactory(EventEmitter, settings); + + const cacheTimestamp = Date.now() - 1000 * 60 * 60; // 1 hour ago + // First emit cache loaded with timestamp + readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: cacheTimestamp }); + + let receivedMetadata: SdkReadyMetadata | undefined; + readinessManager.gate.on(SDK_READY, (meta: SdkReadyMetadata) => { + receivedMetadata = meta; + }); + + // Make SDK ready + readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); + + expect(receivedMetadata).toBeDefined(); + expect(receivedMetadata!.initialCacheLoad).toBe(false); // Was ready from cache first + expect(receivedMetadata!.lastUpdateTimestamp).toBe(cacheTimestamp); +}); + +test('READINESS MANAGER / SDK_READY should emit with metadata when ready without cache', () => { + const readinessManager = readinessManagerFactory(EventEmitter, settings); + + let receivedMetadata: SdkReadyMetadata | undefined; + readinessManager.gate.on(SDK_READY, (meta: SdkReadyMetadata) => { + receivedMetadata = meta; + }); + + // Make SDK ready without cache + readinessManager.splits.emit(SDK_SPLITS_ARRIVED); + readinessManager.segments.emit(SDK_SEGMENTS_ARRIVED); + + expect(receivedMetadata).toBeDefined(); + expect(receivedMetadata!.initialCacheLoad).toBe(true); // Was not ready from cache + expect(receivedMetadata!.lastUpdateTimestamp).toBeUndefined(); // No cache timestamp when fresh install +}); diff --git a/src/readiness/__tests__/sdkReadinessManager.spec.ts b/src/readiness/__tests__/sdkReadinessManager.spec.ts index 5861942f..63174eea 100644 --- a/src/readiness/__tests__/sdkReadinessManager.spec.ts +++ b/src/readiness/__tests__/sdkReadinessManager.spec.ts @@ -219,7 +219,7 @@ describe('SDK Readiness Manager - Promises', () => { const sdkReadinessManager = sdkReadinessManagerFactory(EventEmitter, fullSettings); // make the SDK ready from cache - sdkReadinessManager.readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED); + sdkReadinessManager.readinessManager.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp: null }); expect(await sdkReadinessManager.sdkStatus.whenReadyFromCache()).toBe(false); // validate error log for SDK_READY_FROM_CACHE diff --git a/src/readiness/constants.ts b/src/readiness/constants.ts index 061bbd67..f08cf546 100644 --- a/src/readiness/constants.ts +++ b/src/readiness/constants.ts @@ -10,3 +10,7 @@ export const SDK_READY_TIMED_OUT = 'init::timeout'; export const SDK_READY = 'init::ready'; export const SDK_READY_FROM_CACHE = 'init::cache-ready'; export const SDK_UPDATE = 'state::update'; + +// SdkUpdateMetadata types: +export const FLAGS_UPDATE = 'FLAGS_UPDATE'; +export const SEGMENTS_UPDATE = 'SEGMENTS_UPDATE'; diff --git a/src/readiness/readinessManager.ts b/src/readiness/readinessManager.ts index 319e843d..c0bbff78 100644 --- a/src/readiness/readinessManager.ts +++ b/src/readiness/readinessManager.ts @@ -1,6 +1,6 @@ import { objectAssign } from '../utils/lang/objectAssign'; import { ISettings } from '../types'; -import SplitIO from '../../types/splitio'; +import SplitIO, { SdkReadyMetadata } from '../../types/splitio'; import { SDK_SPLITS_ARRIVED, SDK_SPLITS_CACHE_LOADED, SDK_SEGMENTS_ARRIVED, SDK_READY_TIMED_OUT, SDK_READY_FROM_CACHE, SDK_UPDATE, SDK_READY } from './constants'; import { IReadinessEventEmitter, IReadinessManager, ISegmentsEventEmitter, ISplitsEventEmitter } from './types'; @@ -15,7 +15,7 @@ function 
splitsEventEmitterFactory(EventEmitter: new () => SplitIO.IEventEmitter // `isSplitKill` condition avoids an edge-case of wrongly emitting SDK_READY if: // - `/memberships` fetch and SPLIT_KILL occurs before `/splitChanges` fetch, and // - storage has cached splits (for which case `splitsStorage.killLocally` can return true) - splitsEventEmitter.on(SDK_SPLITS_ARRIVED, (isSplitKill: boolean) => { if (!isSplitKill) splitsEventEmitter.splitsArrived = true; }); + splitsEventEmitter.on(SDK_SPLITS_ARRIVED, (metadata: SplitIO.SdkUpdateMetadata, isSplitKill: boolean) => { if (!isSplitKill) splitsEventEmitter.splitsArrived = true; }); splitsEventEmitter.once(SDK_SPLITS_CACHE_LOADED, () => { splitsEventEmitter.splitsCacheLoaded = true; }); return splitsEventEmitter; @@ -53,6 +53,10 @@ export function readinessManagerFactory( lastUpdate = dateNow > lastUpdate ? dateNow : lastUpdate + 1; } + let metadataReady: SdkReadyMetadata = { + initialCacheLoad: true + }; + // emit SDK_READY_FROM_CACHE let isReadyFromCache = false; if (splits.splitsCacheLoaded) isReadyFromCache = true; // ready from cache, but doesn't emit SDK_READY_FROM_CACHE @@ -84,13 +88,14 @@ export function readinessManagerFactory( splits.initCallbacks.push(__init); if (splits.hasInit) __init(); - function checkIsReadyFromCache() { + function checkIsReadyFromCache(cacheMetadata: SdkReadyMetadata) { + metadataReady = cacheMetadata; isReadyFromCache = true; // Don't emit SDK_READY_FROM_CACHE if SDK_READY has been emitted if (!isReady && !isDestroyed) { try { syncLastUpdate(); - gate.emit(SDK_READY_FROM_CACHE, isReady); + gate.emit(SDK_READY_FROM_CACHE, cacheMetadata); } catch (e) { // throws user callback exceptions in next tick setTimeout(() => { throw e; }, 0); @@ -98,12 +103,12 @@ export function readinessManagerFactory( } } - function checkIsReadyOrUpdate(diff: any) { + function checkIsReadyOrUpdate(metadata: SplitIO.SdkUpdateMetadata) { if (isDestroyed) return; if (isReady) { try { syncLastUpdate(); - gate.emit(SDK_UPDATE, diff); + gate.emit(SDK_UPDATE, metadata); } catch (e) { // throws user callback exceptions in next tick setTimeout(() => { throw e; }, 0); @@ -116,9 +121,13 @@ export function readinessManagerFactory( syncLastUpdate(); if (!isReadyFromCache) { isReadyFromCache = true; - gate.emit(SDK_READY_FROM_CACHE, isReady); + const metadataReadyFromCache: SplitIO.SdkReadyMetadata = { + initialCacheLoad: true, // Fresh install, no cache existed + lastUpdateTimestamp: undefined // No cache timestamp when fresh install + }; + gate.emit(SDK_READY_FROM_CACHE, metadataReadyFromCache); } - gate.emit(SDK_READY); + gate.emit(SDK_READY, metadataReady); } catch (e) { // throws user callback exceptions in next tick setTimeout(() => { throw e; }, 0); diff --git a/src/readiness/types.ts b/src/readiness/types.ts index 2de99b43..03ac2a0f 100644 --- a/src/readiness/types.ts +++ b/src/readiness/types.ts @@ -1,5 +1,30 @@ import SplitIO from '../../types/splitio'; +/** Readiness event types */ + +export type SDK_READY_TIMED_OUT = 'init::timeout' +export type SDK_READY = 'init::ready' +export type SDK_READY_FROM_CACHE = 'init::cache-ready' +export type SDK_UPDATE = 'state::update' +export type SDK_DESTROY = 'state::destroy' + +export type IReadinessEvent = SDK_READY_TIMED_OUT | SDK_READY | SDK_READY_FROM_CACHE | SDK_UPDATE | SDK_DESTROY + +export interface IReadinessEventEmitter extends SplitIO.IEventEmitter { + emit(event: IReadinessEvent, ...args: any[]): boolean + on(event: SDK_READY, listener: (metadata: SplitIO.SdkReadyMetadata) => void): this; + 
on(event: SDK_READY_FROM_CACHE, listener: (metadata: SplitIO.SdkReadyMetadata) => void): this; + on(event: SDK_UPDATE, listener: (metadata: SplitIO.SdkUpdateMetadata) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: SDK_READY, listener: (metadata: SplitIO.SdkReadyMetadata) => void): this; + once(event: SDK_READY_FROM_CACHE, listener: (metadata: SplitIO.SdkReadyMetadata) => void): this; + once(event: SDK_UPDATE, listener: (metadata: SplitIO.SdkUpdateMetadata) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + addListener(event: SDK_READY, listener: (metadata: SplitIO.SdkReadyMetadata) => void): this; + addListener(event: SDK_READY_FROM_CACHE, listener: (metadata: SplitIO.SdkReadyMetadata) => void): this; + addListener(event: SDK_UPDATE, listener: (metadata: SplitIO.SdkUpdateMetadata) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; +} /** Splits data emitter */ type SDK_SPLITS_ARRIVED = 'state::splits-arrived' @@ -9,6 +34,7 @@ type ISplitsEvent = SDK_SPLITS_ARRIVED | SDK_SPLITS_CACHE_LOADED export interface ISplitsEventEmitter extends SplitIO.IEventEmitter { emit(event: ISplitsEvent, ...args: any[]): boolean on(event: ISplitsEvent, listener: (...args: any[]) => void): this; + on(event: SDK_UPDATE, listener: (metadata: SplitIO.SdkUpdateMetadata) => void): this; once(event: ISplitsEvent, listener: (...args: any[]) => void): this; splitsArrived: boolean splitsCacheLoaded: boolean @@ -24,23 +50,11 @@ type ISegmentsEvent = SDK_SEGMENTS_ARRIVED export interface ISegmentsEventEmitter extends SplitIO.IEventEmitter { emit(event: ISegmentsEvent, ...args: any[]): boolean on(event: ISegmentsEvent, listener: (...args: any[]) => void): this; + on(event: SDK_UPDATE, listener: (metadata: SplitIO.SdkUpdateMetadata) => void): this; once(event: ISegmentsEvent, listener: (...args: any[]) => void): this; segmentsArrived: boolean } -/** Readiness emitter */ - -export type SDK_READY_TIMED_OUT = 'init::timeout' -export type SDK_READY = 'init::ready' -export type SDK_READY_FROM_CACHE = 'init::cache-ready' -export type SDK_UPDATE = 'state::update' -export type SDK_DESTROY = 'state::destroy' -export type IReadinessEvent = SDK_READY_TIMED_OUT | SDK_READY | SDK_READY_FROM_CACHE | SDK_UPDATE | SDK_DESTROY - -export interface IReadinessEventEmitter extends SplitIO.IEventEmitter { - emit(event: IReadinessEvent, ...args: any[]): boolean -} - /** Readiness manager */ export interface IReadinessManager { diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts index 02d3e0df..c7c0c5fb 100644 --- a/src/sdkFactory/index.ts +++ b/src/sdkFactory/index.ts @@ -65,7 +65,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ISDK | SplitIO.IA if (initialRolloutPlan) { setRolloutPlan(log, initialRolloutPlan, storage as IStorageSync, key && getMatching(key)); - if ((storage as IStorageSync).splits.getChangeNumber() > -1) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); + if ((storage as IStorageSync).splits.getChangeNumber() > -1) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false /* Not an initial load, cache exists */ }); } const clients: Record = {}; diff --git a/src/storages/inLocalStorage/__tests__/validateCache.spec.ts b/src/storages/inLocalStorage/__tests__/validateCache.spec.ts index 7feea541..6ddfa134 100644 --- a/src/storages/inLocalStorage/__tests__/validateCache.spec.ts +++ b/src/storages/inLocalStorage/__tests__/validateCache.spec.ts @@ -29,8 
+29,10 @@ describe.each(storages)('validateCache', (storage) => { for (let i = 0; i < storage.length; i++) storage.removeItem(storage.key(i) as string); }); - test('if there is no cache, it should return false', async () => { - expect(await validateCache({}, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments)).toBe(false); + test('if there is no cache, it should return initialCacheLoad: true', async () => { + const result = await validateCache({}, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + expect(result.initialCacheLoad).toBe(true); + expect(result.lastUpdateTimestamp).toBeUndefined(); expect(logSpy).not.toHaveBeenCalled(); @@ -44,12 +46,16 @@ describe.each(storages)('validateCache', (storage) => { expect(storage.getItem(keys.buildLastClear())).toBeNull(); }); - test('if there is cache and it must not be cleared, it should return true', async () => { + test('if there is cache and it must not be cleared, it should return initialCacheLoad: false', async () => { + const lastUpdateTimestamp = Date.now() - 1000 * 60 * 60; // 1 hour ago storage.setItem(keys.buildSplitsTillKey(), '1'); storage.setItem(keys.buildHashKey(), FULL_SETTINGS_HASH); + storage.setItem(keys.buildLastUpdatedKey(), lastUpdateTimestamp + ''); await storage.save && storage.save(); - expect(await validateCache({}, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments)).toBe(true); + const result = await validateCache({}, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + expect(result.initialCacheLoad).toBe(false); + expect(result.lastUpdateTimestamp).toBe(lastUpdateTimestamp); expect(logSpy).not.toHaveBeenCalled(); @@ -63,13 +69,15 @@ describe.each(storages)('validateCache', (storage) => { expect(storage.getItem(keys.buildLastClear())).toBeNull(); }); - test('if there is cache and it has expired, it should clear cache and return false', async () => { + test('if there is cache and it has expired, it should clear cache and return initialCacheLoad: true', async () => { storage.setItem(keys.buildSplitsTillKey(), '1'); storage.setItem(keys.buildHashKey(), FULL_SETTINGS_HASH); storage.setItem(keys.buildLastUpdatedKey(), Date.now() - 1000 * 60 * 60 * 24 * 2 + ''); // 2 days ago await storage.save && storage.save(); - expect(await validateCache({ expirationDays: 1 }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments)).toBe(false); + const result = await validateCache({ expirationDays: 1 }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + expect(result.initialCacheLoad).toBe(true); + expect(result.lastUpdateTimestamp).toBeUndefined(); expect(logSpy).toHaveBeenCalledWith('storage:localstorage: Cache expired more than 1 days ago. 
Cleaning up cache'); @@ -82,12 +90,14 @@ describe.each(storages)('validateCache', (storage) => { expect(nearlyEqual(parseInt(storage.getItem(keys.buildLastClear()) as string), Date.now())).toBe(true); }); - test('if there is cache and its hash has changed, it should clear cache and return false', async () => { + test('if there is cache and its hash has changed, it should clear cache and return initialCacheLoad: true', async () => { storage.setItem(keys.buildSplitsTillKey(), '1'); storage.setItem(keys.buildHashKey(), FULL_SETTINGS_HASH); await storage.save && storage.save(); - expect(await validateCache({}, storage, { ...fullSettings, core: { ...fullSettings.core, authorizationKey: 'another-sdk-key' } }, keys, splits, rbSegments, segments, largeSegments)).toBe(false); + const result = await validateCache({}, storage, { ...fullSettings, core: { ...fullSettings.core, authorizationKey: 'another-sdk-key' } }, keys, splits, rbSegments, segments, largeSegments); + expect(result.initialCacheLoad).toBe(true); + expect(result.lastUpdateTimestamp).toBeUndefined(); expect(logSpy).toHaveBeenCalledWith('storage:localstorage: SDK key, flags filter criteria, or flags spec version has changed. Cleaning up cache'); @@ -100,14 +110,16 @@ describe.each(storages)('validateCache', (storage) => { expect(nearlyEqual(parseInt(storage.getItem(keys.buildLastClear()) as string), Date.now())).toBe(true); }); - test('if there is cache and clearOnInit is true, it should clear cache and return false', async () => { + test('if there is cache and clearOnInit is true, it should clear cache and return initialCacheLoad: true', async () => { // Older cache version (without last clear) storage.removeItem(keys.buildLastClear()); storage.setItem(keys.buildSplitsTillKey(), '1'); storage.setItem(keys.buildHashKey(), FULL_SETTINGS_HASH); await storage.save && storage.save(); - expect(await validateCache({ clearOnInit: true }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments)).toBe(false); + const result = await validateCache({ clearOnInit: true }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + expect(result.initialCacheLoad).toBe(true); + expect(result.lastUpdateTimestamp).toBeUndefined(); expect(logSpy).toHaveBeenCalledWith('storage:localstorage: clearOnInit was set and cache was not cleared in the last 24 hours. 
Cleaning up cache'); @@ -122,14 +134,20 @@ describe.each(storages)('validateCache', (storage) => { // If cache is cleared, it should not clear again until a day has passed logSpy.mockClear(); + const lastUpdateTimestamp = Date.now() - 1000 * 60 * 60; // 1 hour ago storage.setItem(keys.buildSplitsTillKey(), '1'); - expect(await validateCache({ clearOnInit: true }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments)).toBe(true); + storage.setItem(keys.buildLastUpdatedKey(), lastUpdateTimestamp + ''); + const result2 = await validateCache({ clearOnInit: true }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + expect(result2.initialCacheLoad).toBe(false); + expect(result2.lastUpdateTimestamp).toBe(lastUpdateTimestamp); expect(logSpy).not.toHaveBeenCalled(); expect(storage.getItem(keys.buildLastClear())).toBe(lastClear); // Last clear should not have changed // If a day has passed, it should clear again storage.setItem(keys.buildLastClear(), (Date.now() - 1000 * 60 * 60 * 24 - 1) + ''); - expect(await validateCache({ clearOnInit: true }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments)).toBe(false); + const result3 = await validateCache({ clearOnInit: true }, storage, fullSettings, keys, splits, rbSegments, segments, largeSegments); + expect(result3.initialCacheLoad).toBe(true); + expect(result3.lastUpdateTimestamp).toBeUndefined(); expect(logSpy).toHaveBeenCalledWith('storage:localstorage: clearOnInit was set and cache was not cleared in the last 24 hours. Cleaning up cache'); expect(splits.clear).toHaveBeenCalledTimes(2); expect(rbSegments.clear).toHaveBeenCalledTimes(2); diff --git a/src/storages/inLocalStorage/index.ts b/src/storages/inLocalStorage/index.ts index fa19081e..cc5d38f4 100644 --- a/src/storages/inLocalStorage/index.ts +++ b/src/storages/inLocalStorage/index.ts @@ -54,7 +54,7 @@ export function InLocalStorage(options: SplitIO.InLocalStorageOptions = {}): ISt const rbSegments = new RBSegmentsCacheInLocal(settings, keys, storage); const segments = new MySegmentsCacheInLocal(log, keys, storage); const largeSegments = new MySegmentsCacheInLocal(log, myLargeSegmentsKeyBuilder(prefix, matchingKey), storage); - let validateCachePromise: Promise | undefined; + let validateCachePromise: Promise | undefined; return { splits, @@ -68,7 +68,10 @@ export function InLocalStorage(options: SplitIO.InLocalStorageOptions = {}): ISt uniqueKeys: new UniqueKeysCacheInMemoryCS(), validateCache() { - return validateCachePromise || (validateCachePromise = validateCache(options, storage, settings, keys, splits, rbSegments, segments, largeSegments)); + if (!validateCachePromise) { + validateCachePromise = validateCache(options, storage, settings, keys, splits, rbSegments, segments, largeSegments); + } + return validateCachePromise; }, save() { diff --git a/src/storages/inLocalStorage/validateCache.ts b/src/storages/inLocalStorage/validateCache.ts index 73397075..d9fa8de0 100644 --- a/src/storages/inLocalStorage/validateCache.ts +++ b/src/storages/inLocalStorage/validateCache.ts @@ -66,14 +66,19 @@ function validateExpiration(options: SplitIO.InLocalStorageOptions, storage: Sto * - its hash has changed, i.e., the SDK key, flags filter criteria or flags spec version was modified * - `clearOnInit` was set and cache was not cleared in the last 24 hours * - * @returns `true` if cache is ready to be used, `false` otherwise (cache was cleared or there is no cache) + * @returns Metadata object with `initialCacheLoad` (true if is fresh 
install, false if is ready from cache) and `lastUpdateTimestamp` (timestamp of last cache update or undefined) */ -export function validateCache(options: SplitIO.InLocalStorageOptions, storage: StorageAdapter, settings: ISettings, keys: KeyBuilderCS, splits: SplitsCacheInLocal, rbSegments: RBSegmentsCacheInLocal, segments: MySegmentsCacheInLocal, largeSegments: MySegmentsCacheInLocal): Promise { +export function validateCache(options: SplitIO.InLocalStorageOptions, storage: StorageAdapter, settings: ISettings, keys: KeyBuilderCS, splits: SplitsCacheInLocal, rbSegments: RBSegmentsCacheInLocal, segments: MySegmentsCacheInLocal, largeSegments: MySegmentsCacheInLocal): Promise { return Promise.resolve(storage.load && storage.load()).then(() => { const currentTimestamp = Date.now(); const isThereCache = splits.getChangeNumber() > -1; + // Get lastUpdateTimestamp from storage + const lastUpdatedTimestampStr = storage.getItem(keys.buildLastUpdatedKey()); + const lastUpdatedTimestamp = lastUpdatedTimestampStr ? parseInt(lastUpdatedTimestampStr, 10) : undefined; + const lastUpdateTimestamp = (!isNaNNumber(lastUpdatedTimestamp) && lastUpdatedTimestamp !== undefined) ? lastUpdatedTimestamp : undefined; + if (validateExpiration(options, storage, settings, keys, currentTimestamp, isThereCache)) { splits.clear(); rbSegments.clear(); @@ -90,10 +95,16 @@ export function validateCache(options: SplitIO.InLocalStorageOptions, storage: S // Persist clear if (storage.save) storage.save(); - return false; + return { + initialCacheLoad: true, // Cache was cleared, so this is an initial load (no cache existed) + lastUpdateTimestamp: undefined + }; } // Check if ready from cache - return isThereCache; + return { + initialCacheLoad: !isThereCache, // true if no cache exists (initial load), false if cache exists (ready from cache) + lastUpdateTimestamp + }; }); } diff --git a/src/storages/inRedis/__tests__/TelemetryCacheInRedis.spec.ts b/src/storages/inRedis/__tests__/TelemetryCacheInRedis.spec.ts index fb80ffce..f446e7d2 100644 --- a/src/storages/inRedis/__tests__/TelemetryCacheInRedis.spec.ts +++ b/src/storages/inRedis/__tests__/TelemetryCacheInRedis.spec.ts @@ -10,78 +10,88 @@ const latencyKey = `${prefix}.telemetry.latencies`; const initKey = `${prefix}.telemetry.init`; const fieldVersionablePrefix = `${metadata.s}/${metadata.n}/${metadata.i}`; -test('TELEMETRY CACHE IN REDIS', async () => { +describe('TELEMETRY CACHE IN REDIS', () => { + let connection: RedisAdapter; + let cache: TelemetryCacheInRedis; + let keysBuilder: KeyBuilderSS; - const keysBuilder = new KeyBuilderSS(prefix, metadata); - const connection = new RedisAdapter(loggerMock); - const cache = new TelemetryCacheInRedis(loggerMock, keysBuilder, connection); + beforeEach(async () => { + keysBuilder = new KeyBuilderSS(prefix, metadata); + connection = new RedisAdapter(loggerMock); + cache = new TelemetryCacheInRedis(loggerMock, keysBuilder, connection); - // recordException - expect(await cache.recordException('tr')).toBe(1); - expect(await cache.recordException('tr')).toBe(2); - expect(await cache.recordException('tcfs')).toBe(1); + await connection.del(exceptionKey); + await connection.del(latencyKey); + await connection.del(initKey); + }); - expect(await connection.hget(exceptionKey, fieldVersionablePrefix + '/track')).toBe('2'); - expect(await connection.hget(exceptionKey, fieldVersionablePrefix + '/treatment')).toBe(null); - expect(await connection.hget(exceptionKey, fieldVersionablePrefix + '/treatmentsWithConfigByFlagSets')).toBe('1'); + 
test('TELEMETRY CACHE IN REDIS', async () => { - // recordLatency - expect(await cache.recordLatency('tr', 1.6)).toBe(1); - expect(await cache.recordLatency('tr', 1.6)).toBe(2); - expect(await cache.recordLatency('tfs', 1.6)).toBe(1); + // recordException + expect(await cache.recordException('tr')).toBe(1); + expect(await cache.recordException('tr')).toBe(2); + expect(await cache.recordException('tcfs')).toBe(1); - expect(await connection.hget(latencyKey, fieldVersionablePrefix + '/track/2')).toBe('2'); - expect(await connection.hget(latencyKey, fieldVersionablePrefix + '/treatment/2')).toBe(null); - expect(await connection.hget(latencyKey, fieldVersionablePrefix + '/treatmentsByFlagSets/2')).toBe('1'); + expect(await connection.hget(exceptionKey, fieldVersionablePrefix + '/track')).toBe('2'); + expect(await connection.hget(exceptionKey, fieldVersionablePrefix + '/treatment')).toBe(null); + expect(await connection.hget(exceptionKey, fieldVersionablePrefix + '/treatmentsWithConfigByFlagSets')).toBe('1'); - // recordConfig - expect(await cache.recordConfig()).toBe(1); - expect(JSON.parse(await connection.hget(initKey, fieldVersionablePrefix) as string)).toEqual({ - oM: 1, - st: 'redis', - aF: 0, - rF: 0 - }); + // recordLatency + expect(await cache.recordLatency('tr', 1.6)).toBe(1); + expect(await cache.recordLatency('tr', 1.6)).toBe(2); + expect(await cache.recordLatency('tfs', 1.6)).toBe(1); - // popLatencies - const latencies = await cache.popLatencies(); - latencies.forEach((latency, m) => { - expect(JSON.parse(m)).toEqual(metadata); - expect(latency).toEqual({ - tfs: [0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], - tr: [0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], - }); - }); - expect(await connection.hget(latencyKey, fieldVersionablePrefix + '/track/2')).toBe(null); - - // popExceptions - const exceptions = await cache.popExceptions(); - exceptions.forEach((exception, m) => { - expect(JSON.parse(m)).toEqual(metadata); - expect(exception).toEqual({ - tcfs: 1, - tr: 2, - }); - }); - expect(await connection.hget(exceptionKey, fieldVersionablePrefix + '/track')).toBe(null); + expect(await connection.hget(latencyKey, fieldVersionablePrefix + '/track/2')).toBe('2'); + expect(await connection.hget(latencyKey, fieldVersionablePrefix + '/treatment/2')).toBe(null); + expect(await connection.hget(latencyKey, fieldVersionablePrefix + '/treatmentsByFlagSets/2')).toBe('1'); - // popConfig - const configs = await cache.popConfigs(); - configs.forEach((config, m) => { - expect(JSON.parse(m)).toEqual(metadata); - expect(config).toEqual({ + // recordConfig + expect(await cache.recordConfig()).toBe(1); + expect(JSON.parse(await connection.hget(initKey, fieldVersionablePrefix) as string)).toEqual({ oM: 1, st: 'redis', aF: 0, rF: 0 }); - }); - expect(await connection.hget(initKey, fieldVersionablePrefix)).toBe(null); - // pops when there is no data - expect((await cache.popLatencies()).size).toBe(0); - expect((await cache.popExceptions()).size).toBe(0); - expect((await cache.popConfigs()).size).toBe(0); + // popLatencies + const latencies = await cache.popLatencies(); + latencies.forEach((latency, m) => { + expect(JSON.parse(m)).toEqual(metadata); + expect(latency).toEqual({ + tfs: [0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + tr: [0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + }); + }); + expect(await connection.hget(latencyKey, fieldVersionablePrefix + '/track/2')).toBe(null); + + // popExceptions + const 
exceptions = await cache.popExceptions(); + exceptions.forEach((exception, m) => { + expect(JSON.parse(m)).toEqual(metadata); + expect(exception).toEqual({ + tcfs: 1, + tr: 2, + }); + }); + expect(await connection.hget(exceptionKey, fieldVersionablePrefix + '/track')).toBe(null); - await connection.disconnect(); + // popConfig + const configs = await cache.popConfigs(); + configs.forEach((config, m) => { + expect(JSON.parse(m)).toEqual(metadata); + expect(config).toEqual({ + oM: 1, + st: 'redis', + aF: 0, + rF: 0 + }); + }); + expect(await connection.hget(initKey, fieldVersionablePrefix)).toBe(null); + + // pops when there is no data + expect((await cache.popLatencies()).size).toBe(0); + expect((await cache.popExceptions()).size).toBe(0); + expect((await cache.popConfigs()).size).toBe(0); + }); }); diff --git a/src/storages/types.ts b/src/storages/types.ts index 3662047f..fea0cc2b 100644 --- a/src/storages/types.ts +++ b/src/storages/types.ts @@ -499,7 +499,7 @@ export interface IStorageSync extends IStorageBase< IUniqueKeysCacheSync > { // Defined in client-side - validateCache?: () => Promise, + validateCache?: () => Promise, largeSegments?: ISegmentsCacheSync, } diff --git a/src/sync/__tests__/syncManagerOnline.spec.ts b/src/sync/__tests__/syncManagerOnline.spec.ts index fc6cefb6..c78b9215 100644 --- a/src/sync/__tests__/syncManagerOnline.spec.ts +++ b/src/sync/__tests__/syncManagerOnline.spec.ts @@ -100,7 +100,7 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', asy // Test pushManager for main client const syncManager = syncManagerOnlineFactory(() => pollingManagerMock, pushManagerFactoryMock)({ settings, // @ts-ignore - storage: { validateCache: () => false }, + storage: { validateCache: () => { return Promise.resolve({ initialCacheLoad: true, lastUpdateTimestamp: undefined }); } }, }); expect(pushManagerFactoryMock).not.toBeCalled(); @@ -169,7 +169,7 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', asy // pushManager instantiation control test const testSyncManager = syncManagerOnlineFactory(() => pollingManagerMock, pushManagerFactoryMock)({ settings, // @ts-ignore - storage: { validateCache: () => false }, + storage: { validateCache: () => Promise.resolve({ initialCacheLoad: true, lastUpdateTimestamp: undefined }) }, }); expect(pushManagerFactoryMock).toBeCalled(); @@ -183,17 +183,18 @@ test('syncManagerOnline should syncAll a single time when sync is disabled', asy }); -test('syncManagerOnline should emit SDK_SPLITS_CACHE_LOADED if validateCache returns true', async () => { +test('syncManagerOnline should emit SDK_SPLITS_CACHE_LOADED if validateCache returns false', async () => { + const lastUpdateTimestamp = Date.now() - 1000 * 60 * 60; // 1 hour ago const params = { settings: fullSettings, - storage: { validateCache: () => true }, + storage: { validateCache: () => Promise.resolve({ initialCacheLoad: false, lastUpdateTimestamp }) }, readiness: { splits: { emit: jest.fn() } } }; // @ts-ignore const syncManager = syncManagerOnlineFactory()(params); await syncManager.start(); - expect(params.readiness.splits.emit).toBeCalledWith(SDK_SPLITS_CACHE_LOADED); + expect(params.readiness.splits.emit).toBeCalledWith(SDK_SPLITS_CACHE_LOADED, { initialCacheLoad: false, lastUpdateTimestamp }); syncManager.stop(); }); diff --git a/src/sync/offline/syncTasks/fromObjectSyncTask.ts b/src/sync/offline/syncTasks/fromObjectSyncTask.ts index 96bc8384..cc2ffcb7 100644 --- a/src/sync/offline/syncTasks/fromObjectSyncTask.ts +++ 
b/src/sync/offline/syncTasks/fromObjectSyncTask.ts @@ -7,7 +7,7 @@ import { syncTaskFactory } from '../../syncTask'; import { ISyncTask } from '../../types'; import { ISettings } from '../../../types'; import { CONTROL } from '../../../utils/constants'; -import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_SPLITS_CACHE_LOADED } from '../../../readiness/constants'; +import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED, SDK_SPLITS_CACHE_LOADED, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../../../readiness/constants'; import { SYNC_OFFLINE_DATA, ERROR_SYNC_OFFLINE_LOADING } from '../../../logger/constants'; /** @@ -55,15 +55,17 @@ export function fromObjectUpdaterFactory( splitsCache.clear(), // required to sync removed splits from mock splitsCache.update(splits, [], Date.now()) ]).then(() => { - readiness.splits.emit(SDK_SPLITS_ARRIVED); + readiness.splits.emit(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: [] }); if (startingUp) { startingUp = false; - Promise.resolve(storage.validateCache ? storage.validateCache() : false).then((isCacheLoaded) => { + Promise.resolve(storage.validateCache ? storage.validateCache() : { initialCacheLoad: true /* Fallback: assume initial load when validateCache doesn't exist */ }).then((cacheMetadata) => { // Emits SDK_READY_FROM_CACHE - if (isCacheLoaded) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); + if (!cacheMetadata.initialCacheLoad) { + readiness.splits.emit(SDK_SPLITS_CACHE_LOADED, cacheMetadata); + } // Emits SDK_READY - readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + readiness.segments.emit(SDK_SEGMENTS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); }); } return true; diff --git a/src/sync/polling/pollingManagerCS.ts b/src/sync/polling/pollingManagerCS.ts index 5e197e62..5c1169d3 100644 --- a/src/sync/polling/pollingManagerCS.ts +++ b/src/sync/polling/pollingManagerCS.ts @@ -9,6 +9,7 @@ import { SDK_SPLITS_ARRIVED, SDK_SEGMENTS_ARRIVED } from '../../readiness/consta import { POLLING_SMART_PAUSING, POLLING_START, POLLING_STOP } from '../../logger/constants'; import { ISdkFactoryContextSync } from '../../sdkFactory/types'; import { usesSegmentsSync } from '../../storages/AbstractSplitsCacheSync'; +import { SdkUpdateMetadata } from '../../../types/splitio'; /** * Expose start / stop mechanism for polling data from services. 
@@ -59,8 +60,8 @@ export function pollingManagerCSFactory( const mySegmentsSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMemberships, storage, readiness, settings, matchingKey); // smart ready - function smartReady() { - if (!readiness.isReady() && !usesSegmentsSync(storage)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + function smartReady(metadata: SdkUpdateMetadata) { + if (!readiness.isReady() && !usesSegmentsSync(storage)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED, metadata); } if (!usesSegmentsSync(storage)) setTimeout(smartReady, 0); else readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady); diff --git a/src/sync/polling/updaters/__tests__/mySegmentsUpdater.spec.ts b/src/sync/polling/updaters/__tests__/mySegmentsUpdater.spec.ts new file mode 100644 index 00000000..2fabc173 --- /dev/null +++ b/src/sync/polling/updaters/__tests__/mySegmentsUpdater.spec.ts @@ -0,0 +1,107 @@ +import { readinessManagerFactory } from '../../../../readiness/readinessManager'; +import { MySegmentsCacheInMemory } from '../../../../storages/inMemory/MySegmentsCacheInMemory'; +import { mySegmentsUpdaterFactory } from '../mySegmentsUpdater'; +import { fullSettings } from '../../../../utils/settingsValidation/__tests__/settings.mocks'; +import { EventEmitter } from '../../../../utils/MinEvents'; +import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; +import { IMySegmentsFetcher } from '../../fetchers/types'; +import { IMembershipsResponse } from '../../../../dtos/types'; +import { SDK_SEGMENTS_ARRIVED, SEGMENTS_UPDATE } from '../../../../readiness/constants'; +import { MySegmentsData } from '../../types'; +import { MEMBERSHIPS_MS_UPDATE } from '../../../streaming/constants'; +import { IStorageSync } from '../../../../storages/types'; +import { SplitsCacheInMemory } from '../../../../storages/inMemory/SplitsCacheInMemory'; +import { RBSegmentsCacheInMemory } from '../../../../storages/inMemory/RBSegmentsCacheInMemory'; + +describe('mySegmentsUpdater', () => { + const segments = new MySegmentsCacheInMemory(); + const largeSegments = new MySegmentsCacheInMemory(); + const splits = new SplitsCacheInMemory(); + const rbSegments = new RBSegmentsCacheInMemory(); + const storage: IStorageSync = { + segments, + largeSegments, + splits, + rbSegments, + impressions: {} as any, + events: {} as any, + impressionCounts: {} as any, + telemetry: undefined, + uniqueKeys: {} as any, + save: () => {}, + destroy: () => {} + }; + const readinessManager = readinessManagerFactory(EventEmitter, fullSettings); + const segmentsEmitSpy = jest.spyOn(readinessManager.segments, 'emit'); + + beforeEach(() => { + jest.clearAllMocks(); + storage.segments.clear(); + readinessManager.segments.segmentsArrived = false; + }); + + test('test with mySegments update - should emit SEGMENTS_UPDATE metadata', async () => { + const mockMySegmentsFetcher: IMySegmentsFetcher = jest.fn().mockResolvedValue({ + ms: { 'segment1': true, 'segment2': true }, + ls: {} + } as IMembershipsResponse); + + const mySegmentsUpdater = mySegmentsUpdaterFactory( + loggerMock, + mockMySegmentsFetcher, + storage, + readinessManager.segments, + 1000, + 1, + 'test-key' + ); + + await mySegmentsUpdater(); + + expect(segmentsEmitSpy).toBeCalledWith(SDK_SEGMENTS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); + }); + + test('test with mySegments data payload - should emit SEGMENTS_UPDATE metadata', async () => { + const segmentsData: MySegmentsData = { + type: MEMBERSHIPS_MS_UPDATE, + cn: 123, + added: ['segment1', 'segment2'], + removed: [] + }; + + 
const mySegmentsUpdater = mySegmentsUpdaterFactory( + loggerMock, + jest.fn().mockResolvedValue({ ms: {}, ls: {} } as IMembershipsResponse), + storage, + readinessManager.segments, + 1000, + 1, + 'test-key' + ); + + await mySegmentsUpdater(segmentsData); + + expect(segmentsEmitSpy).toBeCalledWith(SDK_SEGMENTS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); + }); + + test('test with empty mySegments - should still emit SEGMENTS_UPDATE metadata', async () => { + const mockMySegmentsFetcher: IMySegmentsFetcher = jest.fn().mockResolvedValue({ + ms: {}, + ls: {} + } as IMembershipsResponse); + + const mySegmentsUpdater = mySegmentsUpdaterFactory( + loggerMock, + mockMySegmentsFetcher, + storage, + readinessManager.segments, + 1000, + 1, + 'test-key' + ); + + await mySegmentsUpdater(); + + expect(segmentsEmitSpy).toBeCalledWith(SDK_SEGMENTS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); + }); +}); diff --git a/src/sync/polling/updaters/__tests__/segmentChangesUpdater.spec.ts b/src/sync/polling/updaters/__tests__/segmentChangesUpdater.spec.ts new file mode 100644 index 00000000..43119f20 --- /dev/null +++ b/src/sync/polling/updaters/__tests__/segmentChangesUpdater.spec.ts @@ -0,0 +1,134 @@ +import { readinessManagerFactory } from '../../../../readiness/readinessManager'; +import { SegmentsCacheInMemory } from '../../../../storages/inMemory/SegmentsCacheInMemory'; +import { segmentChangesUpdaterFactory } from '../segmentChangesUpdater'; +import { fullSettings } from '../../../../utils/settingsValidation/__tests__/settings.mocks'; +import { EventEmitter } from '../../../../utils/MinEvents'; +import { loggerMock } from '../../../../logger/__tests__/sdkLogger.mock'; +import { ISegmentChangesFetcher } from '../../fetchers/types'; +import { ISegmentChangesResponse } from '../../../../dtos/types'; +import { SDK_SEGMENTS_ARRIVED, SEGMENTS_UPDATE } from '../../../../readiness/constants'; + +describe('segmentChangesUpdater', () => { + const segments = new SegmentsCacheInMemory(); + const updateSegments = jest.spyOn(segments, 'update'); + + const readinessManager = readinessManagerFactory(EventEmitter, fullSettings); + const segmentsEmitSpy = jest.spyOn(readinessManager.segments, 'emit'); + + beforeEach(() => { + jest.clearAllMocks(); + segments.clear(); + readinessManager.segments.segmentsArrived = false; + }); + + test('test with segments update - should emit updatedSegments and NOT updatedFlags', async () => { + const segmentName = 'test-segment'; + const segmentChange: ISegmentChangesResponse = { + name: segmentName, + added: ['key1', 'key2'], + removed: [], + since: -1, + till: 123 + }; + + const mockSegmentChangesFetcher: ISegmentChangesFetcher = jest.fn().mockResolvedValue([segmentChange]); + + const segmentChangesUpdater = segmentChangesUpdaterFactory( + loggerMock, + mockSegmentChangesFetcher, + segments, + readinessManager, + 1000, + 1 + ); + + segments.registerSegments([segmentName]); + + await segmentChangesUpdater(undefined, segmentName); + + expect(updateSegments).toHaveBeenCalledWith(segmentName, segmentChange.added, segmentChange.removed, segmentChange.till); + expect(segmentsEmitSpy).toBeCalledWith(SDK_SEGMENTS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); + }); + + test('test with multiple segments update - should emit SEGMENTS_UPDATE metadata once', async () => { + const segment1 = 'segment1'; + const segment2 = 'segment2'; + const segment3 = 'segment3'; + + const segmentChange1: ISegmentChangesResponse = { + name: segment1, + added: ['key1'], + removed: [], + since: -1, + till: 100 + 
}; + + const segmentChange2: ISegmentChangesResponse = { + name: segment2, + added: ['key2'], + removed: [], + since: -1, + till: 101 + }; + + const segmentChange3: ISegmentChangesResponse = { + name: segment3, + added: ['key3'], + removed: [], + since: -1, + till: 102 + }; + + const mockSegmentChangesFetcher: ISegmentChangesFetcher = jest.fn().mockResolvedValue([ + segmentChange1, + segmentChange2, + segmentChange3 + ]); + + const segmentChangesUpdater = segmentChangesUpdaterFactory( + loggerMock, + mockSegmentChangesFetcher, + segments, + readinessManager, + 1000, + 1 + ); + + segments.registerSegments([segment1, segment2, segment3]); + + // Update all segments at once + await segmentChangesUpdater(undefined); + + // Should emit once when all segments are updated + expect(segmentsEmitSpy).toHaveBeenCalledTimes(1); + expect(segmentsEmitSpy).toBeCalledWith(SDK_SEGMENTS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); + }); + + test('test with empty segments - should still emit SEGMENTS_UPDATE metadata', async () => { + const segmentName = 'empty-segment'; + const segmentChange: ISegmentChangesResponse = { + name: segmentName, + added: [], + removed: [], + since: -1, + till: 123 + }; + + const mockSegmentChangesFetcher: ISegmentChangesFetcher = jest.fn().mockResolvedValue([segmentChange]); + + const segmentChangesUpdater = segmentChangesUpdaterFactory( + loggerMock, + mockSegmentChangesFetcher, + segments, + readinessManager, + 1000, + 1 + ); + + segments.registerSegments([segmentName]); + + await segmentChangesUpdater(undefined, segmentName); + + expect(segmentsEmitSpy).toBeCalledWith(SDK_SEGMENTS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); + }); +}); diff --git a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts b/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts index b93a7176..5398e06b 100644 --- a/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts +++ b/src/sync/polling/updaters/__tests__/splitChangesUpdater.spec.ts @@ -15,6 +15,7 @@ import { splitNotifications } from '../../../streaming/__tests__/dataMocks'; import { RBSegmentsCacheInMemory } from '../../../../storages/inMemory/RBSegmentsCacheInMemory'; import { RB_SEGMENT_UPDATE, SPLIT_UPDATE } from '../../../streaming/constants'; import { IN_RULE_BASED_SEGMENT } from '../../../../utils/constants'; +import { SDK_SPLITS_ARRIVED, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../../../../readiness/constants'; const ARCHIVED_FF = 'ARCHIVED'; @@ -120,6 +121,7 @@ test('splitChangesUpdater / compute splits mutation', () => { expect(splitsMutation.added).toEqual([activeSplitWithSegments]); expect(splitsMutation.removed).toEqual([archivedSplit]); + expect(splitsMutation.names).toEqual([activeSplitWithSegments.name, archivedSplit.name]); expect(Array.from(segments)).toEqual(['A', 'B']); // SDK initialization without sets @@ -129,6 +131,7 @@ test('splitChangesUpdater / compute splits mutation', () => { expect(splitsMutation.added).toEqual([testFFSetsAB, test2FFSetsX]); expect(splitsMutation.removed).toEqual([]); + expect(splitsMutation.names).toEqual([testFFSetsAB.name, test2FFSetsX.name]); expect(Array.from(segments)).toEqual([]); }); @@ -142,24 +145,28 @@ test('splitChangesUpdater / compute splits mutation with filters', () => { // should add it to mutations expect(splitsMutation.added).toEqual([testFFSetsAB]); expect(splitsMutation.removed).toEqual([]); + expect(splitsMutation.names).toEqual([testFFSetsAB.name]); // fetching existing test feature flag removed from set B splitsMutation = 
computeMutation([testFFRemoveSetB], new Set(), splitFiltersValidation); expect(splitsMutation.added).toEqual([testFFRemoveSetB]); expect(splitsMutation.removed).toEqual([]); + expect(splitsMutation.names).toEqual([testFFRemoveSetB.name]); // fetching existing test feature flag removed from set B splitsMutation = computeMutation([testFFRemoveSetA], new Set(), splitFiltersValidation); expect(splitsMutation.added).toEqual([]); expect(splitsMutation.removed).toEqual([testFFRemoveSetA]); + expect(splitsMutation.names).toEqual([testFFRemoveSetA.name]); // fetching existing test feature flag removed from set B splitsMutation = computeMutation([testFFEmptySet], new Set(), splitFiltersValidation); expect(splitsMutation.added).toEqual([]); expect(splitsMutation.removed).toEqual([testFFEmptySet]); + expect(splitsMutation.names).toEqual([testFFEmptySet.name]); // SDK initialization with names: ['test2'] splitFiltersValidation = { queryString: '&names=test2', groupedFilters: { bySet: [], byName: ['test2'], byPrefix: [] }, validFilters: [] }; @@ -167,11 +174,13 @@ test('splitChangesUpdater / compute splits mutation with filters', () => { expect(splitsMutation.added).toEqual([]); expect(splitsMutation.removed).toEqual([testFFSetsAB]); + expect(splitsMutation.names).toEqual([testFFSetsAB.name]); splitsMutation = computeMutation([test2FFSetsX, testFFEmptySet], new Set(), splitFiltersValidation); expect(splitsMutation.added).toEqual([test2FFSetsX]); expect(splitsMutation.removed).toEqual([testFFEmptySet]); + expect(splitsMutation.names).toEqual([test2FFSetsX.name, testFFEmptySet.name]); }); describe('splitChangesUpdater', () => { @@ -204,6 +213,7 @@ describe('splitChangesUpdater', () => { test('test without payload', async () => { const result = await splitChangesUpdater(); + const updatedFlags = splitChangesMock1.ff.d.map(ff => ff.name); expect(fetchSplitChanges).toBeCalledTimes(1); expect(fetchSplitChanges).lastCalledWith(-1, undefined, undefined, -1); @@ -211,7 +221,7 @@ describe('splitChangesUpdater', () => { expect(updateSplits).lastCalledWith(splitChangesMock1.ff.d, [], splitChangesMock1.ff.t); expect(updateRbSegments).toBeCalledTimes(0); // no rbSegments to update expect(registerSegments).toBeCalledTimes(1); - expect(splitsEmitSpy).toBeCalledWith('state::splits-arrived'); + expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: updatedFlags }); expect(result).toBe(true); }); @@ -276,7 +286,8 @@ describe('splitChangesUpdater', () => { // emit always if not configured sets for (const setMock of setMocks) { await expect(splitChangesUpdater(undefined, undefined, { payload: { ...payload, sets: setMock.sets, status: 'ACTIVE' }, changeNumber: index, type: SPLIT_UPDATE })).resolves.toBe(true); - expect(splitsEmitSpy.mock.calls[index][0]).toBe('state::splits-arrived'); + expect(splitsEmitSpy.mock.calls[index][0]).toBe(SDK_SPLITS_ARRIVED); + expect(splitsEmitSpy.mock.calls[index][1]).toEqual({ type: FLAGS_UPDATE, names: [payload.name] }); index++; } @@ -294,4 +305,123 @@ describe('splitChangesUpdater', () => { } }); + + test('test with ff payload - should emit metadata with flag name', async () => { + splitsEmitSpy.mockClear(); + + readinessManager.splits.splitsArrived = false; + storage.splits.clear(); + + const payload = splitNotifications[0].decoded as Pick; + const changeNumber = payload.changeNumber; + + await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); + + 
expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: [payload.name] }); + }); + + test('test with multiple flags updated - should emit metadata with all flag names', async () => { + splitsEmitSpy.mockClear(); + storage.splits.clear(); + storage.segments.clear(); + // Start with splitsArrived = false so it emits on first update + readinessManager.splits.splitsArrived = false; + readinessManager.segments.segmentsArrived = true; // Segments ready + + const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 100, conditions: [] } as unknown as ISplit; + const flag2 = { name: 'flag2', status: 'ACTIVE', changeNumber: 101, conditions: [] } as unknown as ISplit; + const flag3 = { name: 'flag3', status: 'ACTIVE', changeNumber: 102, conditions: [] } as unknown as ISplit; + + fetchMock.once('*', { status: 200, body: { ff: { d: [flag1, flag2, flag3], t: 102 } } }); + await splitChangesUpdater(); + + // Should emit with metadata when splitsArrived is false (first update) + expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: ['flag1', 'flag2', 'flag3'] }); + }); + + test('test with ARCHIVED flag - should emit metadata with flag name', async () => { + splitsEmitSpy.mockClear(); + storage.splits.clear(); + storage.segments.clear(); + // Start with splitsArrived = false so it emits on first update + readinessManager.splits.splitsArrived = false; + readinessManager.segments.segmentsArrived = true; // Segments ready + + const archivedFlag = { name: 'archived-flag', status: ARCHIVED_FF, changeNumber: 200, conditions: [] } as unknown as ISplit; + + const payload = archivedFlag as Pick; + const changeNumber = payload.changeNumber; + + await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: SPLIT_UPDATE })).resolves.toBe(true); + + // Should emit with metadata when splitsArrived is false (first update) + expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: [payload.name] }); + }); + + test('test with rbsegment payload - should emit SEGMENTS_UPDATE not FLAGS_UPDATE', async () => { + splitsEmitSpy.mockClear(); + readinessManager.splits.splitsArrived = true; + storage.rbSegments.clear(); + + const payload = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; + const changeNumber = payload.changeNumber; + + await expect(splitChangesUpdater(undefined, undefined, { payload, changeNumber: changeNumber, type: RB_SEGMENT_UPDATE })).resolves.toBe(true); + + // Should emit SEGMENTS_UPDATE (not FLAGS_UPDATE) when only RB segment is updated + expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); + }); + + test('test with only RB segment update and no flags - should emit SEGMENTS_UPDATE', async () => { + splitsEmitSpy.mockClear(); + readinessManager.splits.splitsArrived = true; + storage.splits.clear(); + storage.rbSegments.clear(); + + // Simulate a scenario where only RB segments are updated (no flags) + const rbSegment = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; + fetchMock.once('*', { status: 200, body: { rbs: { d: [rbSegment], t: 1684329854385 } } }); + await splitChangesUpdater(); + + // When updatedFlags.length === 0, should emit SEGMENTS_UPDATE + expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); + }); + + test('test with both flags and RB segments updated - 
should emit FLAGS_UPDATE with flag names', async () => { + splitsEmitSpy.mockClear(); + readinessManager.splits.splitsArrived = true; + storage.splits.clear(); + storage.rbSegments.clear(); + storage.segments.clear(); + + // Simulate a scenario where both flags and RB segments are updated + const flag1 = { name: 'flag1', status: 'ACTIVE', changeNumber: 400, conditions: [] } as unknown as ISplit; + const flag2 = { name: 'flag2', status: 'ACTIVE', changeNumber: 401, conditions: [] } as unknown as ISplit; + const rbSegment = { name: 'rbsegment', status: 'ACTIVE', changeNumber: 1684329854385, conditions: [] } as unknown as IRBSegment; + + fetchMock.once('*', { status: 200, body: { ff: { d: [flag1, flag2], t: 401 }, rbs: { d: [rbSegment], t: 1684329854385 } } }); + await splitChangesUpdater(); + + // When both flags and RB segments are updated, should emit FLAGS_UPDATE with flag names + expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: ['flag1', 'flag2'] }); + }); + + test('test client-side behavior - should emit even when segments not all fetched', async () => { + splitsEmitSpy.mockClear(); + storage.splits.clear(); + // Start with splitsArrived = false so it emits on first update + readinessManager.splits.splitsArrived = false; + readinessManager.segments.segmentsArrived = false; // Segments not ready - client-side should still emit + + // Create client-side updater (isClientSide = true) + const clientSideUpdater = splitChangesUpdaterFactory(loggerMock, splitChangesFetcher, storage, splitFiltersValidation, readinessManager.splits, 1000, 1, true); + + const flag1 = { name: 'client-flag', status: 'ACTIVE', changeNumber: 300, conditions: [] } as unknown as ISplit; + fetchMock.once('*', { status: 200, body: { ff: { d: [flag1], t: 300 } } }); + await clientSideUpdater(); + + // Client-side should emit even if segments aren't all fetched (isClientSide bypasses checkAllSegmentsExist) + expect(splitsEmitSpy).toBeCalledWith(SDK_SPLITS_ARRIVED, { type: FLAGS_UPDATE, names: ['client-flag'] }); + }); + }); diff --git a/src/sync/polling/updaters/mySegmentsUpdater.ts b/src/sync/polling/updaters/mySegmentsUpdater.ts index 5421d3f9..86f5cb0e 100644 --- a/src/sync/polling/updaters/mySegmentsUpdater.ts +++ b/src/sync/polling/updaters/mySegmentsUpdater.ts @@ -2,7 +2,7 @@ import { IMySegmentsFetcher } from '../fetchers/types'; import { IStorageSync } from '../../../storages/types'; import { ISegmentsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; -import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; +import { SDK_SEGMENTS_ARRIVED, SEGMENTS_UPDATE } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants'; import { MySegmentsData } from '../types'; @@ -56,7 +56,7 @@ export function mySegmentsUpdaterFactory( // Notify update if required if (usesSegmentsSync(storage) && (shouldNotifyUpdate || readyOnAlreadyExistentState)) { readyOnAlreadyExistentState = false; - segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED); + segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED, { type: SEGMENTS_UPDATE, names: [] }); } } diff --git a/src/sync/polling/updaters/segmentChangesUpdater.ts b/src/sync/polling/updaters/segmentChangesUpdater.ts index 8127a96c..5b27b361 100644 --- a/src/sync/polling/updaters/segmentChangesUpdater.ts +++ b/src/sync/polling/updaters/segmentChangesUpdater.ts @@ -1,10 +1,12 @@ import { ISegmentChangesFetcher 
} from '../fetchers/types'; import { ISegmentsCacheBase } from '../../../storages/types'; import { IReadinessManager } from '../../../readiness/types'; -import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants'; +import { SDK_SEGMENTS_ARRIVED, SEGMENTS_UPDATE } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { LOG_PREFIX_INSTANTIATION, LOG_PREFIX_SYNC_SEGMENTS } from '../../../logger/constants'; import { timeout } from '../../../utils/promise/timeout'; +import { SdkUpdateMetadata } from '../../../../types/splitio'; + type ISegmentChangesUpdater = (fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number) => Promise<boolean> @@ -83,7 +85,13 @@ export function segmentChangesUpdaterFactory( // if at least one segment fetch succeeded, mark segments ready if (shouldUpdateFlags.some(update => update) || readyOnAlreadyExistentState) { readyOnAlreadyExistentState = false; - if (readiness) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); + if (readiness) { + const metadata: SdkUpdateMetadata = { + type: SEGMENTS_UPDATE, + names: [] + }; + readiness.segments.emit(SDK_SEGMENTS_ARRIVED, metadata); + } } return true; }); diff --git a/src/sync/polling/updaters/splitChangesUpdater.ts b/src/sync/polling/updaters/splitChangesUpdater.ts index 3a1fc5a7..27a68313 100644 --- a/src/sync/polling/updaters/splitChangesUpdater.ts +++ b/src/sync/polling/updaters/splitChangesUpdater.ts @@ -3,13 +3,14 @@ import { ISplitChangesFetcher } from '../fetchers/types'; import { IRBSegment, ISplit, ISplitChangesResponse, ISplitFiltersValidation, MaybeThenable } from '../../../dtos/types'; import { ISplitsEventEmitter } from '../../../readiness/types'; import { timeout } from '../../../utils/promise/timeout'; -import { SDK_SPLITS_ARRIVED } from '../../../readiness/constants'; +import { SDK_SPLITS_ARRIVED, FLAGS_UPDATE, SEGMENTS_UPDATE } from '../../../readiness/constants'; import { ILogger } from '../../../logger/types'; import { SYNC_SPLITS_FETCH, SYNC_SPLITS_UPDATE, SYNC_RBS_UPDATE, SYNC_SPLITS_FETCH_FAILS, SYNC_SPLITS_FETCH_RETRY } from '../../../logger/constants'; import { startsWith } from '../../../utils/lang'; import { IN_RULE_BASED_SEGMENT, IN_SEGMENT, RULE_BASED_SEGMENT, STANDARD_SEGMENT } from '../../../utils/constants'; import { setToArray } from '../../../utils/lang/sets'; import { SPLIT_UPDATE } from '../../streaming/constants'; +import { SdkUpdateMetadata } from '../../../../types/splitio'; export type InstantUpdate = { payload: ISplit | IRBSegment, changeNumber: number, type: string }; type SplitChangesUpdater = (noCache?: boolean, till?: number, instantUpdate?: InstantUpdate) => Promise<boolean> @@ -54,7 +55,8 @@ export function parseSegments(ruleEntity: ISplit | IRBSegment, matcherType: type interface ISplitMutations<T extends ISplit | IRBSegment> { added: T[], - removed: T[] + removed: T[], + names: string[] } /** @@ -95,9 +97,10 @@ export function computeMutation<T extends ISplit | IRBSegment>(rules: Array<T>, } else { accum.removed.push(ruleEntity); } + accum.names.push(ruleEntity.name); return accum; - }, { added: [], removed: [] } as ISplitMutations<T>); + }, { added: [], removed: [], names: [] } as ISplitMutations<T>); } /** @@ -165,9 +168,11 @@ export function splitChangesUpdaterFactory( .then((splitChanges: ISplitChangesResponse) => { const usedSegments = new Set<string>(); + let updatedFlags: string[] = []; let ffUpdate: MaybeThenable<boolean> = false; if (splitChanges.ff) { - const { added, removed } = computeMutation(splitChanges.ff.d, usedSegments, splitFiltersValidation); + const { added, removed, names } = 
computeMutation(splitChanges.ff.d, usedSegments, splitFiltersValidation); + updatedFlags = names; log.debug(SYNC_SPLITS_UPDATE, [added.length, removed.length]); ffUpdate = splits.update(added, removed, splitChanges.ff.t); } @@ -193,7 +198,13 @@ export function splitChangesUpdaterFactory( .catch(() => false /** noop. just to handle a possible `checkAllSegmentsExist` rejection, before emitting SDK event */) .then(emitSplitsArrivedEvent => { // emit SDK events - if (emitSplitsArrivedEvent) splitsEventEmitter.emit(SDK_SPLITS_ARRIVED); + if (emitSplitsArrivedEvent) { + const metadata: SdkUpdateMetadata = { + type: updatedFlags.length > 0 ? FLAGS_UPDATE : SEGMENTS_UPDATE, + names: updatedFlags.length > 0 ? updatedFlags : [] + }; + splitsEventEmitter.emit(SDK_SPLITS_ARRIVED, metadata); + } return true; }); } diff --git a/src/sync/syncManagerOnline.ts b/src/sync/syncManagerOnline.ts index 92dbc28f..df9ff152 100644 --- a/src/sync/syncManagerOnline.ts +++ b/src/sync/syncManagerOnline.ts @@ -92,12 +92,14 @@ export function syncManagerOnlineFactory( // @TODO once event, impression and telemetry storages support persistence, call when `validateCache` promise is resolved submitterManager.start(!isConsentGranted(settings)); - return Promise.resolve(storage.validateCache ? storage.validateCache() : false).then((isCacheLoaded) => { + return Promise.resolve(storage.validateCache ? storage.validateCache() : { initialCacheLoad: true /* Fallback: assume initial load when validateCache doesn't exist */ }).then((cacheMetadata) => { if (!running) return; if (startFirstTime) { // Emits SDK_READY_FROM_CACHE - if (isCacheLoaded) readiness.splits.emit(SDK_SPLITS_CACHE_LOADED); + if (!cacheMetadata.initialCacheLoad) { + readiness.splits.emit(SDK_SPLITS_CACHE_LOADED, cacheMetadata); + } } diff --git a/src/utils/settingsValidation/storage/storageCS.ts b/src/utils/settingsValidation/storage/storageCS.ts index ef78ad84..04705253 100644 --- a/src/utils/settingsValidation/storage/storageCS.ts +++ b/src/utils/settingsValidation/storage/storageCS.ts @@ -8,7 +8,7 @@ import { IStorageFactoryParams, IStorageSync } from '../../../storages/types'; export function __InLocalStorageMockFactory(params: IStorageFactoryParams): IStorageSync { const result = InMemoryStorageCSFactory(params); - result.validateCache = () => Promise.resolve(true); // to emit SDK_READY_FROM_CACHE + result.validateCache = () => Promise.resolve({ initialCacheLoad: false /* Not an initial load, cache exists - to emit SDK_READY_FROM_CACHE */ }); return result; } __InLocalStorageMockFactory.type = STORAGE_MEMORY; diff --git a/types/splitio.d.ts b/types/splitio.d.ts index 1a505686..1b34772e 100644 --- a/types/splitio.d.ts +++ b/types/splitio.d.ts @@ -1,8 +1,8 @@ // Type definitions for Split Software SDKs // Project: https://www.split.io/ -import { RedisOptions } from 'ioredis'; -import { RequestOptions } from 'http'; +import type { RedisOptions } from 'ioredis'; +import type { RequestOptions } from 'http'; export as namespace SplitIO; export = SplitIO; @@ -492,6 +492,47 @@ declare namespace SplitIO { removeItem(key: string): void | Promise<void>; } + /** + * Metadata for the update event emitted when the SDK cache is updated with new data for flags or segments. + */ + type SdkUpdateMetadata = { + /** + * The type of update event. + */ + type: 'FLAGS_UPDATE' | 'SEGMENTS_UPDATE' + /** + * The names of the flags or segments that were updated. + */ + names: string[] + } + + /** + * Metadata keys for SDK update events. 
+ * Use the string literals directly: 'FLAGS_UPDATE' or 'SEGMENTS_UPDATE' + */ + type SdkUpdateMetadataKeys = { + FLAGS_UPDATE: 'FLAGS_UPDATE'; + SEGMENTS_UPDATE: 'SEGMENTS_UPDATE'; + }; + + /** + * Metadata for the ready events emitted when the SDK is ready to evaluate feature flags. + */ + type SdkReadyMetadata = { + /** + * Indicates whether this is an initial cache load, i.e. the SDK started without previously cached data (fresh install / first app launch). It is `false` when feature flag definitions were loaded from a warm cache. + * - `false` when SDK_READY_FROM_CACHE is emitted from cached data (before SDK_READY) + * - `false` when SDK_READY is emitted and the SDK was ready from cache first + * - `true` when SDK_READY_FROM_CACHE is emitted because the SDK became ready without cached data + * - `true` when SDK_READY is emitted and the SDK was not ready from cache + */ + initialCacheLoad: boolean + /** + * Timestamp in milliseconds since epoch when the cache was last updated. Undefined if `initialCacheLoad` is `true`. + */ + lastUpdateTimestamp?: number + } + /** * EventEmitter interface based on a subset of the Node.js EventEmitter methods. */ @@ -509,8 +550,17 @@ declare namespace SplitIO { * @see {@link https://nodejs.org/api/events.html} */ interface EventEmitter extends IEventEmitter { + addListener(event: EventConsts['SDK_READY'], listener: (metadata: SdkReadyMetadata) => void): this; + addListener(event: EventConsts['SDK_READY_FROM_CACHE'], listener: (metadata: SdkReadyMetadata) => void): this; + addListener(event: EventConsts['SDK_UPDATE'], listener: (metadata: SdkUpdateMetadata) => void): this; addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: EventConsts['SDK_READY'], listener: (metadata: SdkReadyMetadata) => void): this; + on(event: EventConsts['SDK_READY_FROM_CACHE'], listener: (metadata: SdkReadyMetadata) => void): this; + on(event: EventConsts['SDK_UPDATE'], listener: (metadata: SdkUpdateMetadata) => void): this; on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: EventConsts['SDK_READY'], listener: (metadata: SdkReadyMetadata) => void): this; + once(event: EventConsts['SDK_READY_FROM_CACHE'], listener: (metadata: SdkReadyMetadata) => void): this; + once(event: EventConsts['SDK_UPDATE'], listener: (metadata: SdkUpdateMetadata) => void): this; once(event: string | symbol, listener: (...args: any[]) => void): this; removeListener(event: string | symbol, listener: (...args: any[]) => void): this; off(event: string | symbol, listener: (...args: any[]) => void): this;
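For illustration, a minimal consumer-side sketch of the new typed listeners. Assumptions: `client` is an SDK client created elsewhere via the SDK factory, typed here as SplitIO.IBasicClient for convenience; the handler bodies and the `wireMetadataListeners` helper are placeholders, not part of the diff above.

// Sketch only: wiring listeners for the new SDK event metadata.
function wireMetadataListeners(client: SplitIO.IBasicClient) {
  client.on(client.Event.SDK_READY_FROM_CACHE, (metadata: SplitIO.SdkReadyMetadata) => {
    // Warm cache: initialCacheLoad is false and lastUpdateTimestamp indicates how stale the cached data is.
    console.log('Ready from cache. Last cache update:', metadata.lastUpdateTimestamp);
  });

  client.on(client.Event.SDK_READY, (metadata: SplitIO.SdkReadyMetadata) => {
    console.log('Ready. Initial cache load:', metadata.initialCacheLoad);
  });

  client.on(client.Event.SDK_UPDATE, (metadata: SplitIO.SdkUpdateMetadata) => {
    if (metadata.type === 'FLAGS_UPDATE') {
      console.log('Updated flags:', metadata.names);
    } else {
      // SEGMENTS_UPDATE: `names` is an empty array in this implementation.
      console.log('Segments updated');
    }
  });
}

Because `client.Event.SDK_UPDATE` has the literal type EventConsts['SDK_UPDATE'], the overloads added above resolve the listener parameter to SdkUpdateMetadata, and narrowing on `metadata.type` is type-safe.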