diff --git a/README.md b/README.md
index a5eb56d..5d84292 100644
--- a/README.md
+++ b/README.md
@@ -59,6 +59,48 @@ FronteggContext.init({
 });
 ```
 
+### Redis cache
+Some parts of the SDK can use a Redis cache to improve performance. To set up the cache, pass additional options
+to the `FronteggContext.init(..)` call.
+If no cache is configured, data is cached locally in the Node.js process memory.
+
+#### Redis cache with `ioredis` library
+```javascript
+const { FronteggContext } = require('@frontegg/client');
+
+FronteggContext.init({
+  FRONTEGG_CLIENT_ID: '',
+  FRONTEGG_API_KEY: '',
+}, {
+  cache: {
+    type: 'ioredis',
+    options: {
+      host: 'localhost',
+      port: 6379,
+      password: '',
+      db: 10,
+    }
+  }
+});
+```
+
+#### Redis cache with `redis` library
+```javascript
+const { FronteggContext } = require('@frontegg/client');
+
+FronteggContext.init({
+  FRONTEGG_CLIENT_ID: '',
+  FRONTEGG_API_KEY: '',
+}, {
+  cache: {
+    type: 'redis',
+    options: {
+      url: 'redis[s]://[[username][:password]@][host][:port][/db-number]',
+    }
+  }
+});
+```
+
 ### Middleware
 
 Use Frontegg's "withAuthentication" auth guard to protect your routes.
@@ -81,65 +123,12 @@ Head over to the Doc
 ### Access tokens
 
 When using M2M authentication, access tokens will be cached by the SDK.
-By default access tokens will be cached locally, however you can use two other kinds of cache:
+By default, access tokens will be cached locally; however, you can use two other kinds of cache:
 
 - ioredis
 - redis
 
-#### Use ioredis as your cache
-When initializing your context, pass an access tokens options object with your ioredis parameters
-
-```javascript
-const { FronteggContext } = require('@frontegg/client');
-
-const accessTokensOptions = {
-  cache: {
-    type: 'ioredis',
-    options: {
-      host: 'localhost',
-      port: 6379,
-      password: '',
-      db: 10,
-    },
-  },
-};
-
-FronteggContext.init(
-  {
-    FRONTEGG_CLIENT_ID: '',
-    FRONTEGG_API_KEY: '',
-  },
-  {
-    accessTokensOptions,
-  },
-);
-```
-
-#### Use redis as your cache
-When initializing your context, pass an access tokens options object with your redis parameters
-
-```javascript
-const { FronteggContext } = require('@frontegg/client');
-
-const accessTokensOptions = {
-  cache: {
-    type: 'redis',
-    options: {
-      url: 'redis[s]://[[username][:password]@][host][:port][/db-number]',
-    },
-  },
-};
-
-FronteggContext.init(
-  {
-    FRONTEGG_CLIENT_ID: '',
-    FRONTEGG_API_KEY: '',
-  },
-  {
-    accessTokensOptions,
-  },
-);
-```
+For details on cache configuration, refer to the Redis cache section above.
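+Since the separate `accessTokensOptions` argument has been removed (see the changelog), a single `FronteggContext.init(..)` call now configures caching for the whole SDK, M2M access tokens included. Below is a minimal sketch reusing the `ioredis` options shown in the Redis cache section:
+
+```javascript
+const { FronteggContext } = require('@frontegg/client');
+
+// One cache configuration serves the whole SDK, including the
+// M2M access-token cache; there is no separate accessTokensOptions.
+FronteggContext.init({
+  FRONTEGG_CLIENT_ID: '',
+  FRONTEGG_API_KEY: '',
+}, {
+  cache: {
+    type: 'ioredis',
+    options: {
+      host: 'localhost',
+      port: 6379,
+      password: '',
+      db: 10,
+    }
+  }
+});
+```
+
+If the `cache` option is omitted, the SDK falls back to the local in-process cache described above.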
### Clients @@ -154,7 +143,7 @@ const { AuditsClient } = require('@frontegg/client'); const audits = new AuditsClient(); // initialize the module -await audits.init('MY-CLIENT-ID', 'MY-AUDITS-KEY'); +await audits.init('', ''); ``` #### Sending audits @@ -295,7 +284,7 @@ const { IdentityClient } = require('@frontegg/client'); Then, initialize the client ```javascript -const identityClient = new IdentityClient({ FRONTEGG_CLIENT_ID: 'your-client-id', FRONTEGG_API_KEY: 'your-api-key' }); +const identityClient = new IdentityClient({ FRONTEGG_CLIENT_ID: '', FRONTEGG_API_KEY: '' }); ``` And use this client to validate diff --git a/ci/docker-compose.yml b/ci/docker-compose.yml new file mode 100644 index 0000000..1a1fb86 --- /dev/null +++ b/ci/docker-compose.yml @@ -0,0 +1,7 @@ +version: "3" +services: + redis: + image: redis + restart: always + ports: + - 36279:6379 diff --git a/ci/run-test-suite.sh b/ci/run-test-suite.sh new file mode 100755 index 0000000..48673c3 --- /dev/null +++ b/ci/run-test-suite.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e + +docker compose -p nodejs-sdk-tests up -d --wait + +npm run --prefix ../ test:jest --coverage +RESULT=$@ + +docker compose -p nodejs-sdk-tests down + +exit $RESULT \ No newline at end of file diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 464f018..2b53481 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -1,3 +1,25 @@ +# [6.0.0-alpha.1](https://github.com/frontegg/nodejs-sdk/compare/5.1.1-alpha.1...6.0.0-alpha.1) (2023-07-31) + + +### Code Refactoring + +* **sdk:** removed irrelevant accessTokenOptions; refactored cache manager implementations ([3bbe939](https://github.com/frontegg/nodejs-sdk/commit/3bbe93926e52eda261db11bb6fbdd65671074e4e)) + + +### Bug Fixes + +* **cache:** Bringing back the ICacheManager generic to the class level ([7d04440](https://github.com/frontegg/nodejs-sdk/commit/7d04440ab94e66d0155032597d42ec8b17c4b1da)) + + +### Features + +* **cache:** decoupled cache managers from AccessTokens ([85db523](https://github.com/frontegg/nodejs-sdk/commit/85db5230d7530e2e61dcea8e79174148e9cb1f6f)) + + +### BREAKING CHANGES + +* **sdk:** removed accessTokenOptions from FronteggContext configuration + ## [5.1.1-alpha.1](https://github.com/frontegg/nodejs-sdk/compare/5.1.0...5.1.1-alpha.1) (2023-07-30) diff --git a/jest.config.js b/jest.config.js index dd030d7..1332537 100644 --- a/jest.config.js +++ b/jest.config.js @@ -13,6 +13,7 @@ module.exports = { lines: 18, }, }, + setupFilesAfterEnv: ['jest-extended/all'], reporters: [ 'default', [ diff --git a/package-lock.json b/package-lock.json index 20b109f..f67b23c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -450,6 +450,27 @@ "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==" }, + "@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "requires": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "dependencies": { + "@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + 
"requires": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + } + } + }, "@dabh/diagnostics": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz", @@ -1835,6 +1856,30 @@ "p-retry": "^4.0.0" } }, + "@tsconfig/node10": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", + "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", + "dev": true + }, + "@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true + }, + "@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true + }, + "@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true + }, "@types/axios-mock-adapter": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/@types/axios-mock-adapter/-/axios-mock-adapter-1.10.0.tgz", @@ -2013,9 +2058,9 @@ "dev": true }, "@types/node": { - "version": "12.20.55", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.55.tgz", - "integrity": "sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==" + "version": "13.13.52", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.52.tgz", + "integrity": "sha512-s3nugnZumCC//n4moGGe6tkNMyYEdaDBitVjwPxXmR5lnMG5dHePinH2EdxkG3Rh1ghFHHixAG4NJhpJW1rthQ==" }, "@types/normalize-package-data": { "version": "2.4.1", @@ -2245,6 +2290,12 @@ "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true }, + "acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", + "dev": true + }, "agent-base": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", @@ -2324,6 +2375,12 @@ "picomatch": "^2.0.4" } }, + "arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, "argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -2906,6 +2963,12 @@ "path-type": "^4.0.0" } }, + "create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true + }, "cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -4644,6 +4707,16 @@ } } }, + "jest-extended": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jest-extended/-/jest-extended-4.0.0.tgz", + "integrity": 
"sha512-GMhMFdrwhYPB0y+cmI/5esz+F/Xc0OIzKbnr8SaiZ74YcWamxf7sVT78YlA15+JIQMTlpHBEgcxheyRBdHFqPA==", + "dev": true, + "requires": { + "jest-diff": "^29.0.0", + "jest-get-type": "^29.0.0" + } + }, "jest-get-type": { "version": "29.4.3", "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.4.3.tgz", @@ -9854,6 +9927,11 @@ "xtend": "~4.0.1" } }, + "tiny-typed-emitter": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tiny-typed-emitter/-/tiny-typed-emitter-2.1.0.tgz", + "integrity": "sha512-qVtvMxeXbVej0cQWKqVSSAHmKZEHAvxdF8HEUBFWts8h+xEo5m/lEiPakuyZ3BnCBjOD8i24kzNOiOLLgsSxhA==" + }, "tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", @@ -9957,6 +10035,35 @@ } } }, + "ts-node": { + "version": "10.9.1", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz", + "integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==", + "dev": true, + "requires": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + } + } + }, "tslib": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", @@ -10085,6 +10192,12 @@ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", "dev": true }, + "v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true + }, "v8-to-istanbul": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz", @@ -10289,6 +10402,12 @@ "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", "dev": true }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true + }, "yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/package.json b/package.json index 5a1f9f7..2b93e85 100644 --- a/package.json +++ b/package.json @@ -9,8 +9,9 @@ "build:watch": "rm -rf dist && tsc --watch", "lint": "eslint --ignore-path .eslintignore --ext .js,.ts .", "format": "prettier --write \"**/*.+(js|ts|json)\"", - "test": "npm run build && jest", - "test:coverage": "npm test -- --coverage", + "test:jest": "npm run build && jest --runInBand", + "test": "(cd ci; ./run-test-suite.sh)", + "test:coverage": "npm test", "test:watch": "npm run build && jest --watch", "dev": "tsc --watch" }, @@ -26,6 +27,7 @@ "axios": "^0.27.2", "jsonwebtoken": "^9.0.0", "node-cache": "^5.1.2", + "tiny-typed-emitter": "^2.1.0", "winston": "^3.8.2" }, "peerDependencies": { @@ -47,7 +49,7 @@ "@types/express": 
"^4.17.14", "@types/jest": "^29.2.0", "@types/jsonwebtoken": "^9.0.0", - "@types/node": "^12.20.55", + "@types/node": "^13.13.52", "@types/sinon": "^10.0.15", "@typescript-eslint/eslint-plugin": "^5.38.1", "@typescript-eslint/parser": "^5.38.1", @@ -58,6 +60,7 @@ "ioredis": "^5.2.5", "ioredis-mock": "^8.2.2", "jest": "^28.1.3", + "jest-extended": "^4.0.0", "jest-junit": "^14.0.1", "jest-mock-extended": "^3.0.4", "prettier": "^2.7.1", @@ -65,6 +68,7 @@ "semantic-release": "^21.0.5", "sinon": "^15.2.0", "ts-jest": "^28.0.8", + "ts-node": "^10.9.1", "typescript": "^4.8.4" } } diff --git a/src/authenticator/index.ts b/src/authenticator/index.ts index c05e6c3..0ad98e0 100644 --- a/src/authenticator/index.ts +++ b/src/authenticator/index.ts @@ -25,9 +25,9 @@ export class FronteggAuthenticator { return retry(() => this.authenticate(), { numberOfTries, - secondsDelayRange: { - min: 0.5, - max: 5, + delayRangeMs: { + min: 500, + max: 5000, }, }); } diff --git a/src/cache/cache.manager.interface.ts b/src/cache/cache.manager.interface.ts deleted file mode 100644 index 60221ed..0000000 --- a/src/cache/cache.manager.interface.ts +++ /dev/null @@ -1,9 +0,0 @@ -export interface SetOptions { - expiresInSeconds: number; -} - -export interface ICacheManager { - set(key: string, data: T, options?: SetOptions): Promise; - get(key: string): Promise; - del(key: string[]): Promise; -} diff --git a/src/cache/index.ts b/src/cache/index.ts deleted file mode 100644 index f38d27f..0000000 --- a/src/cache/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -export * from './cache.manager.interface'; -export * from './local-cache.manager'; -export * from './ioredis-cache.manager'; -export * from './redis-cache.manager'; diff --git a/src/cache/ioredis-cache.manager.spec.ts b/src/cache/ioredis-cache.manager.spec.ts deleted file mode 100644 index 926c40c..0000000 --- a/src/cache/ioredis-cache.manager.spec.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { IORedisCacheManager } from './ioredis-cache.manager'; - -jest.mock('../utils/package-loader', () => ({ - PackageUtils: { - loadPackage: (name: string) => { - switch (name) { - case 'ioredis': - return require('ioredis-mock'); - } - }, - }, -})); - -describe('IORedis cache manager', () => { - //@ts-ignore - const redisCacheManager = new IORedisCacheManager<{ data: string }>(); - const cacheKey = 'key'; - const cacheValue = { data: 'value' }; - - it('should set, get and delete from redis cache manager', async () => { - await redisCacheManager.set(cacheKey, cacheValue); - const res = await redisCacheManager.get(cacheKey); - expect(res).toEqual(cacheValue); - await redisCacheManager.del([cacheKey]); - const resAfterDel = await redisCacheManager.get(cacheKey); - expect(resAfterDel).toEqual(null); - }); - - it('should get null after expiration time', async () => { - await redisCacheManager.set(cacheKey, cacheValue, { expiresInSeconds: 1 }); - await new Promise((r) => setTimeout(r, 500)); - const res = await redisCacheManager.get(cacheKey); - expect(res).toEqual(cacheValue); - - await new Promise((r) => setTimeout(r, 600)); - - const resAfterDel = await redisCacheManager.get(cacheKey); - expect(resAfterDel).toEqual(null); - }); -}); diff --git a/src/cache/ioredis-cache.manager.ts b/src/cache/ioredis-cache.manager.ts deleted file mode 100644 index f830fdd..0000000 --- a/src/cache/ioredis-cache.manager.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { ICacheManager, SetOptions } from './cache.manager.interface'; -import { PackageUtils } from '../utils/package-loader'; -import { IIORedisCacheOptions } from 
'./types'; - -export class IORedisCacheManager implements ICacheManager { - private redisManager: any; - - constructor(options: IIORedisCacheOptions) { - const RedisInstance = PackageUtils.loadPackage('ioredis') as any; - this.redisManager = new RedisInstance(options); - } - - public async set(key: string, data: T, options?: SetOptions): Promise { - if (options?.expiresInSeconds) { - this.redisManager.set(key, JSON.stringify(data), 'EX', options.expiresInSeconds); - } else { - this.redisManager.set(key, JSON.stringify(data)); - } - } - - public async get(key: string): Promise { - const stringifiedData = await this.redisManager.get(key); - return stringifiedData ? JSON.parse(stringifiedData) : null; - } - - public async del(key: string[]): Promise { - if (key.length) { - await this.redisManager.del(key); - } - } -} diff --git a/src/cache/local-cache.manager.spec.ts b/src/cache/local-cache.manager.spec.ts deleted file mode 100644 index ec61587..0000000 --- a/src/cache/local-cache.manager.spec.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { LocalCacheManager } from './local-cache.manager'; - -describe('Local cache manager', () => { - const localCacheManager = new LocalCacheManager<{ data: string }>(); - const cacheKey = 'key'; - const cacheValue = { data: 'value' }; - - it('should set, get and delete from local cache manager', async () => { - await localCacheManager.set(cacheKey, cacheValue); - const res = await localCacheManager.get(cacheKey); - expect(res).toEqual(cacheValue); - await localCacheManager.del([cacheKey]); - const resAfterDel = await localCacheManager.get(cacheKey); - expect(resAfterDel).toEqual(null); - }); - - it('should get null after expiration time', async () => { - await localCacheManager.set(cacheKey, cacheValue, { expiresInSeconds: 1 }); - await new Promise((r) => setTimeout(r, 500)); - const res = await localCacheManager.get(cacheKey); - expect(res).toEqual(cacheValue); - - await new Promise((r) => setTimeout(r, 600)); - - const resAfterDel = await localCacheManager.get(cacheKey); - expect(resAfterDel).toEqual(null); - }); -}); diff --git a/src/cache/local-cache.manager.ts b/src/cache/local-cache.manager.ts deleted file mode 100644 index 7a05871..0000000 --- a/src/cache/local-cache.manager.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { ICacheManager, SetOptions } from './cache.manager.interface'; -import * as NodeCache from 'node-cache'; - -export class LocalCacheManager implements ICacheManager { - private nodeCache: NodeCache = new NodeCache(); - - public async set(key: string, data: T, options?: SetOptions): Promise { - if (options?.expiresInSeconds) { - this.nodeCache.set(key, data, options.expiresInSeconds); - } else { - this.nodeCache.set(key, data); - } - } - - public async get(key: string): Promise { - return this.nodeCache.get(key) || null; - } - - public async del(key: string[]): Promise { - if (key.length) { - this.nodeCache.del(key); - } - } -} diff --git a/src/cache/redis-cache.manager.ts b/src/cache/redis-cache.manager.ts deleted file mode 100644 index b799ad4..0000000 --- a/src/cache/redis-cache.manager.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { ICacheManager, SetOptions } from './cache.manager.interface'; -import { PackageUtils } from '../utils/package-loader'; -import { IRedisCacheOptions } from './types'; -import Logger from '../components/logger'; - -export class RedisCacheManager implements ICacheManager { - private redisManager: any; - - constructor(options: IRedisCacheOptions) { - const { createClient } = PackageUtils.loadPackage('redis') as any; - 
this.redisManager = createClient(options); - this.redisManager.connect().catch((e) => Logger.error('Failed to connect to redis', e)); - } - - public async set(key: string, data: T, options?: SetOptions): Promise { - if (options?.expiresInSeconds) { - this.redisManager.set(key, JSON.stringify(data), { EX: options.expiresInSeconds }); - } else { - this.redisManager.set(key, JSON.stringify(data)); - } - } - - public async get(key: string): Promise { - const stringifiedData = await this.redisManager.get(key); - return stringifiedData ? JSON.parse(stringifiedData) : null; - } - - public async del(key: string[]): Promise { - if (key.length) { - await this.redisManager.del(key); - } - } -} diff --git a/src/cache/types.ts b/src/cache/types.ts index ae4741d..e69de29 100644 --- a/src/cache/types.ts +++ b/src/cache/types.ts @@ -1,10 +0,0 @@ -export interface IIORedisCacheOptions { - host: string; - password?: string; - port: number; - db?: number; -} - -export interface IRedisCacheOptions { - url: string; -} diff --git a/src/clients/entitlements/entitlements-client.events.ts b/src/clients/entitlements/entitlements-client-events.enum.ts similarity index 63% rename from src/clients/entitlements/entitlements-client.events.ts rename to src/clients/entitlements/entitlements-client-events.enum.ts index d4245f7..5272d58 100644 --- a/src/clients/entitlements/entitlements-client.events.ts +++ b/src/clients/entitlements/entitlements-client-events.enum.ts @@ -1,4 +1,4 @@ -export enum EntitlementsClientEvents { +export enum EntitlementsClientEventsEnum { INITIALIZED = 'initialized', SNAPSHOT_UPDATED = 'snapshot-updated', } diff --git a/src/clients/entitlements/entitlements-client.spec.ts b/src/clients/entitlements/entitlements-client.spec.ts index 12f86f8..c56e57c 100644 --- a/src/clients/entitlements/entitlements-client.spec.ts +++ b/src/clients/entitlements/entitlements-client.spec.ts @@ -9,7 +9,8 @@ import * as Sinon from 'sinon'; import { useFakeTimers } from 'sinon'; import { IUserAccessTokenWithRoles, tokenTypes } from '../identity/types'; import { EntitlementsUserScoped } from './entitlements.user-scoped'; -import { InMemoryEntitlementsCache } from './storage/in-memory/in-memory.cache'; +import { FronteggCache } from '../../components/cache'; +import { LocalCacheManager } from '../../components/cache/managers'; const { EntitlementsUserScoped: EntitlementsUserScopedActual } = jest.requireActual('./entitlements.user-scoped'); @@ -19,11 +20,15 @@ const httpMock = mock(); jest.mock('../../authenticator'); jest.mock('../http'); jest.mock('./entitlements.user-scoped'); +jest.mock('../../components/cache'); describe(EntitlementsClient.name, () => { let entitlementsClient: EntitlementsClient; beforeEach(() => { + // given + jest.mocked(FronteggCache.getInstance).mockImplementation(async () => LocalCacheManager.create()); + // given jest.mocked(FronteggAuthenticator).mockReturnValue(authenticatorMock); authenticatorMock.init.mockResolvedValue(undefined); @@ -210,7 +215,7 @@ describe(EntitlementsClient.name, () => { expect(scoped).toBeInstanceOf(EntitlementsUserScopedActual); // and - expect(EntitlementsUserScoped).toHaveBeenCalledWith(entity, expect.any(InMemoryEntitlementsCache)); + expect(EntitlementsUserScoped).toHaveBeenCalledWith(entity, expect.anything()); }); afterEach(() => { diff --git a/src/clients/entitlements/entitlements-client.ts b/src/clients/entitlements/entitlements-client.ts index cb221c8..e8179ec 100644 --- a/src/clients/entitlements/entitlements-client.ts +++ 
b/src/clients/entitlements/entitlements-client.ts @@ -1,53 +1,110 @@ import { IFronteggContext } from '../../components/frontegg-context/types'; import { FronteggContext } from '../../components/frontegg-context'; import { FronteggAuthenticator } from '../../authenticator'; -import { EntitlementsClientOptions, VendorEntitlementsDto, VendorEntitlementsSnapshotOffsetDto } from './types'; +import { + EntitlementsClientGivenOptions, + EntitlementsClientOptions, + VendorEntitlementsDto, + VendorEntitlementsSnapshotOffsetDto, +} from './types'; import { config } from '../../config'; import { HttpClient } from '../http'; import Logger from '../../components/logger'; import { retry } from '../../utils'; -import * as events from 'events'; -import { EntitlementsClientEvents } from './entitlements-client.events'; -import { EntitlementsCache } from './storage/types'; -import { InMemoryEntitlementsCache } from './storage/in-memory/in-memory.cache'; +import { EntitlementsClientEventsEnum } from './entitlements-client-events.enum'; import { TEntity } from '../identity/types'; import { EntitlementsUserScoped } from './entitlements.user-scoped'; +import { CacheRevisionManager } from './storage/cache.revision-manager'; +import { CacheValue, ICacheManager } from '../../components/cache/managers'; +import { hostname } from 'os'; +import { FronteggCache } from '../../components/cache'; +import { LeaderElection } from '../../components/leader-election'; +import { TypedEmitter } from 'tiny-typed-emitter'; +import { LeaderElectionFactory } from '../../components/leader-election/factory'; + +interface IEntitlementsClientEvents { + [EntitlementsClientEventsEnum.INITIALIZED]: () => void; + [EntitlementsClientEventsEnum.SNAPSHOT_UPDATED]: (revision: number) => void; +} -export class EntitlementsClient extends events.EventEmitter { +export class EntitlementsClient extends TypedEmitter { // periodical refresh handler - private refreshTimeout: NodeJS.Timeout; + private refreshTimeout?: NodeJS.Timeout; private readonly readyPromise: Promise; - private readonly options: EntitlementsClientOptions; - - // cache instance - private cache?: EntitlementsCache; - // snapshot data - private offset: number = -1; + private cacheManager: CacheRevisionManager; - private constructor(private readonly httpClient: HttpClient, options: Partial = {}) { + private constructor( + private readonly httpClient: HttpClient, + cache: ICacheManager, + private readonly leaderElection: LeaderElection, + private readonly options: EntitlementsClientOptions, + ) { super(); - this.options = this.parseOptions(options); + this.cacheManager = new CacheRevisionManager(cache); - this.readyPromise = new Promise((resolve) => { - this.once(EntitlementsClientEvents.INITIALIZED, () => resolve(this)); + this.on(EntitlementsClientEventsEnum.SNAPSHOT_UPDATED, (offset) => { + Logger.debug('[entitlements] Snapshot refreshed.', { offset }); }); - this.refreshTimeout = setTimeout( - () => - this.refreshSnapshot().then(() => { - this.emit(EntitlementsClientEvents.INITIALIZED); - }), - this.options.initializationDelayMs, - ); + this.readyPromise = this.setupInitialization(); + this.setupLeaderElection(); + } + + private setupInitialization(): Promise { + this.once(EntitlementsClientEventsEnum.SNAPSHOT_UPDATED, () => { + this.emit(EntitlementsClientEventsEnum.INITIALIZED); + }); + + return new Promise((resolve) => { + this.once(EntitlementsClientEventsEnum.INITIALIZED, () => resolve(this)); + }); + } + + private setupLeaderElection(): void { + 
this.leaderElection.on('leader', () => { + this.stopPeriodicJob(); + this.setupLeaderPeriodicJob(); + }); + + this.leaderElection.on('follower', () => { + this.stopPeriodicJob(); + this.setupFollowerPeriodicJob(); + }); + + this.leaderElection.start(); + } + + /** + * This method starts the periodic job that tries to fetch the latest version of cache from Redis. + * It's called only when current EntitlementsClient instance becomes the leader. + */ + setupLeaderPeriodicJob(): void { + this.refreshSnapshot(); + } + + /** + * This method starts the periodic job that tries to swap the EntitlementsCache + * to the latest available revision of RedisCache. + * + * It's called only when current EntitlementsClient instance becomes the follower. + */ + setupFollowerPeriodicJob(): void { + this.swapToLatestSnapshot(); + } + + stopPeriodicJob(): void { + this.refreshTimeout && clearTimeout(this.refreshTimeout); } - private parseOptions(givenOptions: Partial): EntitlementsClientOptions { + private static parseOptions(givenOptions: EntitlementsClientGivenOptions): EntitlementsClientOptions { return { - retry: { numberOfTries: 3, secondsDelayRange: { min: 0.5, max: 5 } }, + instanceId: hostname(), + retry: { numberOfTries: 3, delayRangeMs: { min: 500, max: 5_000 } }, initializationDelayMs: 0, refreshTimeoutMs: 30_000, + leaderElection: { key: 'entitlements_client_leader' }, ...givenOptions, }; } @@ -57,74 +114,76 @@ export class EntitlementsClient extends events.EventEmitter { } forUser(entity: T): EntitlementsUserScoped { - if (!this.cache) { + const cache = this.cacheManager.getCache(); + if (!cache) { throw new Error('EntitlementsClient is not initialized yet.'); } - return new EntitlementsUserScoped(entity, this.cache); + return new EntitlementsUserScoped(entity, cache); } private async loadVendorEntitlements(): Promise { const entitlementsData = await this.httpClient.get('/api/v1/vendor-entitlements'); - const vendorEntitlementsDto = entitlementsData.data; - const newOffset = entitlementsData.data.snapshotOffset; - - const newCache = await InMemoryEntitlementsCache.initialize(vendorEntitlementsDto, newOffset.toString()); - const oldCache = this.cache; - this.cache = newCache; - this.offset = entitlementsData.data.snapshotOffset; + const { isUpdated, revision } = await this.cacheManager.loadSnapshotAsCurrentRevision(vendorEntitlementsDto); - // clean - await oldCache?.clear(); - await oldCache?.shutdown(); - - // emit - this.emit(EntitlementsClientEvents.SNAPSHOT_UPDATED, entitlementsData.data.snapshotOffset); + if (isUpdated) { + this.emit(EntitlementsClientEventsEnum.SNAPSHOT_UPDATED, revision); + } } private async refreshSnapshot(): Promise { await retry(async () => { - if (!(await this.haveRecentSnapshot())) { - Logger.debug('[entitlements] Refreshing the outdated snapshot.', { currentOffset: this.offset }); + if (!(await this.isCacheUpToDate())) { + Logger.debug('[entitlements] Refreshing the outdated snapshot.'); await this.loadVendorEntitlements(); - Logger.debug('[entitlements] Snapshot refreshed.', { currentOffset: this.offset }); } }, this.options.retry); this.refreshTimeout = setTimeout(() => this.refreshSnapshot(), this.options.refreshTimeoutMs); } - private async haveRecentSnapshot(): Promise { - const serverOffsetDto = await this.httpClient.get( - '/api/v1/vendor-entitlements-snapshot-offset', + private async swapToLatestSnapshot(): Promise { + const { isUpdated, revision } = await this.cacheManager.followRevision( + await this.cacheManager.getCurrentCacheRevision(), ); - const isRecent 
= serverOffsetDto.data.snapshotOffset === this.offset; + if (isUpdated) { + this.emit(EntitlementsClientEventsEnum.SNAPSHOT_UPDATED, revision); + } - Logger.debug('[entitlements] Offsets compared.', { - isRecent, - serverOffset: serverOffsetDto.data.snapshotOffset, - localOffset: this.offset, - }); + this.refreshTimeout = setTimeout(() => this.swapToLatestSnapshot(), this.options.refreshTimeoutMs); + } - return isRecent; + private async isCacheUpToDate(): Promise { + const serverOffsetDto = await this.httpClient.get( + '/api/v1/vendor-entitlements-snapshot-offset', + ); + return await this.cacheManager.hasGivenSnapshot(serverOffsetDto.data); } static async init( context: IFronteggContext = FronteggContext.getContext(), - options: Partial = {}, + givenOptions: EntitlementsClientGivenOptions = {}, ): Promise { + const options = EntitlementsClient.parseOptions(givenOptions); + const authenticator = new FronteggAuthenticator(); await authenticator.init(context.FRONTEGG_CLIENT_ID, context.FRONTEGG_API_KEY); const httpClient = new HttpClient(authenticator, { baseURL: config.urls.entitlementsService }); + const cache = await FronteggCache.getInstance(); - return new EntitlementsClient(httpClient, options); + return new EntitlementsClient( + httpClient, + cache, + LeaderElectionFactory.fromCache(options.instanceId, cache, options.leaderElection), + options, + ); } destroy(): void { - clearTimeout(this.refreshTimeout); + this.refreshTimeout && clearTimeout(this.refreshTimeout); } } diff --git a/src/clients/entitlements/entitlements.user-scoped.spec.ts b/src/clients/entitlements/entitlements.user-scoped.spec.ts index 6418d4f..b01cfa9 100644 --- a/src/clients/entitlements/entitlements.user-scoped.spec.ts +++ b/src/clients/entitlements/entitlements.user-scoped.spec.ts @@ -5,7 +5,7 @@ import { } from './entitlements.user-scoped'; import { IUser, IUserAccessToken, IUserApiToken, TEntityWithRoles, tokenTypes } from '../identity/types'; import { mock, mockReset } from 'jest-mock-extended'; -import { EntitlementsCache, NO_EXPIRE } from './storage/types'; +import { IEntitlementsCache, NO_EXPIRE } from './storage/types'; import { EntitlementJustifications } from './types'; import SpyInstance = jest.SpyInstance; @@ -26,19 +26,19 @@ const userApiTokenBase: Pick< const userAccessTokenBase: Pick = { type: tokenTypes.UserAccessToken, id: 'irrelevant', - sub: 'irrelevant' -} + sub: 'irrelevant', +}; const userTokenBase: Pick = { type: tokenTypes.UserToken, id: 'irrelevant', userId: 'irrelevant', roles: ['irrelevant'], - metadata: {} -} + metadata: {}, +}; describe(EntitlementsUserScoped.name, () => { - const cacheMock = mock(); + const cacheMock = mock(); let cut: EntitlementsUserScoped; afterEach(() => { @@ -46,13 +46,14 @@ describe(EntitlementsUserScoped.name, () => { }); describe.each([ - { tokenType: tokenTypes.UserApiToken, + { + tokenType: tokenTypes.UserApiToken, entity: { ...userApiTokenBase, permissions: ['foo'], userId: 'the-user-id', tenantId: 'the-tenant-id', - } as IUserApiToken + } as IUserApiToken, }, { tokenType: tokenTypes.UserAccessToken, @@ -61,18 +62,18 @@ describe(EntitlementsUserScoped.name, () => { userId: 'the-user-id', tenantId: 'the-tenant-id', roles: [], - permissions: ['foo'] - } as TEntityWithRoles + permissions: ['foo'], + } as TEntityWithRoles, }, { tokenType: tokenTypes.UserToken, entity: { ...userTokenBase, - permissions: [ 'foo' ], + permissions: ['foo'], sub: 'the-user-id', - tenantId: 'the-tenant-id' - } as IUser - } + tenantId: 'the-tenant-id', + } as IUser, + }, ])('given the 
authenticated user using $tokenType with permission "foo" granted', ({ entity }) => { beforeEach(() => { cut = new EntitlementsUserScoped(entity, cacheMock); diff --git a/src/clients/entitlements/entitlements.user-scoped.ts b/src/clients/entitlements/entitlements.user-scoped.ts index 5eaaf57..08c42e5 100644 --- a/src/clients/entitlements/entitlements.user-scoped.ts +++ b/src/clients/entitlements/entitlements.user-scoped.ts @@ -1,6 +1,6 @@ import { EntitlementJustifications, IsEntitledResult } from './types'; import { IEntityWithRoles, Permission, TEntity, tokenTypes, TUserEntity } from '../identity/types'; -import { EntitlementsCache, NO_EXPIRE } from './storage/types'; +import { IEntitlementsCache, NO_EXPIRE } from './storage/types'; import { pickExpTimestamp } from './storage/exp-time.utils'; export type IsEntitledToPermissionInput = { permissionKey: string }; @@ -11,7 +11,7 @@ export class EntitlementsUserScoped { private readonly userId?: string; private readonly permissions: Permission[]; - constructor(private readonly entity: T, private readonly cache: EntitlementsCache) { + constructor(private readonly entity: T, private readonly cache: IEntitlementsCache) { this.tenantId = entity.tenantId; const entityWithUserId = entity as TUserEntity; @@ -31,7 +31,7 @@ export class EntitlementsUserScoped { return entity.sub; case tokenTypes.UserApiToken: case tokenTypes.UserAccessToken: - return entity.userId; + return entity.userId; } } diff --git a/src/clients/entitlements/storage/cache.revision-manager.spec.ts b/src/clients/entitlements/storage/cache.revision-manager.spec.ts new file mode 100644 index 0000000..02e782b --- /dev/null +++ b/src/clients/entitlements/storage/cache.revision-manager.spec.ts @@ -0,0 +1,196 @@ +import { CacheRevisionManager, CURRENT_CACHE_REVISION } from './cache.revision-manager'; +import { mock, mockReset } from 'jest-mock-extended'; +import { CacheValue, ICacheManager } from '../../../components/cache/managers'; +import { VendorEntitlementsDto } from '../types'; +import type { FronteggEntitlementsCache } from './frontegg-cache/frontegg.cache'; +import { FronteggEntitlementsCacheInitializer } from './frontegg-cache/frontegg.cache-initializer'; + +jest.mock('./frontegg-cache/frontegg.cache-initializer'); + +describe(CacheRevisionManager.name, () => { + const entitlementsCacheMock = mock(); + + const cacheMock = mock>(); + let cut: CacheRevisionManager; + + beforeAll(() => { + jest.mocked(FronteggEntitlementsCacheInitializer.forFollower).mockResolvedValue(entitlementsCacheMock); + jest.mocked(FronteggEntitlementsCacheInitializer.forLeader).mockResolvedValue(entitlementsCacheMock); + }); + + beforeEach(() => { + cut = new CacheRevisionManager(cacheMock); + }); + + afterEach(() => { + mockReset(cacheMock); + mockReset(entitlementsCacheMock); + + jest.mocked(FronteggEntitlementsCacheInitializer.forFollower).mockClear(); + jest.mocked(FronteggEntitlementsCacheInitializer.forLeader).mockClear(); + }); + + describe('given the currently supported revision is: 1', () => { + beforeEach(() => { + cacheMock.get.calledWith(CURRENT_CACHE_REVISION).mockResolvedValue(1); + + cut.followRevision(1); + }); + + it('when I call .getCurrentCacheRevision(), then it resolves to that revision (1), taken from cache.', async () => { + // when & then + await expect(cut.getCurrentCacheRevision()).resolves.toEqual(1); + + // then + expect(cacheMock.get).toHaveBeenCalledWith(CURRENT_CACHE_REVISION); + }); + + describe('when .loadSnapshotAsCurrent(..) 
is called with DTO having different offset', () => { + function getDTO(rev: number): VendorEntitlementsDto { + return { + snapshotOffset: rev, + data: { + features: [], + entitlements: [], + featureBundles: [], + }, + }; + } + + let loadingSnapshotResult; + + describe('with DTO having different offset (333)', () => { + let expectedNewEntitlementsCache; + + beforeEach(async () => { + // given: expected new entitlements cache instance following given revision + expectedNewEntitlementsCache = mock(); + jest.mocked(FronteggEntitlementsCacheInitializer.forLeader).mockResolvedValue(expectedNewEntitlementsCache); + + // when + loadingSnapshotResult = await cut.loadSnapshotAsCurrentRevision(getDTO(333)); + }); + + it('then it resolves to IsUpdatedToRev structure telling with updated revision.', async () => { + // then + expect(loadingSnapshotResult).toEqual({ isUpdated: true, revision: 333 }); + }); + + it('then new offset is stored as current cache revision.', async () => { + // then + expect(cacheMock.set).toHaveBeenCalledWith(CURRENT_CACHE_REVISION, 333); + }); + + it('then new instance of entitlements cache is created from given DTO.', () => { + // then + expect(FronteggEntitlementsCacheInitializer.forLeader).toHaveBeenCalledWith(getDTO(333)); + + // and + expect(cut.getCache()).toBe(expectedNewEntitlementsCache); + }); + }); + + describe('with DTO having the same offset (1)', () => { + beforeEach(async () => { + // given + jest.mocked(FronteggEntitlementsCacheInitializer.forLeader).mockClear(); + jest.mocked(FronteggEntitlementsCacheInitializer.forFollower).mockClear(); + + // when + loadingSnapshotResult = await cut.loadSnapshotAsCurrentRevision(getDTO(1)); + }); + + it('then it resolves to IsUpdatedToRev structure telling nothing got updated and revision (1).', async () => { + // then + expect(loadingSnapshotResult).toEqual({ isUpdated: false, revision: 1 }); + }); + + it('then new entitlements cache instance was not created.', async () => { + // then + expect(FronteggEntitlementsCacheInitializer.forLeader).not.toHaveBeenCalled(); + expect(FronteggEntitlementsCacheInitializer.forFollower).not.toHaveBeenCalled(); + + // and + expect(cut.getCache()).toBe(entitlementsCacheMock); + }); + + it('then DTO revision (1) is not updated.', async () => { + // then + expect(cacheMock.set).not.toHaveBeenCalledWith(CURRENT_CACHE_REVISION, expect.anything()); + }); + }); + }); + + describe('when .followRevision(..) 
is called', () => { + let resultPromise; + + describe('with the same revision (1) as currently stored', () => { + let resultPromise; + beforeEach(async () => { + // given: clear the execution count here + jest.mocked(FronteggEntitlementsCacheInitializer.forFollower).mockClear(); + + // when: follow the same revision + resultPromise = cut.followRevision(1); + }); + + it('then entitlements cache is not replaced.', async () => { + // when + await resultPromise; + + // then + expect(FronteggEntitlementsCacheInitializer.forFollower).not.toHaveBeenCalled(); + }); + + it('then it resolves to IsUpdatedToRev structure with the current revision (1).', async () => { + // when & then + await expect(resultPromise).resolves.toEqual({ + isUpdated: false, + revision: 1, + }); + }); + }); + + describe('with the different revision (3) than currently stored', () => { + let expectedNewEntitlementsCache; + + beforeEach(async () => { + // given: clear the execution count here + jest.mocked(FronteggEntitlementsCacheInitializer.forFollower).mockClear(); + + // given: expected new entitlements cache instance following given revision + expectedNewEntitlementsCache = mock(); + jest.mocked(FronteggEntitlementsCacheInitializer.forFollower).mockResolvedValue(expectedNewEntitlementsCache); + + // when: follow the same revision + resultPromise = cut.followRevision(3); + }); + + it('then it resolves to IsUpdatedToRev structure with the new revision and "isUpdated" flag up.', async () => { + // when & then + await expect(resultPromise).resolves.toEqual({ + isUpdated: true, + revision: 3, + }); + }); + + it('then new instance of entitlements cache for new revision (3) is created.', async () => { + // when + await resultPromise; + + // then + expect(FronteggEntitlementsCacheInitializer.forFollower).toHaveBeenCalledWith(3); + + // and + expect(cut.getCache() === expectedNewEntitlementsCache).toBeTruthy(); + }); + }); + }); + }); + + describe('given the instance does neither follow any revision, nor loaded any revision to cache', () => { + it('when I call .getCache(), then it returns undefined.', () => { + expect(cut.getCache()).toBeUndefined(); + }); + }); +}); diff --git a/src/clients/entitlements/storage/cache.revision-manager.ts b/src/clients/entitlements/storage/cache.revision-manager.ts new file mode 100644 index 0000000..2a7d0ab --- /dev/null +++ b/src/clients/entitlements/storage/cache.revision-manager.ts @@ -0,0 +1,74 @@ +import { CacheValue, ICacheManager } from '../../../components/cache/managers'; +import { VendorEntitlementsDto, VendorEntitlementsSnapshotOffsetDto } from '../types'; +import { IEntitlementsCache } from './types'; +import { FronteggEntitlementsCacheInitializer } from './frontegg-cache/frontegg.cache-initializer'; +import Logger from '../../../components/logger'; + +export const CURRENT_CACHE_REVISION = 'latest-cache-rev'; + +type IsUpdatedToRev = { isUpdated: boolean; revision: number }; + +export class CacheRevisionManager { + private entitlementsCache?: IEntitlementsCache; + + private localRev?: number; + + constructor(private readonly cache: ICacheManager) {} + + async loadSnapshotAsCurrentRevision(dto: VendorEntitlementsDto): Promise { + const currentRevision = await this.getCurrentCacheRevision(); + const givenRevision = dto.snapshotOffset; + + if (currentRevision === givenRevision) return { isUpdated: false, revision: currentRevision }; + + // re-initialize the cache + const oldCache = this.entitlementsCache; + this.entitlementsCache = await FronteggEntitlementsCacheInitializer.forLeader(dto); 
+ + await this.setCurrentCacheRevision(givenRevision); + this.localRev = givenRevision; + + // clean + await oldCache?.clear(); + + return { isUpdated: true, revision: givenRevision }; + } + + async followRevision(revision: number): Promise { + if (revision && this.localRev !== revision) { + this.localRev = revision; + + // trigger the revision update here + this.entitlementsCache = await FronteggEntitlementsCacheInitializer.forFollower(revision); + + return { isUpdated: true, revision }; + } + + return { isUpdated: false, revision: this.localRev || 0 }; + } + + async hasGivenSnapshot(dto: VendorEntitlementsSnapshotOffsetDto): Promise { + const currentOffset = await this.getCurrentCacheRevision(); + const isEqual = dto.snapshotOffset === currentOffset; + + Logger.debug('[entitlements] Offsets compared.', { + isEqual, + serverOffset: dto.snapshotOffset, + localOffset: currentOffset, + }); + + return isEqual; + } + + private async setCurrentCacheRevision(offset: number): Promise { + await this.cache.set(CURRENT_CACHE_REVISION, offset); + } + + async getCurrentCacheRevision(): Promise { + return (await this.cache.get(CURRENT_CACHE_REVISION)) || 0; + } + + getCache(): IEntitlementsCache | undefined { + return this.entitlementsCache; + } +} diff --git a/src/clients/entitlements/storage/dto-to-cache-sources.mapper.ts b/src/clients/entitlements/storage/dto-to-cache-sources.mapper.ts new file mode 100644 index 0000000..bb2c5d7 --- /dev/null +++ b/src/clients/entitlements/storage/dto-to-cache-sources.mapper.ts @@ -0,0 +1,114 @@ +import { FeatureId, VendorEntitlementsDto } from '../types'; +import { BundlesSource, ExpirationTime, FeatureSource, NO_EXPIRE, UNBUNDLED_SRC_ID } from './types'; + +function ensureMapInMap>(map: Map, mapKey: K): T { + if (!map.has(mapKey)) { + map.set(mapKey, new Map() as T); + } + + return map.get(mapKey)!; +} + +function ensureArrayInMap(map: Map, mapKey: K): T[] { + if (!map.has(mapKey)) { + map.set(mapKey, []); + } + + return map.get(mapKey)!; +} + +function parseExpirationTime(time?: string | null): ExpirationTime { + if (time !== undefined && time !== null) { + return new Date(time).getTime(); + } + + return NO_EXPIRE; +} + +export class DtoToCacheSourcesMapper { + static map(dto: VendorEntitlementsDto): BundlesSource { + const { + data: { features, entitlements, featureBundles }, + } = dto; + + const bundlesMap: BundlesSource = new Map(); + const unbundledFeaturesIds: Set = new Set(); + + // helper features maps + const featuresMap: Map = new Map(); + features.forEach((feat) => { + const [id, key, permissions] = feat; + featuresMap.set(id, { + id, + key, + permissions: new Set(permissions || []), + }); + unbundledFeaturesIds.add(id); + }); + + // initialize bundles map + featureBundles.forEach((bundle) => { + const [id, featureIds] = bundle; + bundlesMap.set(id, { + id, + user_entitlements: new Map(), + tenant_entitlements: new Map(), + features: new Map( + featureIds.reduce>((prev, fId) => { + const featSource = featuresMap.get(fId); + + if (!featSource) { + // TODO: issue warning here! 
+ } else { + prev.push([featSource.key, featSource]); + + // mark feature as bundled + unbundledFeaturesIds.delete(fId); + } + + return prev; + }, []), + ), + }); + }); + + // fill bundles with entitlements + entitlements.forEach((entitlement) => { + const [featureBundleId, tenantId, userId, expirationDate] = entitlement; + const bundle = bundlesMap.get(featureBundleId); + + if (bundle) { + if (userId) { + // that's user-targeted entitlement + const tenantUserEntitlements = ensureMapInMap(bundle.user_entitlements, tenantId); + const usersEntitlements = ensureArrayInMap(tenantUserEntitlements, userId); + + usersEntitlements.push(parseExpirationTime(expirationDate)); + } else { + // that's tenant-targeted entitlement + const tenantEntitlements = ensureArrayInMap(bundle.tenant_entitlements, tenantId); + + tenantEntitlements.push(parseExpirationTime(expirationDate)); + } + } else { + // TODO: issue warning here! + } + }); + + // make "dummy" bundle for unbundled features + bundlesMap.set(UNBUNDLED_SRC_ID, { + id: UNBUNDLED_SRC_ID, + user_entitlements: new Map(), + tenant_entitlements: new Map(), + features: new Map( + [...unbundledFeaturesIds.values()].map((fId) => { + const featSource = featuresMap.get(fId)!; + + return [featSource.key, featSource]; + }), + ), + }); + + return bundlesMap; + } +} diff --git a/src/clients/entitlements/storage/frontegg-cache/frontegg.cache-initializer.spec.ts b/src/clients/entitlements/storage/frontegg-cache/frontegg.cache-initializer.spec.ts new file mode 100644 index 0000000..88471fe --- /dev/null +++ b/src/clients/entitlements/storage/frontegg-cache/frontegg.cache-initializer.spec.ts @@ -0,0 +1,173 @@ +import 'jest-extended'; +import { FronteggEntitlementsCacheInitializer } from './frontegg.cache-initializer'; +import { FronteggEntitlementsCache } from './frontegg.cache'; +import { ICacheManager, ICacheManagerCollection, ICacheManagerMap } from '../../../../components/cache/managers'; +import { mock, mockClear } from 'jest-mock-extended'; +import { FronteggCache } from '../../../../components/cache'; +import { EntitlementTuple, FeatureBundleTuple, FeatureTuple, VendorEntitlementsDto } from '../../types'; +import { ENTITLEMENTS_MAP_KEY, PERMISSIONS_COLLECTION_LIST } from './frontegg.cache-key.utils'; + +jest.mock('./frontegg.cache'); +jest.mock('../../../../components/cache'); + +const FronteggEntitlementsCache_Actual: typeof FronteggEntitlementsCache = + jest.requireActual('./frontegg.cache').FronteggEntitlementsCache; + +describe(FronteggEntitlementsCacheInitializer.name, () => { + let cacheMock = mock>(); + let entitlementsCacheMock = mock(); + + beforeAll(() => { + // given: frontegg cache is mocked + jest.mocked(FronteggCache.getInstance).mockResolvedValue(cacheMock); + + // given: mocked cache will scope to itself + cacheMock.forScope.mockReturnValue(cacheMock); + + // given: entitlements cache returns the mocked cache + entitlementsCacheMock.getCacheManager.mockReturnValue(cacheMock); + }); + + beforeEach(() => { + // given: by default return the mocked entitlements cache + jest.mocked(FronteggEntitlementsCache).mockReturnValue(entitlementsCacheMock); + }); + + afterEach(() => { + mockClear(cacheMock); + mockClear(entitlementsCacheMock); + + jest.mocked(FronteggEntitlementsCache).mockReset(); + }); + + describe('when .forFollower(..) 
is called with revision (3)', () => { + it('then it sets up FronteggEntitlementsCache to track revision in FronteggCache cache.', async () => { + // given: call the real constructor + jest + .mocked(FronteggEntitlementsCache) + .mockImplementationOnce( + (cache: ICacheManager, revision: number) => new FronteggEntitlementsCache_Actual(cache, revision), + ); + + // when & then + await expect(FronteggEntitlementsCacheInitializer.forFollower(33)).resolves.toBeInstanceOf( + FronteggEntitlementsCache_Actual, + ); + + // then + expect(FronteggEntitlementsCache).toHaveBeenCalledWith(cacheMock, 33); + }); + + it('then it does not set any value to cache.', async () => { + // when + await FronteggEntitlementsCacheInitializer.forFollower(33); + + // then + expect(cacheMock.set).not.toHaveBeenCalled(); + expect(cacheMock.map).not.toHaveBeenCalled(); + expect(cacheMock.del).not.toHaveBeenCalled(); + expect(cacheMock.collection).not.toHaveBeenCalled(); + }); + }); + + describe('given vendor entitlements DTO with offset (5)', () => { + function buildDTO( + offset: number, + features: FeatureTuple[] = [], + bundles: FeatureBundleTuple[] = [], + entitlements: EntitlementTuple[] = [], + ): VendorEntitlementsDto { + return { + snapshotOffset: offset, + data: { + features, + featureBundles: bundles, + entitlements, + }, + }; + } + + describe('and feature, bundle and entitlement', () => { + let dto: VendorEntitlementsDto; + + beforeEach(() => { + dto = buildDTO( + 5, + [ + ['f-1', 'foo', ['foo.read']], + ['f-2', 'boo', ['foo.write']], + ], + [['b-1', ['f-1', 'f-2']]], + [ + ['b-1', 't-1', 'u-1', undefined], + ['b-1', 't-2', undefined, undefined], + ], + ); + }); + + describe('when .forLeader(dto) is called', () => { + let result; + + const permissionToFeatureCollectionMock = mock>(); + const permissionsCollectionMock = mock>(); + const entitlementsMapMock = mock>(); + + beforeAll(() => { + cacheMock.collection.calledWith(PERMISSIONS_COLLECTION_LIST).mockReturnValue(permissionsCollectionMock); + cacheMock.map.calledWith(ENTITLEMENTS_MAP_KEY).mockReturnValue(entitlementsMapMock); + cacheMock.collection + .calledWith(expect.stringContaining('perms:')) + .mockReturnValue(permissionToFeatureCollectionMock); + }); + + beforeEach(async () => { + result = await FronteggEntitlementsCacheInitializer.forLeader(dto); + }); + + afterEach(() => { + mockClear(permissionsCollectionMock); + mockClear(entitlementsMapMock); + }); + + it('then list of all permissions is written to cache.', () => { + // then + expect(permissionsCollectionMock.set).toHaveBeenCalledWith('foo.read'); + expect(permissionsCollectionMock.set).toHaveBeenCalledWith('foo.write'); + + expect(permissionsCollectionMock.set).toHaveBeenCalledTimes(2); + + // and: + expect(cacheMock.collection).toHaveBeenCalledWith('perms:foo.read'); + expect(permissionToFeatureCollectionMock.set).toHaveBeenCalledWith('foo'); + + // and: + expect(cacheMock.collection).toHaveBeenCalledWith('perms:foo.write'); + expect(permissionToFeatureCollectionMock.set).toHaveBeenCalledWith('boo'); + }); + + it('then mapping of permission to feature is written to cache.', () => { + // then + expect(cacheMock.collection).toHaveBeenCalledWith('perms:foo.read'); + expect(permissionToFeatureCollectionMock.set).toHaveBeenCalledWith('foo'); + + // and: + expect(cacheMock.collection).toHaveBeenCalledWith('perms:foo.write'); + expect(permissionToFeatureCollectionMock.set).toHaveBeenCalledWith('boo'); + }); + + it('then each entitlement to bundle is resolved to "entitlement to feature" and written to 
cache.', () => { + // then + expect(cacheMock.map).toHaveBeenCalledWith(ENTITLEMENTS_MAP_KEY); + + // and + expect(entitlementsMapMock.set).toHaveBeenCalledWith('t-1:u-1:foo', expect.toBeNumber()); + expect(entitlementsMapMock.set).toHaveBeenCalledWith('t-1:u-1:boo', expect.toBeNumber()); + + // and + expect(entitlementsMapMock.set).toHaveBeenCalledWith('t-2::foo', expect.toBeNumber()); + expect(entitlementsMapMock.set).toHaveBeenCalledWith('t-2::boo', expect.toBeNumber()); + }); + }); + }); + }); +}); diff --git a/src/clients/entitlements/storage/frontegg-cache/frontegg.cache-initializer.ts b/src/clients/entitlements/storage/frontegg-cache/frontegg.cache-initializer.ts new file mode 100644 index 0000000..1e128dd --- /dev/null +++ b/src/clients/entitlements/storage/frontegg-cache/frontegg.cache-initializer.ts @@ -0,0 +1,108 @@ +import { FronteggEntitlementsCache } from './frontegg.cache'; +import { VendorEntitlementsDto } from '../../types'; +import { BundlesSource } from '../types'; +import { + ENTITLEMENTS_MAP_KEY, + getFeatureEntitlementKey, + getPermissionMappingKey, + OFFSET_KEY, + PERMISSIONS_COLLECTION_LIST, +} from './frontegg.cache-key.utils'; +import { DtoToCacheSourcesMapper } from '../dto-to-cache-sources.mapper'; +import { pickExpTimestamp } from '../exp-time.utils'; +import { FronteggCache } from '../../../../components/cache'; + +export class FronteggEntitlementsCacheInitializer { + static readonly CLEAR_TTL = 60 * 60 * 1000; + + constructor(private readonly entitlementsCache: FronteggEntitlementsCache) {} + + static async forLeader(dto: VendorEntitlementsDto): Promise { + const revision = dto.snapshotOffset; + + const cache = await FronteggCache.getInstance(); + const entitlementsCache = new FronteggEntitlementsCache(cache, revision); + + const cacheInitializer = new FronteggEntitlementsCacheInitializer(entitlementsCache); + + const sources = DtoToCacheSourcesMapper.map(dto); + + await cacheInitializer.setupPermissionsReadModel(sources); + await cacheInitializer.setupEntitlementsReadModel(sources); + await cacheInitializer.setupRevisionNumber(revision); + + return entitlementsCache; + } + + static async forFollower(revision: number): Promise { + return new FronteggEntitlementsCache(await FronteggCache.getInstance(), revision); + } + + private async setupPermissionsReadModel(src: BundlesSource): Promise { + const cache = this.entitlementsCache.getCacheManager(); + const permissionsList = cache.collection(PERMISSIONS_COLLECTION_LIST); + + for (const singleBundle of src.values()) { + for (const feature of singleBundle.features.values()) { + for (const permission of feature.permissions) { + // set permission => features mapping + await cache.collection(getPermissionMappingKey(permission)).set(feature.key); + + // add permission to the list + await permissionsList.set(permission); + } + } + } + } + + private async setupEntitlementsReadModel(src: BundlesSource): Promise { + const entitlementsHashMap = this.entitlementsCache.getCacheManager().map(ENTITLEMENTS_MAP_KEY); + + // iterating over bundles.. 
+ for (const singleBundle of src.values()) { + // iterating over tenant&user entitlements + for (const [tenantId, usersOfTenantEntitlements] of singleBundle.user_entitlements) { + // iterating over per-user entitlements + for (const [userId, expTimes] of usersOfTenantEntitlements) { + const entitlementExpTime = pickExpTimestamp(expTimes); + + await Promise.all( + [...singleBundle.features.values()].map((feature) => + entitlementsHashMap.set(getFeatureEntitlementKey(feature.key, tenantId, userId), entitlementExpTime), + ), + ); + } + } + + // iterating over tenant entitlements + for (const [tenantId, expTimes] of singleBundle.tenant_entitlements) { + for (const feature of singleBundle.features.values()) { + const entitlementExpTime = pickExpTimestamp(expTimes); + + await entitlementsHashMap.set(getFeatureEntitlementKey(feature.key, tenantId), entitlementExpTime); + } + } + } + } + + private async setupRevisionNumber(revision: number): Promise { + await this.entitlementsCache.getCacheManager().set(OFFSET_KEY, revision); + } + + async clear(): Promise { + const cache = this.entitlementsCache.getCacheManager(); + + // clear permissions maps + const allPermissions = await cache.collection(PERMISSIONS_COLLECTION_LIST).getAll(); + + for (const permission of allPermissions) { + await cache.expire([getPermissionMappingKey(permission)], FronteggEntitlementsCacheInitializer.CLEAR_TTL); + } + + // clear static fields + await cache.expire( + [PERMISSIONS_COLLECTION_LIST, ENTITLEMENTS_MAP_KEY], + FronteggEntitlementsCacheInitializer.CLEAR_TTL, + ); + } +} diff --git a/src/clients/entitlements/storage/frontegg-cache/frontegg.cache-key.utils.ts b/src/clients/entitlements/storage/frontegg-cache/frontegg.cache-key.utils.ts new file mode 100644 index 0000000..137494e --- /dev/null +++ b/src/clients/entitlements/storage/frontegg-cache/frontegg.cache-key.utils.ts @@ -0,0 +1,14 @@ +import { FeatureKey } from '../../types'; +import { Permission } from '../../../identity/types'; + +export const ENTITLEMENTS_MAP_KEY = 'entitlements'; +export const PERMISSIONS_COLLECTION_LIST = 'permissions'; +export const OFFSET_KEY = 'snapshot-offset'; + +export function getFeatureEntitlementKey(featKey: FeatureKey, tenantId: string, userId = ''): string { + return `${tenantId}:${userId}:${featKey}`; +} + +export function getPermissionMappingKey(permissionKey: Permission): string { + return `perms:${permissionKey}`; +} diff --git a/src/clients/entitlements/storage/in-memory/in-memory.cache.spec.ts b/src/clients/entitlements/storage/frontegg-cache/frontegg.cache.spec.ts similarity index 80% rename from src/clients/entitlements/storage/in-memory/in-memory.cache.spec.ts rename to src/clients/entitlements/storage/frontegg-cache/frontegg.cache.spec.ts index 45ce576..82e3912 100644 --- a/src/clients/entitlements/storage/in-memory/in-memory.cache.spec.ts +++ b/src/clients/entitlements/storage/frontegg-cache/frontegg.cache.spec.ts @@ -1,12 +1,23 @@ -import { InMemoryEntitlementsCache } from './in-memory.cache'; +import { FronteggEntitlementsCache } from './frontegg.cache'; import { NO_EXPIRE } from '../types'; +import { FronteggEntitlementsCacheInitializer } from './frontegg.cache-initializer'; +import { FronteggCache } from '../../../../components/cache'; +import { CacheValue, ICacheManager, LocalCacheManager } from '../../../../components/cache/managers'; -describe(InMemoryEntitlementsCache.name, () => { - let cut: InMemoryEntitlementsCache; +jest.mock('../../../../components/cache'); + +describe(FronteggEntitlementsCache.name, () => { 
+ let cut: FronteggEntitlementsCache; + let cache: ICacheManager; + + beforeEach(async () => { + cache = await LocalCacheManager.create(); + jest.mocked(FronteggCache.getInstance).mockResolvedValue(cache); + }); describe('given input data with no entitlements and bundle with feature "foo"', () => { - beforeEach(() => { - cut = InMemoryEntitlementsCache.initialize({ + beforeEach(async () => { + cut = await FronteggEntitlementsCacheInitializer.forLeader({ snapshotOffset: 1, data: { entitlements: [], @@ -23,8 +34,8 @@ describe(InMemoryEntitlementsCache.name, () => { }); describe('given input data with entitlement to bundle with feature "foo" (no permissions) for user "u-1"', () => { - beforeEach(() => { - cut = InMemoryEntitlementsCache.initialize({ + beforeEach(async () => { + cut = await FronteggEntitlementsCacheInitializer.forLeader({ snapshotOffset: 2, data: { features: [['f-1', 'foo', []]], @@ -41,8 +52,8 @@ describe(InMemoryEntitlementsCache.name, () => { }); describe('given input data with entitlement to bundle with feature "foo" (no permissions) for tenant "t-1"', () => { - beforeEach(() => { - cut = InMemoryEntitlementsCache.initialize({ + beforeEach(async () => { + cut = await FronteggEntitlementsCacheInitializer.forLeader({ snapshotOffset: 3, data: { features: [['f-1', 'foo', []]], @@ -64,8 +75,8 @@ describe(InMemoryEntitlementsCache.name, () => { }); describe('given input data with multiple time-restricted entitlements to bundle with feature "foo" (no permissions) for user "u-1" and tenant "t-2"', () => { - beforeEach(() => { - cut = InMemoryEntitlementsCache.initialize({ + beforeEach(async () => { + cut = await FronteggEntitlementsCacheInitializer.forLeader({ snapshotOffset: 4, data: { features: [['f-1', 'foo', []]], @@ -92,8 +103,8 @@ describe(InMemoryEntitlementsCache.name, () => { }); describe('given input data with mix of time-restricted and unrestricted entitlements to bundle with feature "foo" (no permissions) for user "u-1" and tenant "t-2"', () => { - beforeEach(() => { - cut = InMemoryEntitlementsCache.initialize({ + beforeEach(async () => { + cut = await FronteggEntitlementsCacheInitializer.forLeader({ snapshotOffset: 4, data: { features: [['f-1', 'foo', []]], @@ -120,8 +131,8 @@ describe(InMemoryEntitlementsCache.name, () => { }); describe('given input data with unbundled feature "foo" (with permission "bar.baz")', () => { - beforeEach(() => { - cut = InMemoryEntitlementsCache.initialize({ + beforeEach(async () => { + cut = await FronteggEntitlementsCacheInitializer.forLeader({ snapshotOffset: 5, data: { features: [['f-1', 'foo', ['bar.baz']]], diff --git a/src/clients/entitlements/storage/frontegg-cache/frontegg.cache.ts b/src/clients/entitlements/storage/frontegg-cache/frontegg.cache.ts new file mode 100644 index 0000000..9eb2bfa --- /dev/null +++ b/src/clients/entitlements/storage/frontegg-cache/frontegg.cache.ts @@ -0,0 +1,44 @@ +import { ExpirationTime, IEntitlementsCache } from '../types'; +import { FeatureKey } from '../../types'; +import { ENTITLEMENTS_MAP_KEY, getFeatureEntitlementKey, getPermissionMappingKey } from './frontegg.cache-key.utils'; +import { ICacheManager } from '../../../../components/cache/managers'; +import { FronteggEntitlementsCacheInitializer } from './frontegg.cache-initializer'; + +export class FronteggEntitlementsCache implements IEntitlementsCache { + private readonly cache: ICacheManager; + + constructor(cache: ICacheManager, readonly revision: number) { + this.cache = cache.forScope(FronteggEntitlementsCache.getCachePrefix(revision)); 
+ } + + static getCachePrefix(revision: number): string { + return `vendor_entitlements_${revision}_`; + } + + clear(): Promise { + return new FronteggEntitlementsCacheInitializer(this).clear(); + } + + async getEntitlementExpirationTime( + featKey: FeatureKey, + tenantId: string, + userId?: string, + ): Promise { + const entitlementKey = getFeatureEntitlementKey(featKey, tenantId, userId); + const result = await this.cache.map(ENTITLEMENTS_MAP_KEY).get(entitlementKey); + + return result || undefined; + } + + getLinkedFeatures(permissionKey: string): Promise> { + return this.cache.collection(getPermissionMappingKey(permissionKey)).getAll(); + } + + shutdown(): Promise { + return this.cache.close(); + } + + getCacheManager(): ICacheManager { + return this.cache; + } +} diff --git a/src/clients/entitlements/storage/in-memory/in-memory.cache-key.utils.ts b/src/clients/entitlements/storage/in-memory/in-memory.cache-key.utils.ts deleted file mode 100644 index 3cfa6b2..0000000 --- a/src/clients/entitlements/storage/in-memory/in-memory.cache-key.utils.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { FeatureKey } from '../../types'; - -export const ENTITLEMENTS_MAP_KEY = 'entitlements'; -export const PERMISSIONS_MAP_KEY = 'permissions'; -export const SRC_BUNDLES_KEY = 'src_bundles'; - -export function getFeatureEntitlementKey(featKey: FeatureKey, tenantId: string, userId = ''): string { - return `${tenantId}:${userId}:${featKey}`; -} diff --git a/src/clients/entitlements/storage/in-memory/in-memory.cache.ts b/src/clients/entitlements/storage/in-memory/in-memory.cache.ts deleted file mode 100644 index b27353e..0000000 --- a/src/clients/entitlements/storage/in-memory/in-memory.cache.ts +++ /dev/null @@ -1,246 +0,0 @@ -import { EntitlementsCache, ExpirationTime, NO_EXPIRE } from '../types'; -import { - EntitlementTuple, - FeatureBundleTuple, - FeatureTuple, - FeatureKey, - TenantId, - UserId, - VendorEntitlementsDto, - FeatureId, -} from '../../types'; -import { - ENTITLEMENTS_MAP_KEY, - PERMISSIONS_MAP_KEY, - SRC_BUNDLES_KEY, - getFeatureEntitlementKey, -} from './in-memory.cache-key.utils'; -import NodeCache = require('node-cache'); -import { pickExpTimestamp } from '../exp-time.utils'; -import { BundlesSource, EntitlementsMap, FeatureSource, PermissionsMap, UNBUNDLED_SRC_ID } from './types'; -import { Permission } from '../../../identity/types'; - -export class InMemoryEntitlementsCache implements EntitlementsCache { - private nodeCache: NodeCache; - - private constructor(readonly revision: string) { - this.nodeCache = new NodeCache({ - useClones: false, - errorOnMissing: true, - }); - } - - async getEntitlementExpirationTime( - featKey: FeatureKey, - tenantId: TenantId, - userId?: UserId, - ): Promise { - const entitlementsMap = this.nodeCache.get(ENTITLEMENTS_MAP_KEY); - if (!entitlementsMap) { - throw new Error('Cache is not properly initialized. Feature&Tenant&User => ExpirationTime map is missing.'); - } - - const entitlementKey = getFeatureEntitlementKey(featKey, tenantId, userId); - - return entitlementsMap.get(entitlementKey); - } - - async getLinkedFeatures(permissionKey: Permission): Promise> { - const permissionsMap = this.nodeCache.get(PERMISSIONS_MAP_KEY); - if (!permissionsMap) { - throw new Error('Cache is not properly initialized. 
Permissions => Features map is missing.'); - } - - const mapping = permissionsMap.get(permissionKey); - - return mapping || new Set(); - } - - static initialize(data: VendorEntitlementsDto, revPrefix?: string): InMemoryEntitlementsCache { - const cache = new InMemoryEntitlementsCache(revPrefix ?? data.snapshotOffset.toString()); - - const { - data: { features, entitlements, featureBundles }, - } = data; - - // build source structure - const sourceData = cache.buildSource(featureBundles, features, entitlements); - cache.nodeCache.set(SRC_BUNDLES_KEY, sourceData); - - // setup data for SDK to work - cache.setupEntitlementsReadModel(sourceData); - cache.setupPermissionsReadModel(sourceData); - - return cache; - } - - private buildSource( - bundles: FeatureBundleTuple[], - features: FeatureTuple[], - entitlements: EntitlementTuple[], - ): BundlesSource { - const bundlesMap: BundlesSource = new Map(); - const unbundledFeaturesIds: Set = new Set(); - - // helper features maps - const featuresMap: Map = new Map(); - features.forEach((feat) => { - const [id, key, permissions] = feat; - featuresMap.set(id, { - id, - key, - permissions: new Set(permissions || []), - }); - unbundledFeaturesIds.add(id); - }); - - // initialize bundles map - bundles.forEach((bundle) => { - const [id, featureIds] = bundle; - bundlesMap.set(id, { - id, - user_entitlements: new Map(), - tenant_entitlements: new Map(), - features: new Map( - featureIds.reduce>((prev, fId) => { - const featSource = featuresMap.get(fId); - - if (!featSource) { - // TODO: issue warning here! - } else { - prev.push([featSource.key, featSource]); - - // mark feature as bundled - unbundledFeaturesIds.delete(fId); - } - - return prev; - }, []), - ), - }); - }); - - // fill bundles with entitlements - entitlements.forEach((entitlement) => { - const [featureBundleId, tenantId, userId, expirationDate] = entitlement; - const bundle = bundlesMap.get(featureBundleId); - - if (bundle) { - if (userId) { - // that's user-targeted entitlement - const tenantUserEntitlements = this.ensureMapInMap(bundle.user_entitlements, tenantId); - const usersEntitlements = this.ensureArrayInMap(tenantUserEntitlements, userId); - - usersEntitlements.push(this.parseExpirationTime(expirationDate)); - } else { - // that's tenant-targeted entitlement - const tenantEntitlements = this.ensureArrayInMap(bundle.tenant_entitlements, tenantId); - - tenantEntitlements.push(this.parseExpirationTime(expirationDate)); - } - } else { - // TODO: issue warning here! - } - }); - - // make "dummy" bundle for unbundled features - bundlesMap.set(UNBUNDLED_SRC_ID, { - id: UNBUNDLED_SRC_ID, - user_entitlements: new Map(), - tenant_entitlements: new Map(), - features: new Map( - [...unbundledFeaturesIds.values()].map((fId) => { - const featSource = featuresMap.get(fId)!; - - return [featSource.key, featSource]; - }), - ), - }); - - return bundlesMap; - } - - private setupEntitlementsReadModel(src: BundlesSource): void { - const entitlementsReadModel: EntitlementsMap = new Map(); - - // iterating over bundles.. 
- src.forEach((singleBundle) => { - // iterating over tenant&user entitlements - singleBundle.user_entitlements.forEach((usersOfTenantEntitlements, tenantId) => { - // iterating over per-user entitlements - usersOfTenantEntitlements.forEach((expTimes, userId) => { - const entitlementExpTime = pickExpTimestamp(expTimes); - - singleBundle.features.forEach((feature) => { - entitlementsReadModel.set(getFeatureEntitlementKey(feature.key, tenantId, userId), entitlementExpTime); - }); - }); - }); - - // iterating over tenant entitlements - singleBundle.tenant_entitlements.forEach((expTimes, tenantId) => { - singleBundle.features.forEach((feature) => { - const entitlementExpTime = pickExpTimestamp(expTimes); - - entitlementsReadModel.set(getFeatureEntitlementKey(feature.key, tenantId), entitlementExpTime); - }); - }); - }); - - this.nodeCache.set(ENTITLEMENTS_MAP_KEY, entitlementsReadModel); - } - - private setupPermissionsReadModel(src: BundlesSource): void { - const permissionsReadModel: Map> = new Map(); - - src.forEach((singleBundle) => { - singleBundle.features.forEach((feature) => { - feature.permissions.forEach((permission) => { - this.ensureSetInMap(permissionsReadModel, permission).add(feature.key); - }); - }); - }); - - this.nodeCache.set(PERMISSIONS_MAP_KEY, permissionsReadModel); - } - - private ensureSetInMap(map: Map>, mapKey: K): Set { - if (!map.has(mapKey)) { - map.set(mapKey, new Set()); - } - - return map.get(mapKey)!; - } - - private ensureMapInMap>(map: Map, mapKey: K): T { - if (!map.has(mapKey)) { - map.set(mapKey, new Map() as T); - } - - return map.get(mapKey)!; - } - - private ensureArrayInMap(map: Map, mapKey: K): T[] { - if (!map.has(mapKey)) { - map.set(mapKey, []); - } - - return map.get(mapKey)!; - } - - private parseExpirationTime(time?: string | null): ExpirationTime { - if (time !== undefined && time !== null) { - return new Date(time).getTime(); - } - - return NO_EXPIRE; - } - - async clear(): Promise { - this.nodeCache.del(this.nodeCache.keys()); - } - - async shutdown(): Promise { - this.nodeCache.close(); - } -} diff --git a/src/clients/entitlements/storage/in-memory/types.ts b/src/clients/entitlements/storage/in-memory/types.ts deleted file mode 100644 index ab31604..0000000 --- a/src/clients/entitlements/storage/in-memory/types.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Permission } from '../../../identity/types'; -import { FeatureKey, TenantId, UserId } from '../../types'; -import { ExpirationTime } from '../types'; - -export const UNBUNDLED_SRC_ID = '__unbundled__'; -export type FeatureEntitlementKey = string; // tenant & user & feature key -export type EntitlementsMap = Map; -export type PermissionsMap = Map>; - -export type FeatureSource = { - id: string; - key: FeatureKey; - permissions: Set; -}; - -export type SingleEntityEntitlements = Map; - -export type SingleBundleSource = { - id: string; - features: Map; - user_entitlements: Map>; - tenant_entitlements: SingleEntityEntitlements; -}; - -export type BundlesSource = Map; diff --git a/src/clients/entitlements/storage/types.ts b/src/clients/entitlements/storage/types.ts index e960fcf..d03de16 100644 --- a/src/clients/entitlements/storage/types.ts +++ b/src/clients/entitlements/storage/types.ts @@ -1,9 +1,15 @@ -import { FeatureKey } from '../types'; +import { FeatureKey, TenantId, UserId } from '../types'; +import { Permission } from '../../identity/types'; export const NO_EXPIRE = -1; export type ExpirationTime = number | typeof NO_EXPIRE; -export interface EntitlementsCache { +export interface 
IEntitlementsCache { + /** + * The revision number to compare next entitlements cache versions. + */ + revision: number; + /** * Get the entitlement expiry time for given feature, tenant & user combination. */ @@ -28,3 +34,19 @@ export interface EntitlementsCache { */ shutdown(): Promise; } + +export const UNBUNDLED_SRC_ID = '__unbundled__'; +export type FeatureEntitlementKey = string; // tenant & user & feature key +export type FeatureSource = { + id: string; + key: FeatureKey; + permissions: Set; +}; +export type SingleEntityEntitlements = Map; +export type SingleBundleSource = { + id: string; + features: Map; + user_entitlements: Map>; + tenant_entitlements: SingleEntityEntitlements; +}; +export type BundlesSource = Map; diff --git a/src/clients/entitlements/types.ts b/src/clients/entitlements/types.ts index 0c4166a..a9d0a23 100644 --- a/src/clients/entitlements/types.ts +++ b/src/clients/entitlements/types.ts @@ -1,5 +1,6 @@ import { RetryOptions } from '../../utils'; import { Permission } from '../identity/types'; +import { ILeadershipElectionGivenOptions } from '../../components/leader-election/types'; export enum EntitlementJustifications { MISSING_FEATURE = 'missing-feature', @@ -39,7 +40,11 @@ export interface VendorEntitlementsSnapshotOffsetDto { } export interface EntitlementsClientOptions { + instanceId: string; initializationDelayMs: number; refreshTimeoutMs: number; retry: RetryOptions; + leaderElection: ILeadershipElectionGivenOptions; } + +export type EntitlementsClientGivenOptions = Partial; diff --git a/src/clients/identity/identity-client.ts b/src/clients/identity/identity-client.ts index 0d6a817..1281b43 100644 --- a/src/clients/identity/identity-client.ts +++ b/src/clients/identity/identity-client.ts @@ -3,19 +3,10 @@ import { FronteggAuthenticator } from '../../authenticator'; import { config } from '../../config'; import Logger from '../../components/logger'; import { FronteggContext } from '../../components/frontegg-context'; -import { - AuthHeaderType, - ExtractCredentialsResult, - ITenantApiToken, - IUser, - IUserApiToken, - IValidateTokenOptions, - TEntity, -} from './types'; +import { AuthHeaderType, ExtractCredentialsResult, IUser, IValidateTokenOptions, TEntity } from './types'; import { accessTokenHeaderResolver, authorizationHeaderResolver, TokenResolver } from './token-resolvers'; import { FailedToAuthenticateException } from './exceptions/failed-to-authenticate.exception'; import { IFronteggContext } from '../../components/frontegg-context/types'; -import { type } from 'os'; const tokenResolvers = [authorizationHeaderResolver, accessTokenHeaderResolver]; diff --git a/src/clients/identity/token-resolvers/access-token-resolver.ts b/src/clients/identity/token-resolvers/access-token-resolver.ts index efa35dc..bdd4336 100644 --- a/src/clients/identity/token-resolvers/access-token-resolver.ts +++ b/src/clients/identity/token-resolvers/access-token-resolver.ts @@ -10,7 +10,6 @@ import { import { FailedToAuthenticateException } from '../exceptions'; import { TokenResolver } from './token-resolver'; import { IAccessTokenService } from './access-token-services/access-token.service.interface'; -import { LocalCacheManager, IORedisCacheManager, RedisCacheManager } from '../../../cache'; import { CacheTenantAccessTokenService, CacheUserAccessTokenService, @@ -20,6 +19,7 @@ import { import { FronteggAuthenticator } from '../../../authenticator'; import { HttpClient } from '../../http'; import { FronteggContext } from '../../../components/frontegg-context'; +import { 
FronteggCache } from '../../../components/cache'; export class AccessTokenResolver extends TokenResolver { private authenticator: FronteggAuthenticator = new FronteggAuthenticator(); @@ -51,10 +51,8 @@ export class AccessTokenResolver extends TokenResolver { } return { - ...(entityWithRoles || ( - options?.withRolesAndPermissions ? await this.getEntity(entity) : {} - )), - ...entity + ...(entityWithRoles || (options?.withRolesAndPermissions ? await this.getEntity(entity) : {})), + ...entity, }; } @@ -65,7 +63,7 @@ export class AccessTokenResolver extends TokenResolver { FRONTEGG_API_KEY || process.env.FRONTEGG_API_KEY || '', ); - this.initAccessTokenServices(); + await this.initAccessTokenServices(); } protected getEntity(entity: IAccessToken): Promise { @@ -93,52 +91,16 @@ export class AccessTokenResolver extends TokenResolver { return service; } - private initAccessTokenServices(): void { + private async initAccessTokenServices(): Promise { if (this.accessTokenServices.length) { return; } - const accessTokensOptions = FronteggContext.getOptions().accessTokensOptions; - - if (accessTokensOptions?.cache?.type === 'ioredis') { - this.accessTokenServices = [ - new CacheTenantAccessTokenService( - new IORedisCacheManager(accessTokensOptions.cache.options), - new IORedisCacheManager(accessTokensOptions.cache.options), - new TenantAccessTokenService(this.httpClient), - ), - new CacheUserAccessTokenService( - new IORedisCacheManager(accessTokensOptions.cache.options), - new IORedisCacheManager(accessTokensOptions.cache.options), - new UserAccessTokenService(this.httpClient), - ), - ]; - } else if (accessTokensOptions?.cache?.type === 'redis') { - this.accessTokenServices = [ - new CacheTenantAccessTokenService( - new RedisCacheManager(accessTokensOptions.cache.options), - new RedisCacheManager(accessTokensOptions.cache.options), - new TenantAccessTokenService(this.httpClient), - ), - new CacheUserAccessTokenService( - new RedisCacheManager(accessTokensOptions.cache.options), - new RedisCacheManager(accessTokensOptions.cache.options), - new UserAccessTokenService(this.httpClient), - ), - ]; - } else { - this.accessTokenServices = [ - new CacheTenantAccessTokenService( - new LocalCacheManager(), - new LocalCacheManager(), - new TenantAccessTokenService(this.httpClient), - ), - new CacheUserAccessTokenService( - new LocalCacheManager(), - new LocalCacheManager(), - new UserAccessTokenService(this.httpClient), - ), - ]; - } + const cache = await FronteggCache.getInstance(); + + this.accessTokenServices = [ + new CacheTenantAccessTokenService(cache, new TenantAccessTokenService(this.httpClient)), + new CacheUserAccessTokenService(cache, new UserAccessTokenService(this.httpClient)), + ]; } } diff --git a/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-access-token.service.ts b/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-access-token.service-abstract.ts similarity index 75% rename from src/clients/identity/token-resolvers/access-token-services/cache-services/cache-access-token.service.ts rename to src/clients/identity/token-resolvers/access-token-services/cache-services/cache-access-token.service-abstract.ts index fa0b7a6..f41151e 100644 --- a/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-access-token.service.ts +++ b/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-access-token.service-abstract.ts @@ -1,18 +1,25 @@ import { IAccessToken, IEmptyAccessToken, 
IEntityWithRoles, tokenTypes } from '../../../types'; import { IAccessTokenService } from '../access-token.service.interface'; -import { ICacheManager } from '../../../../../cache/cache.manager.interface'; +import { ICacheManager } from '../../../../../components/cache/managers'; import { FailedToAuthenticateException } from '../../../exceptions'; -export abstract class CacheAccessTokenService implements IAccessTokenService { - constructor( - public readonly entityCacheManager: ICacheManager, - public readonly activeAccessTokensCacheManager: ICacheManager, +export abstract class CacheAccessTokenServiceAbstract implements IAccessTokenService { + protected abstract getCachePrefix(): string; + + public readonly entityCacheManager: ICacheManager; + public readonly activeAccessTokensCacheManager: ICacheManager; + + protected constructor( + cacheManager: ICacheManager, public readonly accessTokenService: IAccessTokenService, public readonly type: tokenTypes.UserAccessToken | tokenTypes.TenantAccessToken, - ) {} + ) { + this.entityCacheManager = cacheManager.forScope(this.getCachePrefix()); + this.activeAccessTokensCacheManager = cacheManager.forScope(this.getCachePrefix()); + } public async getEntity(entity: T): Promise { - const cacheKey = `${this.getCachePrefix()}_${entity.sub}`; + const cacheKey = entity.sub; const cachedData = await this.entityCacheManager.get(cacheKey); if (cachedData) { @@ -38,7 +45,7 @@ export abstract class CacheAccessTokenService implements } public async getActiveAccessTokenIds(): Promise { - const cacheKey = `${this.getCachePrefix()}_ids`; + const cacheKey = `ids`; const cachedData = await this.activeAccessTokensCacheManager.get(cacheKey); if (cachedData) { @@ -66,6 +73,4 @@ export abstract class CacheAccessTokenService implements private isEmptyAccessToken(accessToken: IEntityWithRoles | IEmptyAccessToken): accessToken is IEmptyAccessToken { return 'empty' in accessToken && accessToken.empty; } - - protected abstract getCachePrefix(): string; } diff --git a/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-tenant-access-token.service.ts b/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-tenant-access-token.service.ts index 70f0dd9..9648e46 100644 --- a/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-tenant-access-token.service.ts +++ b/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-tenant-access-token.service.ts @@ -1,18 +1,14 @@ -import { ICacheManager } from '../../../../../cache/cache.manager.interface'; -import { IEmptyAccessToken, IEntityWithRoles, ITenantAccessToken, tokenTypes } from '../../../types'; +import { ICacheManager } from '../../../../../components/cache/managers/cache.manager.interface'; +import { ITenantAccessToken, tokenTypes } from '../../../types'; import { AccessTokenService } from '../services/access-token.service'; -import { CacheAccessTokenService } from './cache-access-token.service'; +import { CacheAccessTokenServiceAbstract } from './cache-access-token.service-abstract'; -export class CacheTenantAccessTokenService extends CacheAccessTokenService { - constructor( - public readonly entityCacheManager: ICacheManager, - public readonly activeAccessTokensCacheManager: ICacheManager, - public readonly tenantAccessTokenService: AccessTokenService, - ) { - super(entityCacheManager, activeAccessTokensCacheManager, tenantAccessTokenService, tokenTypes.TenantAccessToken); +export class CacheTenantAccessTokenService extends 
CacheAccessTokenServiceAbstract { + constructor(cacheManager: ICacheManager, tenantAccessTokenService: AccessTokenService) { + super(cacheManager, tenantAccessTokenService, tokenTypes.TenantAccessToken); } protected getCachePrefix(): string { - return 'frontegg_sdk_v1_user_access_tokens'; + return 'frontegg_sdk_v1_user_access_tokens_'; } } diff --git a/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-user-access-token.service.ts b/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-user-access-token.service.ts index f00ec35..174cb46 100644 --- a/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-user-access-token.service.ts +++ b/src/clients/identity/token-resolvers/access-token-services/cache-services/cache-user-access-token.service.ts @@ -1,18 +1,17 @@ -import { ICacheManager } from '../../../../../cache/cache.manager.interface'; -import { IEmptyAccessToken, IEntityWithRoles, IUserAccessToken, tokenTypes } from '../../../types'; +import { ICacheManager } from '../../../../../components/cache/managers/cache.manager.interface'; +import { IUserAccessToken, tokenTypes } from '../../../types'; import { AccessTokenService } from '../services/access-token.service'; -import { CacheAccessTokenService } from './cache-access-token.service'; +import { CacheAccessTokenServiceAbstract } from './cache-access-token.service-abstract'; -export class CacheUserAccessTokenService extends CacheAccessTokenService { +export class CacheUserAccessTokenService extends CacheAccessTokenServiceAbstract { constructor( - public readonly entityCacheManager: ICacheManager, - public readonly activeAccessTokensCacheManager: ICacheManager, + cacheManager: ICacheManager, public readonly userAccessTokenService: AccessTokenService, ) { - super(entityCacheManager, activeAccessTokensCacheManager, userAccessTokenService, tokenTypes.UserAccessToken); + super(cacheManager, userAccessTokenService, tokenTypes.UserAccessToken); } protected getCachePrefix(): string { - return 'frontegg_sdk_v1_tenant_access_tokens'; + return 'frontegg_sdk_v1_tenant_access_tokens_'; } } diff --git a/src/components/cache/index.spec.ts b/src/components/cache/index.spec.ts new file mode 100644 index 0000000..df12d44 --- /dev/null +++ b/src/components/cache/index.spec.ts @@ -0,0 +1,83 @@ +import { IIORedisCacheOptions, ILocalCacheOptions, IRedisCacheOptions } from '../frontegg-context/types'; +import { FronteggContext } from '../frontegg-context'; + +describe('FronteggContext', () => { + beforeEach(() => { + /** + * In this test suite we need to reset Node modules and import them in every test case, so "fresh" modules are provided. + * This is the way to deal with singletons defined in the scope of module. 
+ */ + jest.resetModules(); + }); + + function mockCache(name: string) { + jest.mock('./managers'); + const { [name]: Manager } = require('./managers'); + + const cacheManagerMock = {}; + jest.mocked(Manager.create).mockResolvedValue(cacheManagerMock); + + return cacheManagerMock; + } + + describe.each([ + { + cacheConfigInfo: 'no cache', + config: {}, + expectedCacheName: 'LocalCacheManager', + }, + { + cacheConfigInfo: 'explicit local cache', + config: { + type: 'local', + } as ILocalCacheOptions, + expectedCacheName: 'LocalCacheManager', + }, + { + cacheConfigInfo: "type of 'ioredis' in `$.cache`", + config: { + cache: { + type: 'ioredis', + options: { host: 'foo', password: 'bar', db: 0, port: 6372 }, + } as IIORedisCacheOptions, + }, + expectedCacheName: 'IORedisCacheManager', + }, + { + cacheConfigInfo: "type of 'redis' in `$.cache`", + config: { + cache: { + type: 'redis', + options: { url: 'redis://url:6372' }, + } as IRedisCacheOptions, + }, + expectedCacheName: 'RedisCacheManager', + }, + ])('given $cacheConfigInfo configuration in FronteggContext', ({ config, expectedCacheName }) => { + let expectedCache; + + beforeEach(() => { + expectedCache = mockCache(expectedCacheName); + const { FronteggContext } = require('../frontegg-context'); + + FronteggContext.init( + { + FRONTEGG_CLIENT_ID: 'foo', + FRONTEGG_API_KEY: 'bar', + }, + config, + ); + }); + + it(`when cache is initialized, then the ${expectedCacheName} is returned.`, async () => { + // given + const { FronteggCache } = require('./index'); + + // when + const cache = await FronteggCache.getInstance(); + + // then + expect(cache).toBe(expectedCache); + }); + }); +}); diff --git a/src/components/cache/index.ts b/src/components/cache/index.ts new file mode 100644 index 0000000..4092ede --- /dev/null +++ b/src/components/cache/index.ts @@ -0,0 +1,29 @@ +import { FronteggContext } from '../frontegg-context'; +import { CacheValue, ICacheManager } from './managers'; +import { IORedisCacheManager, LocalCacheManager, RedisCacheManager } from './managers'; + +let cacheInstance: ICacheManager; + +export class FronteggCache { + static async getInstance(): Promise> { + if (!cacheInstance) { + cacheInstance = await FronteggCache.initialize(); + } + + return cacheInstance as ICacheManager; + } + + private static async initialize(): Promise> { + const options = FronteggContext.getOptions(); + const { cache } = options; + + switch (cache.type) { + case 'ioredis': + return IORedisCacheManager.create(cache.options); + case 'redis': + return RedisCacheManager.create(cache.options); + default: + return LocalCacheManager.create(); + } + } +} diff --git a/src/components/cache/managers/cache.manager.interface.ts b/src/components/cache/managers/cache.manager.interface.ts new file mode 100644 index 0000000..c804b55 --- /dev/null +++ b/src/components/cache/managers/cache.manager.interface.ts @@ -0,0 +1,43 @@ +export interface SetOptions { + expiresInSeconds: number; +} + +type Primitive = bigint | boolean | null | number | string | undefined | object; + +type JSONValue = Primitive | JSONObject | JSONArray; +export interface JSONObject { + [k: string]: JSONValue; +} +type JSONArray = JSONValue[]; + +export type CacheValue = JSONValue; + +export interface ICacheManager { + set(key: string, data: V, options?: SetOptions): Promise; + get(key: string): Promise; + del(key: string[]): Promise; + expire(keys: string[], ttlMs: number): Promise; + map(key: string): ICacheManagerMap; + collection(key: string): ICacheManagerCollection; + close(): Promise; + + 
/** + * This method should return the instance of ICacheManager with the same cache connector below, but scoped set/get + * methods to different type of values (defined by generic type S). + * + * If prefix is not given, the prefix of current instance should be used. + */ + forScope(prefix?: string): ICacheManager; +} + +export interface ICacheManagerMap { + set(field: string, data: T): Promise; + get(field: string): Promise; + del(field: string): Promise; +} + +export interface ICacheManagerCollection { + set(value: T): Promise; + has(value: T): Promise; + getAll(): Promise>; +} diff --git a/src/components/cache/managers/in-memory/local-cache.collection.ts b/src/components/cache/managers/in-memory/local-cache.collection.ts new file mode 100644 index 0000000..e28ca7e --- /dev/null +++ b/src/components/cache/managers/in-memory/local-cache.collection.ts @@ -0,0 +1,26 @@ +import * as NodeCache from 'node-cache'; +import { ICacheManagerCollection } from '../cache.manager.interface'; + +export class LocalCacheCollection implements ICacheManagerCollection { + constructor(private readonly key: string, private readonly cache: NodeCache) {} + + private ensureSetInCache(): Set { + if (!this.cache.has(this.key)) { + this.cache.set(this.key, new Set()); + } + + return this.cache.get(this.key)!; + } + + async has(value: T): Promise { + return this.ensureSetInCache().has(value); + } + + async set(value: T): Promise { + this.ensureSetInCache().add(value); + } + + async getAll(): Promise> { + return this.ensureSetInCache(); + } +} diff --git a/src/components/cache/managers/in-memory/local-cache.manager.spec.ts b/src/components/cache/managers/in-memory/local-cache.manager.spec.ts new file mode 100644 index 0000000..e25d1af --- /dev/null +++ b/src/components/cache/managers/in-memory/local-cache.manager.spec.ts @@ -0,0 +1,138 @@ +import { LocalCacheManager } from './local-cache.manager'; +import { LocalCacheCollection } from './local-cache.collection'; +import { LocalCacheMap } from './local-cache.map'; +import { CacheValue } from '../cache.manager.interface'; + +describe('Local cache manager', () => { + let localCacheManager: LocalCacheManager; + + const cacheKey = 'key'; + const cacheValue = { data: 'value' }; + + beforeEach(async () => { + localCacheManager = await LocalCacheManager.create(); + }); + + it('should set, get and delete from local cache manager', async () => { + await localCacheManager.set(cacheKey, cacheValue); + const res = await localCacheManager.get(cacheKey); + expect(res).toEqual(cacheValue); + await localCacheManager.del([cacheKey]); + const resAfterDel = await localCacheManager.get(cacheKey); + expect(resAfterDel).toEqual(null); + }); + + it('should get null after expiration time', async () => { + await localCacheManager.set(cacheKey, cacheValue, { expiresInSeconds: 1 }); + await new Promise((r) => setTimeout(r, 500)); + const res = await localCacheManager.get(cacheKey); + expect(res).toEqual(cacheValue); + + await new Promise((r) => setTimeout(r, 600)); + + const resAfterDel = await localCacheManager.get(cacheKey); + expect(resAfterDel).toEqual(null); + }); + + it('when .collection() is called, then instance of LocalCacheCollection is returned.', async () => { + // given + const cut = await LocalCacheManager.create(); + + // when & then + expect(cut.collection('my-key')).toBeInstanceOf(LocalCacheCollection); + }); + + it('when .hashmap() is called, then instance of LocalCacheMap is returned.', async () => { + // given + const cut = await LocalCacheManager.create(); + + // when & then 
+ expect(cut.map('my-key')).toBeInstanceOf(LocalCacheMap); + }); + + describe('given collection instance is received by .collection(key)', () => { + let cut: LocalCacheManager; + + beforeEach(async () => { + cut = await LocalCacheManager.create(); + }); + + describe('with key that has not been created yet', () => { + it('when .set(value) is called, then the underlying Set is created.', async () => { + // given + await expect(cut.get('my-key')).resolves.toBeNull(); + + // when + await cut.collection('my-key').set('foo'); + + // then + await expect(cut.get('my-key')).resolves.toStrictEqual(new Set(['foo'])); + }); + }); + + describe('with key that has been created already', () => { + let existingCollection: Set; + + beforeEach(() => { + existingCollection = new Set(['foo']); + cut.set('my-key', existingCollection); + }); + + it('when .set(value) is called, then new value is stored in the existing Set.', async () => { + // when + await cut.collection('my-key').set('foo'); + + // then + const expectedSet = await cut.get('my-key'); + + expect(expectedSet).toBe(existingCollection); + + // and + expect((expectedSet as Set).has('foo')).toBeTruthy(); + }); + }); + }); + + describe('given map instance is received by .map(key)', () => { + let cut: LocalCacheManager; + + beforeEach(async () => { + cut = await LocalCacheManager.create(); + }); + + describe('with key that has not been created yet', () => { + it('when .set(field, value) is called, then the underlying Map is created.', async () => { + // given + await expect(cut.get('my-key')).resolves.toBeNull(); + + // when + await cut.map('my-key').set('foo', 'bar'); + + // then + await expect(cut.get('my-key')).resolves.toStrictEqual(new Map([['foo', 'bar']])); + }); + }); + + describe('with key that has been created already', () => { + let existingMap: Map; + + beforeEach(() => { + existingMap = new Map([['foo', 'bar']]); + cut.set('my-key', existingMap); + }); + + it('when .set(field, value) is called, then new value is stored in the existing Map.', async () => { + // when + await cut.map('my-key').set('x', 'y'); + + // then + const expectedMap = await cut.get('my-key'); + + expect(expectedMap).toBe(existingMap); + + // and + expect((expectedMap as Map).get('x')).toStrictEqual('y'); + }); + }); + }); +}); diff --git a/src/components/cache/managers/in-memory/local-cache.manager.ts b/src/components/cache/managers/in-memory/local-cache.manager.ts new file mode 100644 index 0000000..b644da6 --- /dev/null +++ b/src/components/cache/managers/in-memory/local-cache.manager.ts @@ -0,0 +1,66 @@ +import * as NodeCache from 'node-cache'; +import { LocalCacheMap } from './local-cache.map'; +import { LocalCacheCollection } from './local-cache.collection'; +import { PrefixedManager } from '../prefixed-manager.abstract'; +import { + CacheValue, + ICacheManager, + ICacheManagerCollection, + ICacheManagerMap, + SetOptions, +} from '../cache.manager.interface'; + +export class LocalCacheManager extends PrefixedManager implements ICacheManager { + private constructor(private readonly nodeCache: NodeCache, prefix = '') { + super(prefix); + } + + static async create(prefix = ''): Promise> { + return new LocalCacheManager( + new NodeCache({ + useClones: false, + }), + prefix, + ); + } + + public async set(key: string, data: T, options?: SetOptions): Promise { + if (options?.expiresInSeconds) { + this.nodeCache.set(this.withPrefix(key), data, options.expiresInSeconds); + } else { + this.nodeCache.set(this.withPrefix(key), data); + } + } + + public async get(key: string): 
Promise { + return this.nodeCache.get(this.withPrefix(key)) || null; + } + + public async del(key: string[]): Promise { + if (key.length) { + this.nodeCache.del(key.map(this.withPrefix.bind(this))); + } + } + + async expire(keys: string[], ttlMs: number): Promise { + const ttlSec = Math.round(ttlMs / 1000); + + keys.forEach((key) => this.nodeCache.ttl(this.withPrefix(key), ttlSec)); + } + + map(key: string): ICacheManagerMap { + return new LocalCacheMap(this.withPrefix(key), this.nodeCache); + } + + collection(key: string): ICacheManagerCollection { + return new LocalCacheCollection(this.withPrefix(key), this.nodeCache); + } + + forScope(prefix?: string): ICacheManager { + return new LocalCacheManager(this.nodeCache, prefix ?? this.prefix); + } + + async close(): Promise { + this.nodeCache.close(); + } +} diff --git a/src/components/cache/managers/in-memory/local-cache.map.ts b/src/components/cache/managers/in-memory/local-cache.map.ts new file mode 100644 index 0000000..9f69df1 --- /dev/null +++ b/src/components/cache/managers/in-memory/local-cache.map.ts @@ -0,0 +1,24 @@ +import * as NodeCache from 'node-cache'; +import { ICacheManagerMap } from '../cache.manager.interface'; + +export class LocalCacheMap implements ICacheManagerMap { + constructor(private readonly key: string, private readonly cache: NodeCache) {} + + private ensureMapInCache(): Map { + if (!this.cache.has(this.key)) { + this.cache.set(this.key, new Map()); + } + + return this.cache.get(this.key)!; + } + + async del(field: string): Promise { + this.ensureMapInCache().delete(field); + } + async get(field: string): Promise { + return this.ensureMapInCache().get(field) || null; + } + async set(field: string, data: T): Promise { + this.ensureMapInCache().set(field, data); + } +} diff --git a/src/components/cache/managers/index.ts b/src/components/cache/managers/index.ts new file mode 100644 index 0000000..6b07fc6 --- /dev/null +++ b/src/components/cache/managers/index.ts @@ -0,0 +1,4 @@ +export * from './cache.manager.interface'; +export * from './in-memory/local-cache.manager'; +export * from './ioredis/ioredis-cache.manager'; +export * from './redis/redis-cache.manager'; diff --git a/src/components/cache/managers/ioredis/ioredis-cache.collection.ts b/src/components/cache/managers/ioredis/ioredis-cache.collection.ts new file mode 100644 index 0000000..aa333d7 --- /dev/null +++ b/src/components/cache/managers/ioredis/ioredis-cache.collection.ts @@ -0,0 +1,25 @@ +import IORedis from 'ioredis'; +import { ICacheValueSerializer } from '../../serializers/types'; +import { CacheValue, ICacheManagerCollection } from '../cache.manager.interface'; + +export class IORedisCacheCollection implements ICacheManagerCollection { + constructor( + private readonly key: string, + private readonly redis: IORedis, + private readonly serializer: ICacheValueSerializer, + ) {} + + async set(value: T): Promise { + await this.redis.sadd(this.key, this.serializer.serialize(value)); + } + + async has(value: T): Promise { + return (await this.redis.sismember(this.key, this.serializer.serialize(value))) > 0; + } + + async getAll(): Promise> { + const members = (await this.redis.smembers(this.key)).map((v) => this.serializer.deserialize(v)); + + return new Set(members); + } +} diff --git a/src/components/cache/managers/ioredis/ioredis-cache.manager.spec.ts b/src/components/cache/managers/ioredis/ioredis-cache.manager.spec.ts new file mode 100644 index 0000000..37416a7 --- /dev/null +++ b/src/components/cache/managers/ioredis/ioredis-cache.manager.spec.ts 
@@ -0,0 +1,140 @@ +import 'jest-extended'; +import { IORedisCacheManager } from './ioredis-cache.manager'; +import IORedis from 'ioredis'; +import { CacheValue } from '../cache.manager.interface'; + +// TODO: define all tests of Redis-based ICacheManager implementations in single file, only change the implementation +// for runs + +function delay(ms: number): Promise { + return new Promise((r) => setTimeout(r, ms)); +} + +describe(IORedisCacheManager.name, () => { + let cut: IORedisCacheManager; + let redisTestConnection: IORedis; + + beforeAll(async () => { + // initialize test Redis connection + redisTestConnection = new IORedis(36279, 'localhost'); + + // initial clean-up of used key + await redisTestConnection.del('key'); + + cut = await IORedisCacheManager.create({ host: 'localhost', port: 36279 }); + }); + + afterEach(async () => { + await redisTestConnection.del('key'); + }); + + afterAll(async () => { + await cut.close(); + await redisTestConnection.quit(); + }); + + describe('given simple key/value with key "key"', () => { + it('when .set("key", "value") is called, then it is stored in Redis as JSON-encoded string.', async () => { + // when + await cut.set('key', 'value'); + + // then + await expect(redisTestConnection.get('key')).resolves.toStrictEqual('"value"'); + }); + + describe('given .set("key", "value", options) has been called with expiration time, then after expiration', () => { + beforeEach(() => cut.set('key', 'value', { expiresInSeconds: 1 })); + + it('when expiration time has not passed yet, then it is kept in Redis.', async () => { + // when & then + await expect(redisTestConnection.exists('key')).resolves.toBeGreaterThan(0); + }); + + it('when expiration time has passed already, then it is removed from Redis.', async () => { + // when + await delay(1500); + + // then + await expect(redisTestConnection.exists('key')).resolves.toEqual(0); + }); + }); + + describe('and in Redis key "key" there is JSON-encoded string \'"foo"\' stored', () => { + beforeEach(() => redisTestConnection.set('key', '"foo"')); + + it('when .get("key") is called, then it resolves to string "foo".', async () => { + // when + await expect(cut.get('key')).resolves.toStrictEqual('foo'); + }); + + it('when .del("key") is called, then key "key" is removed from Redis DB.', async () => { + // when + await cut.del(['key']); + + // then + await expect(redisTestConnection.exists('key')).resolves.toEqual(0); + }); + }); + }); + + describe('given .map("key") is called', () => { + it('when map\'s .set("field", "value") is called, then it is stored in Redis Hashset as JSON-encoded string.', async () => { + // when + await cut.map('key').set('field', 'value'); + + // then + await expect(redisTestConnection.hget('key', 'field')).resolves.toEqual('"value"'); + }); + + describe('and in Redis Hashset with field "foo" is already storing JSON-encoded value \'"bar"\'', () => { + beforeEach(() => redisTestConnection.hset('key', 'foo', '"bar"')); + + it('when map\'s .get("foo") is called, then it resolves to value "bar".', async () => { + // when & then + await expect(cut.map('key').get('foo')).resolves.toStrictEqual('bar'); + }); + + it('when map\'s .get("baz") is called, then it resolves to NULL. 
[non-existing key]', async () => { + // when & then + await expect(cut.map('key').get('baz')).resolves.toBeNull(); + }); + + it('when map\'s .del("foo") is called, then it drops the field "foo" from hashset "key".', async () => { + // when + await expect(cut.map('key').del('foo')).toResolve(); + + // then + await expect(redisTestConnection.hexists('key', 'foo')).resolves.toEqual(0); + }); + }); + }); + + describe('given .collection("key") is called', () => { + it('when collection\'s .set("value") is called, then it is stored in Redis Set as JSON-encoded string.', async () => { + // when + await cut.collection('key').set('value'); + + // then + await expect(redisTestConnection.sismember('key', '"value"')).resolves.toBeTruthy(); + }); + + describe('and in Redis Set value JSON-encoded value \'"foo"\' is stored', () => { + beforeEach(() => redisTestConnection.sadd('key', '"foo"')); + + it('when collection\'s .getAll() is called, then it resolves to the Set instance with value "foo".', async () => { + // when & then + await expect(cut.collection('key').getAll()).resolves.toStrictEqual(new Set(['foo'])); + }); + + it('when collection\'s .has("foo") is called, then it resolves to TRUE.', async () => { + // when & then + await expect(cut.collection('key').has('foo')).resolves.toBeTrue(); + }); + + it('when collection\'s .has("non-existing-field") is called, then it resolves to FALSE.', async () => { + // when & then + await expect(cut.collection('key').has('non-existing-field')).resolves.toBeFalsy(); + }); + }); + }); +}); diff --git a/src/components/cache/managers/ioredis/ioredis-cache.manager.ts b/src/components/cache/managers/ioredis/ioredis-cache.manager.ts new file mode 100644 index 0000000..39b290b --- /dev/null +++ b/src/components/cache/managers/ioredis/ioredis-cache.manager.ts @@ -0,0 +1,78 @@ +import type { Redis } from 'ioredis'; +import { PackageUtils } from '../../../../utils/package-loader'; +import { PrefixedManager } from '../prefixed-manager.abstract'; +import { + CacheValue, + ICacheManager, + ICacheManagerCollection, + ICacheManagerMap, + SetOptions, +} from '../cache.manager.interface'; +import { IORedisCacheMap } from './ioredis-cache.map'; +import { IORedisCacheCollection } from './ioredis-cache.collection'; +import { ICacheValueSerializer } from '../../serializers/types'; +import { JsonSerializer } from '../../serializers/json.serializer'; +import type { RedisOptions } from 'ioredis'; + +export type IIORedisOptions = RedisOptions; + +export class IORedisCacheManager extends PrefixedManager implements ICacheManager { + private readonly serializer: ICacheValueSerializer; + + private constructor(private readonly redisManager: Redis, prefix = '') { + super(prefix); + + this.serializer = new JsonSerializer(); + } + + static async create(options?: IIORedisOptions, prefix = ''): Promise> { + const RedisCtor = PackageUtils.loadPackage('ioredis'); + + return new IORedisCacheManager(new RedisCtor(options), prefix); + } + + public async set(key: string, data: V, options?: SetOptions): Promise { + if (options?.expiresInSeconds) { + await this.redisManager.set(this.withPrefix(key), JSON.stringify(data), 'EX', options.expiresInSeconds); + } else { + await this.redisManager.set(this.withPrefix(key), JSON.stringify(data)); + } + } + + public async get(key: string): Promise { + const stringifiedData = await this.redisManager.get(this.withPrefix(key)); + return stringifiedData ? 
JSON.parse(stringifiedData) : null; + } + + public async del(key: string[]): Promise { + if (key.length) { + await this.redisManager.del(key.map(this.withPrefix.bind(this))); + } + } + + async expire(keys: string[], ttlMs: number): Promise { + for (const key of keys) { + await this.redisManager.pexpire(this.withPrefix(key), ttlMs); + } + } + + forScope(prefix?: string): ICacheManager { + return new IORedisCacheManager(this.redisManager, prefix ?? this.prefix); + } + + map(key: string): ICacheManagerMap { + return new IORedisCacheMap(this.withPrefix(key), this.redisManager, this.serializer); + } + + collection(key: string): ICacheManagerCollection { + return new IORedisCacheCollection(this.withPrefix(key), this.redisManager, this.serializer); + } + + getRedis(): Redis { + return this.redisManager; + } + + async close(): Promise { + await this.redisManager.quit(); + } +} diff --git a/src/components/cache/managers/ioredis/ioredis-cache.map.ts b/src/components/cache/managers/ioredis/ioredis-cache.map.ts new file mode 100644 index 0000000..334b9fc --- /dev/null +++ b/src/components/cache/managers/ioredis/ioredis-cache.map.ts @@ -0,0 +1,25 @@ +import type IORedis from 'ioredis'; +import { ICacheValueSerializer } from '../../serializers/types'; +import { ICacheManagerMap, CacheValue } from '../cache.manager.interface'; + +export class IORedisCacheMap implements ICacheManagerMap { + constructor( + private readonly key: string, + private readonly redis: IORedis, + private readonly serializer: ICacheValueSerializer, + ) {} + + async set(field: string, data: T): Promise { + await this.redis.hset(this.key, field, this.serializer.serialize(data)); + } + + async get(field: string): Promise { + const raw = await this.redis.hget(this.key, field); + + return raw !== null ? 
this.serializer.deserialize(raw) : null; + } + + async del(field: string): Promise { + await this.redis.hdel(this.key, field); + } +} diff --git a/src/components/cache/managers/prefixed-manager.abstract.ts b/src/components/cache/managers/prefixed-manager.abstract.ts new file mode 100644 index 0000000..e77f967 --- /dev/null +++ b/src/components/cache/managers/prefixed-manager.abstract.ts @@ -0,0 +1,7 @@ +export abstract class PrefixedManager { + protected constructor(protected readonly prefix: string = '') {} + + protected withPrefix(key: string): string { + return this.prefix + key; + } +} diff --git a/src/components/cache/managers/redis/redis-cache.collection.ts b/src/components/cache/managers/redis/redis-cache.collection.ts new file mode 100644 index 0000000..629d0ae --- /dev/null +++ b/src/components/cache/managers/redis/redis-cache.collection.ts @@ -0,0 +1,25 @@ +import { RedisClientType } from 'redis'; +import { ICacheValueSerializer } from '../../serializers/types'; +import { CacheValue, ICacheManagerCollection } from '../cache.manager.interface'; + +export class RedisCacheCollection implements ICacheManagerCollection { + constructor( + private readonly key: string, + private readonly redis: RedisClientType, + private readonly serializer: ICacheValueSerializer, + ) {} + + async set(value: T): Promise { + await this.redis.SADD(this.key, this.serializer.serialize(value)); + } + + async has(value: T): Promise { + return await this.redis.SISMEMBER(this.key, this.serializer.serialize(value)); + } + + async getAll(): Promise> { + const members = (await this.redis.SMEMBERS(this.key)).map((v) => this.serializer.deserialize(v)); + + return new Set(members); + } +} diff --git a/src/components/cache/managers/redis/redis-cache.manager.spec.ts b/src/components/cache/managers/redis/redis-cache.manager.spec.ts new file mode 100644 index 0000000..d079c78 --- /dev/null +++ b/src/components/cache/managers/redis/redis-cache.manager.spec.ts @@ -0,0 +1,143 @@ +import 'jest-extended'; +import IORedis from 'ioredis'; +import { RedisCacheManager } from './redis-cache.manager'; +import { CacheValue } from '../cache.manager.interface'; + +// TODO: define all tests of Redis-based ICacheManager implementations in single file, only change the implementation +// for runs + +function delay(ms: number): Promise { + return new Promise((r) => setTimeout(r, ms)); +} + +describe(RedisCacheManager.name, () => { + let cut: RedisCacheManager; + let redisTestConnection: IORedis; + + beforeAll(async () => { + // initialize test Redis connection + redisTestConnection = new IORedis(36279, 'localhost'); + + // initial clean-up of used key + await redisTestConnection.del('key'); + + cut = await RedisCacheManager.create({ url: 'redis://localhost:36279' }); + }); + + afterEach(async () => { + await redisTestConnection.del('key'); + }); + + afterAll(async () => { + await cut.close(); + await redisTestConnection.quit(); + }); + + describe('given simple key/value with key "key"', () => { + it('when .set("key", "value") is called, then it is stored in Redis as JSON-encoded string.', async () => { + // when + await cut.set('key', 'value'); + + // then + await expect(redisTestConnection.get('key')).resolves.toStrictEqual('"value"'); + }); + + describe('given .set("key", "value", options) has been called with expiration time, then after expiration', () => { + beforeEach(() => cut.set('key', 'value', { expiresInSeconds: 1 })); + + it('when expiration time has not passed yet, then it is kept in Redis.', async () => { + // when + await 
delay(100); + + // then + await expect(redisTestConnection.exists('key')).resolves.toBeGreaterThan(0); + }); + + it('when expiration time has passed already, then it is removed from Redis.', async () => { + // when + await delay(1500); + + // then + await expect(redisTestConnection.exists('key')).resolves.toEqual(0); + }); + }); + + describe('and in Redis key "key" there is JSON-encoded string \'"foo"\' stored', () => { + beforeEach(() => redisTestConnection.set('key', '"foo"')); + + it('when .get("key") is called, then it resolves to string "foo".', async () => { + // when + await expect(cut.get('key')).resolves.toStrictEqual('foo'); + }); + + it('when .del("key") is called, then key "key" is removed from Redis DB.', async () => { + // when + await cut.del(['key']); + + // then + await expect(redisTestConnection.exists('key')).resolves.toEqual(0); + }); + }); + }); + + describe('given .map("key") is called', () => { + it('when map\'s .set("field", "value") is called, then it is stored in Redis Hashset as JSON-encoded string.', async () => { + // when + await cut.map('key').set('field', 'value'); + + // then + await expect(redisTestConnection.hget('key', 'field')).resolves.toEqual('"value"'); + }); + + describe('and in Redis Hashset with field "foo" is already storing JSON-encoded value \'"bar"\'', () => { + beforeEach(() => redisTestConnection.hset('key', 'foo', '"bar"')); + + it('when map\'s .get("foo") is called, then it resolves to value "bar".', async () => { + // when & then + await expect(cut.map('key').get('foo')).resolves.toStrictEqual('bar'); + }); + + it('when map\'s .get("baz") is called, then it resolves to NULL. [non-existing key]', async () => { + // when & then + await expect(cut.map('key').get('baz')).resolves.toBeNull(); + }); + + it('when map\'s .del("foo") is called, then it drops the field "foo" from hashset "key".', async () => { + // when + await expect(cut.map('key').del('foo')).toResolve(); + + // then + await expect(redisTestConnection.hexists('key', 'foo')).resolves.toEqual(0); + }); + }); + }); + + describe('given .collection("key") is called', () => { + it('when collection\'s .set("value") is called, then it is stored in Redis Set as JSON-encoded string.', async () => { + // when + await cut.collection('key').set('value'); + + // then + await expect(redisTestConnection.sismember('key', '"value"')).resolves.toBeTruthy(); + }); + + describe('and in Redis Set value JSON-encoded value \'"foo"\' is stored', () => { + beforeEach(() => redisTestConnection.sadd('key', '"foo"')); + + it('when collection\'s .getAll() is called, then it resolves to the Set instance with value "foo".', async () => { + // when & then + await expect(cut.collection('key').getAll()).resolves.toStrictEqual(new Set(['foo'])); + }); + + it('when collection\'s .has("foo") is called, then it resolves to TRUE.', async () => { + // when & then + await expect(cut.collection('key').has('foo')).resolves.toBeTrue(); + }); + + it('when collection\'s .has("non-existing-field") is called, then it resolves to FALSE.', async () => { + // when & then + await expect(cut.collection('key').has('non-existing-field')).resolves.toBeFalsy(); + }); + }); + }); +}); diff --git a/src/components/cache/managers/redis/redis-cache.manager.ts b/src/components/cache/managers/redis/redis-cache.manager.ts new file mode 100644 index 0000000..5fb1563 --- /dev/null +++ b/src/components/cache/managers/redis/redis-cache.manager.ts @@ -0,0 +1,92 @@ +import { PackageUtils } from '../../../../utils/package-loader'; +import Logger from 
'../../../logger'; +import type { RedisClientType } from 'redis'; +import { RedisCacheMap } from './redis-cache.map'; +import { RedisCacheCollection } from './redis-cache.collection'; +import { ICacheValueSerializer } from '../../serializers/types'; +import { JsonSerializer } from '../../serializers/json.serializer'; +import { PrefixedManager } from '../prefixed-manager.abstract'; +import type * as Redis from 'redis'; +import { + CacheValue, + ICacheManager, + ICacheManagerCollection, + ICacheManagerMap, + SetOptions, +} from '../cache.manager.interface'; + +export interface IRedisOptions { + url: string; +} + +export class RedisCacheManager extends PrefixedManager implements ICacheManager { + private readonly serializer: ICacheValueSerializer; + + private readonly isReadyPromise: Promise; + + private constructor(private readonly redisManager: RedisClientType, prefix = '') { + super(prefix); + + this.serializer = new JsonSerializer(); + + this.isReadyPromise = this.redisManager.connect(); + this.isReadyPromise.catch((e) => Logger.error('Failed to connect to redis', e)); + } + + static create(options: IRedisOptions, prefix = ''): Promise> { + const { createClient } = PackageUtils.loadPackage('redis') as typeof Redis; + + return new RedisCacheManager(createClient(options), prefix).ready(); + } + + ready(): Promise { + return this.isReadyPromise.then(() => this); + } + + forScope(prefix?: string): ICacheManager { + return new RedisCacheManager(this.redisManager, prefix ?? this.prefix); + } + + map(key: string): ICacheManagerMap { + return new RedisCacheMap(this.withPrefix(key), this.redisManager, this.serializer); + } + + collection(key: string): ICacheManagerCollection { + return new RedisCacheCollection(this.withPrefix(key), this.redisManager, this.serializer); + } + + public async set(key: string, data: V, options?: SetOptions): Promise { + if (options?.expiresInSeconds) { + await this.redisManager.set(this.withPrefix(key), this.serializer.serialize(data), { + EX: options.expiresInSeconds, + }); + } else { + await this.redisManager.set(this.withPrefix(key), this.serializer.serialize(data)); + } + } + + public async get(key: string): Promise { + const rawData = await this.redisManager.get(this.withPrefix(key)); + return rawData ? 
this.serializer.deserialize(rawData) : null; + } + + public async del(key: string[]): Promise { + if (key.length) { + await this.redisManager.del(key.map(this.withPrefix.bind(this))); + } + } + + async expire(keys: string[], ttlMs: number): Promise { + for (const key of keys) { + await this.redisManager.PEXPIRE(this.withPrefix(key), ttlMs); + } + } + + getRedis(): RedisClientType { + return this.redisManager; + } + + close(): Promise { + return this.redisManager.disconnect(); + } +} diff --git a/src/components/cache/managers/redis/redis-cache.map.ts b/src/components/cache/managers/redis/redis-cache.map.ts new file mode 100644 index 0000000..c4eb280 --- /dev/null +++ b/src/components/cache/managers/redis/redis-cache.map.ts @@ -0,0 +1,25 @@ +import { RedisClientType } from 'redis'; +import { ICacheValueSerializer } from '../../serializers/types'; +import { CacheValue, ICacheManagerMap } from '../cache.manager.interface'; + +export class RedisCacheMap implements ICacheManagerMap { + constructor( + private readonly key: string, + private readonly redis: RedisClientType, + private readonly serializer: ICacheValueSerializer, + ) {} + + async set(field: string, data: T): Promise { + await this.redis.HSET(this.key, field, this.serializer.serialize(data)); + } + + async get(field: string): Promise { + const raw = await this.redis.HGET(this.key, field); + + return raw !== undefined ? this.serializer.deserialize(raw) : null; + } + + async del(field: string): Promise { + await this.redis.HDEL(this.key, field); + } +} diff --git a/src/components/cache/serializers/json.serializer.ts b/src/components/cache/serializers/json.serializer.ts new file mode 100644 index 0000000..c4d9b40 --- /dev/null +++ b/src/components/cache/serializers/json.serializer.ts @@ -0,0 +1,11 @@ +import { ICacheValueSerializer } from './types'; + +export class JsonSerializer implements ICacheValueSerializer { + serialize(data: T): string { + return JSON.stringify(data); + } + + deserialize(raw: string): T { + return JSON.parse(raw) as T; + } +} diff --git a/src/components/cache/serializers/types.ts b/src/components/cache/serializers/types.ts new file mode 100644 index 0000000..95ab81c --- /dev/null +++ b/src/components/cache/serializers/types.ts @@ -0,0 +1,4 @@ +export interface ICacheValueSerializer { + serialize(data: T): string; + deserialize(raw: string): T; +} diff --git a/src/components/frontegg-context/index.ts b/src/components/frontegg-context/index.ts index 31dcda4..672ec25 100644 --- a/src/components/frontegg-context/index.ts +++ b/src/components/frontegg-context/index.ts @@ -1,6 +1,12 @@ -import { IIORedisCacheOptions, IRedisCacheOptions } from '../../cache/types'; import { PackageUtils } from '../../utils/package-loader'; -import { IFronteggContext, IFronteggOptions, IAccessTokensOptions } from './types'; +import { IFronteggContext, IFronteggOptions, IFronteggCacheOptions } from './types'; +import { IIORedisOptions, IRedisOptions } from '../cache/managers'; + +const DEFAULT_OPTIONS: IFronteggOptions = { + cache: { + type: 'local', + }, +}; export class FronteggContext { public static getInstance(): FronteggContext { @@ -11,10 +17,12 @@ export class FronteggContext { return FronteggContext.instance; } - public static init(context: IFronteggContext, options?: IFronteggOptions) { - FronteggContext.getInstance().context = context; + public static init(context: IFronteggContext, givenOptions?: Partial) { + const options = FronteggContext.prepareOptions(givenOptions); FronteggContext.getInstance().validateOptions(options); - 
FronteggContext.getInstance().options = options ?? {}; + FronteggContext.getInstance().options = options; + + FronteggContext.getInstance().context = context; } public static getContext(): IFronteggContext { @@ -27,37 +35,35 @@ export class FronteggContext { } public static getOptions(): IFronteggOptions { - return FronteggContext.getInstance().options || {}; + return FronteggContext.getInstance().options; } private static instance: FronteggContext; private context: IFronteggContext | null = null; - private options: IFronteggOptions = {}; - - private constructor() {} + private options: IFronteggOptions; - private validateOptions(options?: IFronteggOptions): void { - if (options?.accessTokensOptions) { - this.validateAccessTokensOptions(options.accessTokensOptions); - } + private constructor() { + this.options = DEFAULT_OPTIONS; } - private validateAccessTokensOptions(accessTokensOptions: IAccessTokensOptions): void { - if (!accessTokensOptions.cache) { - throw new Error(`'cache' is missing from access tokens options`); + private validateOptions(options: Partial): void { + if (options.cache) { + this.validateCacheOptions(options.cache); } + } - if (accessTokensOptions.cache.type === 'ioredis') { - this.validateIORedisOptions(accessTokensOptions.cache.options); - } else if (accessTokensOptions.cache.type === 'redis') { - this.validateRedisOptions(accessTokensOptions.cache.options); + private validateCacheOptions(cache: IFronteggCacheOptions): void { + if (cache.type === 'ioredis') { + this.validateIORedisOptions(cache.options); + } else if (cache.type === 'redis') { + this.validateRedisOptions(cache.options); } } - private validateIORedisOptions(redisOptions: IIORedisCacheOptions): void { + private validateIORedisOptions(redisOptions: IIORedisOptions): void { PackageUtils.loadPackage('ioredis'); - const requiredProperties: (keyof IIORedisCacheOptions)[] = ['host', 'port']; + const requiredProperties: (keyof IIORedisOptions)[] = ['host', 'port']; requiredProperties.forEach((requiredProperty) => { if (redisOptions[requiredProperty] === undefined) { throw new Error(`${requiredProperty} is missing from ioredis cache options`); @@ -65,14 +71,21 @@ export class FronteggContext { }); } - private validateRedisOptions(redisOptions: IRedisCacheOptions): void { + private validateRedisOptions(redisOptions: IRedisOptions): void { PackageUtils.loadPackage('redis'); - const requiredProperties: (keyof IRedisCacheOptions)[] = ['url']; + const requiredProperties: (keyof IRedisOptions)[] = ['url']; requiredProperties.forEach((requiredProperty) => { if (redisOptions[requiredProperty] === undefined) { throw new Error(`${requiredProperty} is missing from redis cache options`); } }); } + + private static prepareOptions(options?: Partial): IFronteggOptions { + return { + ...DEFAULT_OPTIONS, + ...(options || {}), + }; + } } diff --git a/src/components/frontegg-context/types.ts b/src/components/frontegg-context/types.ts index 80e9cca..46c5da9 100644 --- a/src/components/frontegg-context/types.ts +++ b/src/components/frontegg-context/types.ts @@ -1,4 +1,4 @@ -import { IIORedisCacheOptions, IRedisCacheOptions } from '../../cache/types'; +import { IIORedisOptions, IRedisOptions } from '../cache/managers'; export interface IFronteggContext { FRONTEGG_CLIENT_ID: string; @@ -6,28 +6,25 @@ export interface IFronteggContext { } export interface IFronteggOptions { - cache?: IAccessTokensLocalCache | IAccessTokensIORedisCache | IAccessTokensRedisCache; - accessTokensOptions?: IAccessTokensOptions; + cache: 
IFronteggCacheOptions; } -export interface IAccessTokensOptions { - cache: IAccessTokensLocalCache | IAccessTokensIORedisCache | IAccessTokensRedisCache; -} - -export interface IAccessTokensCache { +export interface IBaseCacheOptions { type: 'ioredis' | 'local' | 'redis'; } -export interface IAccessTokensLocalCache extends IAccessTokensCache { +export interface ILocalCacheOptions extends IBaseCacheOptions { type: 'local'; } -export interface IAccessTokensIORedisCache extends IAccessTokensCache { +export interface IIORedisCacheOptions extends IBaseCacheOptions { type: 'ioredis'; - options: IIORedisCacheOptions; + options: IIORedisOptions; } -export interface IAccessTokensRedisCache extends IAccessTokensCache { +export interface IRedisCacheOptions extends IBaseCacheOptions { type: 'redis'; - options: IRedisCacheOptions; + options: IRedisOptions; } + +export type IFronteggCacheOptions = ILocalCacheOptions | IIORedisCacheOptions | IRedisCacheOptions; diff --git a/src/components/leader-election/always-leader.lock-handler.ts b/src/components/leader-election/always-leader.lock-handler.ts new file mode 100644 index 0000000..6030722 --- /dev/null +++ b/src/components/leader-election/always-leader.lock-handler.ts @@ -0,0 +1,11 @@ +import { ILockHandler } from './types'; + +export class AlwaysLeaderLockHandler implements ILockHandler { + async tryToMaintainTheLock(_key: string, _value: string, _expirationTimeMs: number): Promise { + return true; + } + + async tryToLockLeaderResource(_key: string, _value: string, _expirationTimeMs: number): Promise { + return true; + } +} diff --git a/src/components/leader-election/factory.ts b/src/components/leader-election/factory.ts new file mode 100644 index 0000000..2a48676 --- /dev/null +++ b/src/components/leader-election/factory.ts @@ -0,0 +1,31 @@ +import { CacheValue, ICacheManager, IORedisCacheManager, RedisCacheManager } from '../cache/managers'; +import { ILeadershipElectionGivenOptions } from './types'; +import { RedisLockHandler } from './redis.lock-handler'; +import { IORedisLockHandler } from './ioredis.lock-handler'; +import { AlwaysLeaderLockHandler } from './always-leader.lock-handler'; +import { LeaderElection } from './index'; + +export class LeaderElectionFactory { + static fromCache( + identifier: string, + manager: ICacheManager, + options: ILeadershipElectionGivenOptions, + ): LeaderElection { + switch (true) { + case manager instanceof RedisCacheManager: + return new LeaderElection( + new RedisLockHandler((manager as RedisCacheManager).getRedis()), + identifier, + options, + ); + case manager instanceof IORedisCacheManager: + return new LeaderElection( + new IORedisLockHandler((manager as IORedisCacheManager).getRedis()), + identifier, + options, + ); + default: + return new LeaderElection(new AlwaysLeaderLockHandler(), identifier, options); + } + } +} diff --git a/src/components/leader-election/index.spec.ts b/src/components/leader-election/index.spec.ts new file mode 100644 index 0000000..189d8a1 --- /dev/null +++ b/src/components/leader-election/index.spec.ts @@ -0,0 +1,175 @@ +import 'jest-extended'; +import { LeaderElection } from './index'; +import { mock, mockClear, mockReset } from 'jest-mock-extended'; +import { ILockHandler } from './types'; +import { SinonFakeTimers, useFakeTimers } from 'sinon'; + +describe(LeaderElection.name, () => { + let cut: LeaderElection; + let fakeTimer: SinonFakeTimers; + + const lockHandlerMock = mock(); + + beforeEach(() => { + cut = new LeaderElection(lockHandlerMock, 'foo', { + key: 'my-lock-key', + 
expireInMs: 5000, + prolongLeadershipIntervalMs: 2000, + }); + + fakeTimer = useFakeTimers(); + }); + + afterEach(() => { + cut.close(); + + // reset the mock and restore real timers + mockReset(lockHandlerMock); + fakeTimer.restore(); + }); + + describe('when the instance is not started manually', () => { + it('then the resource locking tries are not performed.', async () => { + // when + await fakeTimer.tickAsync(10000); + + // then + expect(lockHandlerMock.tryToLockLeaderResource).not.toHaveBeenCalled(); + }); + }); + + describe('given the instance is started', () => { + beforeEach(() => { + cut.start(); + }); + + it('when it is closed, then the instance no longer tries to lock the resource.', async () => { + // given: when started, it tries to lock the resource + await fakeTimer.tickAsync(6000); + expect(lockHandlerMock.tryToLockLeaderResource).toHaveBeenCalled(); + + mockClear(lockHandlerMock); + + // when: stopped + cut.close(); + + // and: some time elapsed + await fakeTimer.tickAsync(10000); + + // then: no further attempts to lock the resource are made + expect(lockHandlerMock.tryToLockLeaderResource).not.toHaveBeenCalled(); + }); + + describe('and resource is already locked', () => { + beforeEach(() => { + lockHandlerMock.tryToLockLeaderResource.mockResolvedValue(false); + }); + + it('when the instance fails to lock the resource, then "leader" event is NOT emitted.', async () => { + // given + const onLeader = jest.fn(); + cut.on('leader', onLeader); + + // when + await fakeTimer.tickAsync(6000); + + // then + expect(onLeader).not.toHaveBeenCalled(); + }); + }); + + describe('and resource is not locked', () => { + beforeEach(() => { + lockHandlerMock.tryToLockLeaderResource.mockResolvedValue(true); + }); + + it('when the instance succeeds in locking the resource, then "leader" event is emitted.', async () => { + // given + const onLeader = jest.fn(); + cut.on('leader', onLeader); + + // when + await fakeTimer.tickAsync(6000); + + // then + expect(onLeader).toHaveBeenCalled(); + }); + }); + }); + + describe('given the instance is leader', () => { + beforeEach(async () => { + lockHandlerMock.tryToLockLeaderResource.mockResolvedValue(true); + + const isLeaderAlready = new Promise((resolve) => { + cut.once('leader', resolve); + }); + + cut.start(); + + // wait for the leadership + await fakeTimer.tickAsync(100); + await isLeaderAlready; + }); + + it('then periodically it extends the resource lock.', async () => { + // when: 2000 ms (configured extension time) + await fakeTimer.tickAsync(2000); + + // then + expect(lockHandlerMock.tryToMaintainTheLock).toHaveBeenCalled(); + }); + + describe('but the resource lock cannot be extended', () => { + beforeEach(() => { + lockHandlerMock.tryToMaintainTheLock.mockResolvedValue(false); + }); + + it('when the periodic extension job executes, then the instance becomes the follower.', async () => { + const isFollower = new Promise((resolve) => { + cut.once('follower', resolve); + }); + + // when + await fakeTimer.tickAsync(5000); + + // then + await expect(isFollower).toResolve(); + }); + }); + }); + + describe('given the instance is follower', () => { + beforeEach(async () => { + lockHandlerMock.tryToLockLeaderResource.mockResolvedValue(false); + + const isFollowerAlready = new Promise((resolve) => { + cut.once('follower', resolve); + }); + + cut.start(); + + // wait until the instance becomes the follower + await fakeTimer.tickAsync(6000); + await isFollowerAlready; + }); + + describe('and the leader died and freed the resource', () => { + beforeEach(() => {
lockHandlerMock.tryToLockLeaderResource.mockResolvedValue(true); + }); + + it('when instance locked the resource, then it becomes the leader.', async () => { + const isLeader = new Promise((resolve) => { + cut.once('leader', resolve); + }); + + // when + await fakeTimer.tickAsync(6000); + + // then + await expect(isLeader).toResolve(); + }); + }); + }); +}); diff --git a/src/components/leader-election/index.ts b/src/components/leader-election/index.ts new file mode 100644 index 0000000..3308f1e --- /dev/null +++ b/src/components/leader-election/index.ts @@ -0,0 +1,109 @@ +import { + ILeadershipElectionGivenOptions, + ILeadershipElectionOptions, + ILockHandler, + LeaderElectionEvents, +} from './types'; +import { TypedEmitter } from 'tiny-typed-emitter'; + +export class LeaderElection extends TypedEmitter { + private isLeader?: boolean; + private options: ILeadershipElectionOptions; + + private electionExtensionTimeout?: NodeJS.Timeout; + private electionTimeout?: NodeJS.Timeout; + + private withDefaults(givenOptions: ILeadershipElectionGivenOptions): ILeadershipElectionOptions { + return { + expireInMs: 60_000, + ...givenOptions, + }; + } + + constructor( + private readonly lockHandler: ILockHandler, + readonly identifier: string, + options: ILeadershipElectionGivenOptions, + ) { + super(); + + // compose options + this.options = this.withDefaults(options); + } + + start(): void { + // immediately start the "fight" for leadership + this.scheduleLeadershipTry(0); + } + + close(): void { + // cleanup leadership extension (if started) + this.electionExtensionTimeout && clearTimeout(this.electionExtensionTimeout); + + // cleanup follower trials of becoming the leader (if started) + this.electionTimeout && clearTimeout(this.electionTimeout); + } + + private async becomeLeader(): Promise { + this.isLeader = true; + + // cleanup follower trials of becoming the leader + this.electionTimeout && clearTimeout(this.electionTimeout); + + // start the periodic leadership extension process + this.scheduleLeadershipExtension(); + + // notify everyone we're leader + this.emit('leader'); + } + + private becomeFollower(): void { + // do nothing for now + this.isLeader = false; + + // cleanup leadership extension (if started) + this.electionExtensionTimeout && clearTimeout(this.electionExtensionTimeout); + + // notify everyone we're follower + this.emit('follower'); + } + + private async tryToBecomeLeader(): Promise { + const becameLeader = await this.lockHandler.tryToLockLeaderResource( + this.options.key, + this.identifier, + this.options.expireInMs, + ); + + this.scheduleLeadershipTry(); + + if (becameLeader === this.isLeader) { + return; + } + + if (becameLeader) { + await this.becomeLeader(); + } else { + await this.becomeFollower(); + } + } + + private async extendLeadership(): Promise { + if (await this.lockHandler.tryToMaintainTheLock(this.options.key, this.identifier, this.options.expireInMs)) { + this.scheduleLeadershipExtension(); + } else { + await this.becomeFollower(); + } + } + + private scheduleLeadershipExtension(): void { + this.electionExtensionTimeout = setTimeout( + () => this.extendLeadership(), + this.options.prolongLeadershipIntervalMs || this.options.expireInMs / 2, + ); + } + + private scheduleLeadershipTry(timeout = this.options.expireInMs) { + this.electionTimeout = setTimeout(() => this.tryToBecomeLeader(), timeout); + } +} diff --git a/src/components/leader-election/ioredis.lock-handler.ts b/src/components/leader-election/ioredis.lock-handler.ts new file mode 100644 index 
0000000..f4d1b26 --- /dev/null +++ b/src/components/leader-election/ioredis.lock-handler.ts @@ -0,0 +1,51 @@ +import { ILockHandler } from './types'; +import IORedis from 'ioredis'; + +const NUM_OF_KEYS_IN_LUA_SCRIPT = 1; + +export class IORedisLockHandler implements ILockHandler { + constructor(private readonly redis: IORedis) {} + + private static EXTEND_LEADERSHIP_SCRIPT = + "if redis.call('GET', KEYS[1]) == ARGV[1] then return redis.call('PEXPIRE', KEYS[1], ARGV[2]) else return 0 end"; + + /** + * This method calls a Lua script that prolongs the lock on the given `leadershipResourceKey` only when the stored value + * equals the given `instanceIdentifier`; in that case the method resolves to `true`. + * + * When `leadershipResourceKey` doesn't exist, or it has a different value, then the leadership is not prolonged and + * the method resolves to `false`. + * + * Using a Lua script ensures the atomicity of the whole process. Without it there is no guarantee that another Redis + * client doesn't execute an operation on `leadershipResourceKey` in between the `GET` and `PEXPIRE` commands. + */ + async tryToMaintainTheLock( + leadershipResourceKey: string, + instanceIdentifier: string, + expirationTimeMs: number, + ): Promise<boolean> { + const extended = await this.redis.eval( + IORedisLockHandler.EXTEND_LEADERSHIP_SCRIPT, + NUM_OF_KEYS_IN_LUA_SCRIPT, + leadershipResourceKey, + instanceIdentifier, + expirationTimeMs, + ); + + return (extended as number) > 0; + } + + /** + * This stores the `instanceIdentifier` value under `leadershipResourceKey` only when the key doesn't exist. If the value + * is stored, then the TTL is also set to `expirationTimeMs` and the method resolves to `true`. + * + * Otherwise the method resolves to `false` and no change to `leadershipResourceKey` is introduced. + */ + async tryToLockLeaderResource( + leadershipResourceKey: string, + instanceIdentifier: string, + expirationTimeMs: number, + ): Promise<boolean> { + return (await this.redis.set(leadershipResourceKey, instanceIdentifier, 'PX', expirationTimeMs, 'NX')) !== null; + } +} diff --git a/src/components/leader-election/redis-based.lock-handlers.spec.ts b/src/components/leader-election/redis-based.lock-handlers.spec.ts new file mode 100644 index 0000000..23184e7 --- /dev/null +++ b/src/components/leader-election/redis-based.lock-handlers.spec.ts @@ -0,0 +1,125 @@ +import 'jest-extended'; +import { IORedisLockHandler } from './ioredis.lock-handler'; +import { RedisLockHandler } from './redis.lock-handler'; +import IORedis from 'ioredis'; +import { ILockHandler } from './types'; +import { createClient, RedisClientType } from 'redis'; + +let testRedisConnection: IORedis; + +const RESOURCE_KEY = 'key_to_lock'; + +beforeAll(() => { + testRedisConnection = new IORedis(36279, 'localhost'); +}); + +afterEach(async () => { + await testRedisConnection.del(RESOURCE_KEY); +}); + +describe.each([ + { + classname: IORedisLockHandler.name, + factory: async () => { + const redis = new IORedis(36279, 'localhost'); + + return { + instance: new IORedisLockHandler(redis), + closeFn: () => redis.quit(), + }; + }, + }, + { + classname: RedisLockHandler.name, + factory: async () => { + const redis: RedisClientType = createClient({ url: 'redis://localhost:36279' }); + await redis.connect(); + + return { + instance: new RedisLockHandler(redis), + closeFn: () => redis.quit(), + }; + }, + }, +])('$classname lock handler', ({ factory }) => { + let cut: ILockHandler; + let close: () => Promise<unknown>; + + beforeAll(async () => { + const { instance, closeFn } = await factory(); + + cut = instance; + close =
closeFn; + }); + + describe('given the resource is not locked', () => { + it('when .tryToLockLeaderResource(..) is called, then it resolves to TRUE and given value is written to resource key with given TTL.', async () => { + // when & then + await expect(cut.tryToLockLeaderResource(RESOURCE_KEY, 'bar', 1000)).resolves.toBeTruthy(); + + // then + await expect(testRedisConnection.get(RESOURCE_KEY)).resolves.toEqual('bar'); + + // and: key is about to expire + const pttl = await testRedisConnection.pttl(RESOURCE_KEY); + + expect(pttl).toBeGreaterThan(0); + expect(pttl).toBeLessThanOrEqual(1000); + }); + + it('when .tryToMaintainTheLock(..) is called, then it resolves to FALSE and no value is written to resource key.', async () => { + // when & then + await expect(cut.tryToMaintainTheLock(RESOURCE_KEY, 'bar', 1000)).resolves.toBeFalsy(); + + // then + await expect(testRedisConnection.exists(RESOURCE_KEY)).resolves.toEqual(0); + }); + }); + + describe('given the resource is already locked', () => { + const ALREADY_LOCKED_VALUE = 'foo'; + + beforeEach(async () => { + // given + await testRedisConnection.set(RESOURCE_KEY, ALREADY_LOCKED_VALUE); + }); + + it('when .tryToLockLeaderResource(..) is called, then it resolves to FALSE and resource identifier is not changed.', async () => { + // when & then + await expect(cut.tryToLockLeaderResource(RESOURCE_KEY, 'bar', 1000)).resolves.toBeFalsy(); + + // then + await expect(testRedisConnection.get(RESOURCE_KEY)).resolves.toEqual('foo'); + }); + + describe('when .tryToMaintainTheLock(..) is called', () => { + it('with the same value as already stored, then it resolves to TRUE and the resource TTL is updated.', async () => { + // when & then + await expect(cut.tryToMaintainTheLock(RESOURCE_KEY, ALREADY_LOCKED_VALUE, 1000)).resolves.toBeTruthy(); + + // then: key is about to expire + const pttl = await testRedisConnection.pttl(RESOURCE_KEY); + expect(pttl).toBeGreaterThan(0); + expect(pttl).toBeLessThanOrEqual(1000); + + // and: the value is the same + await expect(testRedisConnection.get(RESOURCE_KEY)).resolves.toEqual(ALREADY_LOCKED_VALUE); + }); + + it('with different value than already stored, then it resolves to FALSE and the resource is intact.', async () => { + // when & then + await expect(cut.tryToMaintainTheLock(RESOURCE_KEY, 'bar', 1000)).resolves.toBeFalsy(); + + // then: key is still in non-expiry mode + await expect(testRedisConnection.pttl(RESOURCE_KEY)).resolves.toEqual(-1); + + // and: the value is the same + await expect(testRedisConnection.get(RESOURCE_KEY)).resolves.toEqual(ALREADY_LOCKED_VALUE); + }); + }); + }); + + afterAll(() => close()); +}); + +afterAll(() => testRedisConnection.quit()); diff --git a/src/components/leader-election/redis.lock-handler.ts b/src/components/leader-election/redis.lock-handler.ts new file mode 100644 index 0000000..879b734 --- /dev/null +++ b/src/components/leader-election/redis.lock-handler.ts @@ -0,0 +1,48 @@ +import { ILockHandler } from './types'; +import { RedisClientType } from 'redis'; + +export class RedisLockHandler implements ILockHandler { + constructor(private readonly redis: RedisClientType) {} + + private static EXTEND_LEADERSHIP_SCRIPT = + "if redis.call('GET', KEYS[1]) == ARGV[1] then return redis.call('PEXPIRE', KEYS[1], ARGV[2]) else return 0 end"; + + /** + * This method calls the Lua script that prolongs the lock on given `leadershipResourceKey` only, when stored value + * equals to given `instanceIdentifier` and then method resolves to `true`. 
+ * + * When `leadershipResourceKey` doesn't exist, or it has a different value, then the leadership is not prolonged and + * the method resolves to `false`. + * + * Using a Lua script ensures the atomicity of the whole process. Without it there is no guarantee that another Redis + * client doesn't execute an operation on `leadershipResourceKey` in between the `GET` and `PEXPIRE` commands. + */ + async tryToMaintainTheLock( + leadershipResourceKey: string, + instanceIdentifier: string, + expirationTimeMs: number, + ): Promise<boolean> { + const extended = await this.redis.EVAL(RedisLockHandler.EXTEND_LEADERSHIP_SCRIPT, { + keys: [leadershipResourceKey], + arguments: [instanceIdentifier, expirationTimeMs.toString()], + }); + + return (extended as number) > 0; + } + + /** + * This stores the `instanceIdentifier` value under `leadershipResourceKey` only when the key doesn't exist. If the value + * is stored, then the TTL is also set to `expirationTimeMs` and the method resolves to `true`. + * + * Otherwise the method resolves to `false` and no change to `leadershipResourceKey` is introduced. + */ + async tryToLockLeaderResource( + leadershipResourceKey: string, + instanceIdentifier: string, + expirationTimeMs: number, + ): Promise<boolean> { + return ( + (await this.redis.SET(leadershipResourceKey, instanceIdentifier, { PX: expirationTimeMs, NX: true })) !== null + ); + } +} diff --git a/src/components/leader-election/types.ts b/src/components/leader-election/types.ts new file mode 100644 index 0000000..6b64690 --- /dev/null +++ b/src/components/leader-election/types.ts @@ -0,0 +1,39 @@ +export interface ILockHandler { + /** + * This method attempts to lock the `leadershipResourceKey` by writing its `instanceIdentifier` to it. The lock should + * not be permanent, but limited to the given `expirationTimeMs`. The lock can then be kept (extended) by calling the + * `tryToMaintainTheLock` method. + */ + tryToLockLeaderResource( + leadershipResourceKey: string, + instanceIdentifier: string, + expirationTimeMs: number, + ): Promise<boolean>; + + /** + * This method prolongs the `leadershipResourceKey` time-to-live only when the key contains a value equal to the + * given `instanceIdentifier`. Each instance competing for the leadership role needs to have a unique identifier. + * + * This way we know that only the leader process can prolong its leadership. If the leader dies, for any reason, no other + * process can extend its leadership.
+ */ + tryToMaintainTheLock( + leadershipResourceKey: string, + instanceIdentifier: string, + expirationTimeMs: number, + ): Promise<boolean>; +} + +export interface ILeadershipElectionOptions { + key: string; + expireInMs: number; + prolongLeadershipIntervalMs?: number; +} + +export type ILeadershipElectionGivenOptions = Partial<ILeadershipElectionOptions> & + Pick<ILeadershipElectionOptions, 'key'>; + +export interface LeaderElectionEvents { + leader: () => void; + follower: () => void; +} diff --git a/src/utils/index.ts b/src/utils/index.ts index 1a7d418..ca4b470 100644 --- a/src/utils/index.ts +++ b/src/utils/index.ts @@ -2,16 +2,13 @@ import Logger from '../components/logger'; export interface RetryOptions { numberOfTries: number; - secondsDelayRange: { + delayRangeMs: { min: number; max: number; }; } -export const retry = async ( - func: () => Promise<unknown> | unknown, - { numberOfTries, secondsDelayRange }: RetryOptions, -) => { +export const retry = async (func: () => Promise<unknown> | unknown, { numberOfTries, delayRangeMs }: RetryOptions) => { try { return await func(); } catch (error) { @@ -19,11 +16,11 @@ export const retry = async ( if (numberOfTries === 1) { throw error; } - const delayTime = - Math.floor(Math.random() * (secondsDelayRange.max - secondsDelayRange.min + 1)) + secondsDelayRange.min; - Logger.debug(`trying again in ${delayTime} seconds`); - await delay(delayTime * 1000); - return retry(func, { numberOfTries: numberOfTries - 1, secondsDelayRange }); + const delayTime = Math.floor(Math.random() * (delayRangeMs.max - delayRangeMs.min + 1)) + delayRangeMs.min; + Logger.debug(`trying again in ${delayTime} ms`); + await delay(delayTime); + + return retry(func, { numberOfTries: numberOfTries - 1, delayRangeMs }); } }; diff --git a/src/utils/package-loader.ts b/src/utils/package-loader.ts index 179bb3f..709a9fd 100644 --- a/src/utils/package-loader.ts +++ b/src/utils/package-loader.ts @@ -1,7 +1,7 @@ import * as path from 'path'; export class PackageUtils { - public static loadPackage(name: string): unknown { + public static loadPackage<T>(name: string): T { const packagePath = path.resolve(process.cwd() + '/node_modules/' + name); try {