diff --git a/CHANGELOG.md b/CHANGELOG.md index 38b96e01..903827ce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,8 @@ ## [Unreleased](https://github.com/openfga/js-sdk/compare/v0.9.1...HEAD) +- feat: add support for [streamedListObjects](https://openfga.dev/api/service#/Relationship%20Queries/StreamedListObjects). See [documentation](#streamed-list-objects) + ## v0.9.1 ### [v0.9.1](https://github.com/openfga/js-sdk/compare/v0.9.0...v0.9.1) (2025-11-05) diff --git a/README.md b/README.md index 70c5b631..ff49a3c4 100644 --- a/README.md +++ b/README.md @@ -739,6 +739,35 @@ const response = await fgaClient.listObjects({ // response.objects = ["document:0192ab2a-d83f-756d-9397-c5ed9f3cb69a"] ``` +##### Streamed List Objects + +List objects of a particular type that the user has access to, using the streaming API. + +The Streamed ListObjects API is very similar to the ListObjects API, with two key differences: +1. **Streaming Results**: Instead of collecting all objects before returning a response, it streams them to the client as they are collected. +2. **No Pagination Limit**: Returns all results without the 1000-object limit of the standard ListObjects API. + +This is particularly useful when querying **computed relations** that may return large result sets. + +[API Documentation](https://openfga.dev/api/service#/Relationship%20Queries/StreamedListObjects) + +```javascript +const options = {}; + +// To override the authorization model id for this request +options.authorizationModelId = "01GXSA8YR785C4FYS3C0RTG7B1"; + +const objects = []; +for await (const response of fgaClient.streamedListObjects( + { user: "user:anne", relation: "can_read", type: "document" }, + { consistency: ConsistencyPreference.HigherConsistency } +)) { + objects.push(response.object); +} + +// objects = ["document:0192ab2a-d83f-756d-9397-c5ed9f3cb69a"] +``` + ##### List Relations List the relations a user has with an object. 
This wraps around [BatchCheck](#batchcheck) to allow checking multiple relationships at once. diff --git a/api.ts b/api.ts index bee86ba7..d04d1a6d 100644 --- a/api.ts +++ b/api.ts @@ -20,6 +20,7 @@ import { serializeDataIfNeeded, toPathString, createRequestFunction, + createStreamingRequestFunction, RequestArgs, CallResult, PromiseResult @@ -383,6 +384,45 @@ export const OpenFgaApiAxiosParamCreator = function (configuration: Configuratio options: localVarRequestOptions, }; }, + /** + * The Streamed ListObjects API is very similar to the ListObjects API, with two differences: + * 1. Instead of collecting all objects before returning a response, it streams them to the client as they are collected. + * 2. The number of results returned is only limited by the execution timeout specified in the flag OPENFGA_LIST_OBJECTS_DEADLINE. + * @summary Stream all objects of the given type that the user has a relation with + * @param {string} storeId + * @param {ListObjectsRequest} body + * @param {*} [options] Override http request option. + * @throws { FgaError } + */ + streamedListObjects: (storeId: string, body: ListObjectsRequest, options: any = {}): RequestArgs => { + // verify required parameter 'storeId' is not null or undefined + assertParamExists("streamedListObjects", "storeId", storeId); + // verify required parameter 'body' is not null or undefined + assertParamExists("streamedListObjects", "body", body); + const localVarPath = "/stores/{store_id}/streamed-list-objects" + .replace(`{${"store_id"}}`, encodeURIComponent(String(storeId))); + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: "POST", ...baseOptions, ...options }; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + localVarHeaderParameter["Content-Type"] = "application/json"; + + setSearchParams(localVarUrlObj, localVarQueryParameter, options.query); + localVarRequestOptions.headers = { ...localVarHeaderParameter, ...options.headers }; + localVarRequestOptions.data = serializeDataIfNeeded(body, localVarRequestOptions); + + return { + url: toPathString(localVarUrlObj), + options: localVarRequestOptions, + }; + }, /** * Returns a paginated list of OpenFGA stores and a continuation token to get additional stores. The continuation token will be empty if there are no more stores. * @summary List all stores @@ -912,6 +952,22 @@ export const OpenFgaApiFp = function(configuration: Configuration, credentials: ...TelemetryAttributes.fromRequestBody(body) }); }, + /** + * The Streamed ListObjects API is very similar to the ListObjects API, with two differences: + * 1. Instead of collecting all objects before returning a response, it streams them to the client as they are collected. + * 2. The number of results returned is only limited by the execution timeout specified in the flag OPENFGA_LIST_OBJECTS_DEADLINE. + * @summary Stream all objects of the given type that the user has a relation with + * @param {string} storeId + * @param {ListObjectsRequest} body + * @param {*} [options] Override http request option. 
+ * @throws { FgaError } + */ + async streamedListObjects(storeId: string, body: ListObjectsRequest, options?: any): Promise<(axios?: AxiosInstance) => Promise<any>> { + const localVarAxiosArgs = localVarAxiosParamCreator.streamedListObjects(storeId, body, options); + return createStreamingRequestFunction(localVarAxiosArgs, globalAxios, configuration, credentials, { + [TelemetryAttribute.FgaClientRequestMethod]: "StreamedListObjects" + }); + }, /** * Returns a paginated list of OpenFGA stores and a continuation token to get additional stores. The continuation token will be empty if there are no more stores. * @summary List all stores @@ -1156,6 +1212,19 @@ export const OpenFgaApiFactory = function (configuration: Configuration, credent listObjects(storeId: string, body: ListObjectsRequest, options?: any): PromiseResult<ListObjectsResponse> { return localVarFp.listObjects(storeId, body, options).then((request) => request(axios)); }, + /** + * The Streamed ListObjects API is very similar to the ListObjects API, with two differences: + * 1. Instead of collecting all objects before returning a response, it streams them to the client as they are collected. + * 2. The number of results returned is only limited by the execution timeout specified in the flag OPENFGA_LIST_OBJECTS_DEADLINE. + * @summary Stream all objects of the given type that the user has a relation with + * @param {string} storeId + * @param {ListObjectsRequest} body + * @param {*} [options] Override http request option. + * @throws { FgaError } + */ + streamedListObjects(storeId: string, body: ListObjectsRequest, options?: any): Promise<any> { + return localVarFp.streamedListObjects(storeId, body, options).then((request) => request(axios)); + }, /** * Returns a paginated list of OpenFGA stores and a continuation token to get additional stores. 
* @summary List all stores @@ -1370,6 +1439,20 @@ export class OpenFgaApi extends BaseAPI { return OpenFgaApiFp(this.configuration, this.credentials).listObjects(storeId, body, options).then((request) => request(this.axios)); } + /** + * The Streamed ListObjects API is very similar to the ListObjects API, with two differences: + * 1. Instead of collecting all objects before returning a response, it streams them to the client as they are collected. + * 2. The number of results returned is only limited by the execution timeout specified in the flag OPENFGA_LIST_OBJECTS_DEADLINE. + * @summary Stream all objects of the given type that the user has a relation with + * @param {string} storeId + * @param {ListObjectsRequest} body + * @param {*} [options] Override http request option. + * @throws { FgaError } + */ + public streamedListObjects(storeId: string, body: ListObjectsRequest, options?: any): Promise<any> { + return OpenFgaApiFp(this.configuration, this.credentials).streamedListObjects(storeId, body, options).then((request) => request(this.axios)); + } + /** * Returns a paginated list of OpenFGA stores and a continuation token to get additional stores. The continuation token will be empty if there are no more stores. * @summary List all stores diff --git a/apiModel.ts b/apiModel.ts index b5a8090b..2ed9fd42 100644 --- a/apiModel.ts +++ b/apiModel.ts @@ -860,6 +860,21 @@ export interface ListObjectsResponse { */ objects: Array<string>; } + +/** + * The response for a StreamedListObjects RPC. 
+ * @export + * @interface StreamedListObjectsResponse + */ +export interface StreamedListObjectsResponse { + /** + * + * @type {string} + * @memberof StreamedListObjectsResponse + */ + object: string; +} + /** * * @export diff --git a/client.ts b/client.ts index f55b301e..00805c32 100644 --- a/client.ts +++ b/client.ts @@ -21,6 +21,7 @@ import { GetStoreResponse, ListObjectsRequest, ListObjectsResponse, + StreamedListObjectsResponse, ListStoresResponse, ListUsersRequest, ListUsersResponse, @@ -50,6 +51,7 @@ import { } from "./utils"; import { isWellFormedUlidString } from "./validation"; import SdkConstants from "./constants"; +import { parseNDJSONStream } from "./streaming"; export type UserClientConfigurationParams = UserConfigurationParams & { storeId?: string; @@ -847,6 +849,51 @@ export class OpenFgaClient extends BaseAPI { }, options); } + /** + * StreamedListObjects - Stream all objects of a particular type that the user has a certain relation to (evaluates) + * + * Note: This method is Node.js only. Streams are supported via the axios API layer. + * The response will be streamed as newline-delimited JSON objects. 
+ * + * @param {ClientListObjectsRequest} body + * @param {ClientRequestOptsWithConsistency} [options] + * @param {string} [options.authorizationModelId] - Overrides the authorization model id in the configuration + * @param {object} [options.headers] - Custom headers to send alongside the request + * @param {ConsistencyPreference} [options.consistency] - The consistency preference to use + * @param {object} [options.retryParams] - Override the retry parameters for this request + * @param {number} [options.retryParams.maxRetry] - Override the max number of retries on each API request + * @param {number} [options.retryParams.minWaitInMs] - Override the minimum wait before a retry is initiated + * @returns {AsyncGenerator} An async generator that yields objects as they are received + */ + async *streamedListObjects(body: ClientListObjectsRequest, options: ClientRequestOptsWithConsistency = {}): AsyncGenerator<StreamedListObjectsResponse, void, undefined> { + const stream = await this.api.streamedListObjects(this.getStoreId(options)!, { + authorization_model_id: this.getAuthorizationModelId(options), + user: body.user, + relation: body.relation, + type: body.type, + context: body.context, + contextual_tuples: { tuple_keys: body.contextualTuples || [] }, + consistency: options.consistency + }, options); + + // Unwrap axios CallResult to get the raw Node.js stream when needed + const source = stream?.$response?.data ?? 
stream; + + // Parse the Node.js stream + try { + for await (const item of parseNDJSONStream(source as any)) { + if (item && item.result && item.result.object) { + yield { object: item.result.object } as StreamedListObjectsResponse; + } + } + } finally { + // Ensure underlying HTTP connection closes if consumer stops early + if (source && typeof source.destroy === "function") { + try { source.destroy(); } catch { } + } + } + } + /** * ListRelations - List all the relations a user has with an object (evaluates) * @param {object} listRelationsRequest diff --git a/common.ts b/common.ts index f48c28ef..04a86953 100644 --- a/common.ts +++ b/common.ts @@ -342,6 +342,77 @@ export const createRequestFunction = function (axiosArgs: RequestArgs, axiosInstance: AxiosInstance, configuration: Configuration, credentials: Credentials, methodAttributes: Record<string, string | number> = {}) { + return result; + }; +}; + +/** + * creates an axios streaming request function that returns the raw response stream + * for incremental parsing (used by streamedListObjects) + */ +export const createStreamingRequestFunction = function (axiosArgs: RequestArgs, axiosInstance: AxiosInstance, configuration: Configuration, credentials: Credentials, methodAttributes: Record<string, string | number> = {}) { + configuration.isValid(); + + const retryParams = axiosArgs.options?.retryParams ? axiosArgs.options?.retryParams : configuration.retryParams; + const maxRetry: number = retryParams ? retryParams.maxRetry : 0; + const minWaitInMs: number = retryParams ? 
retryParams.minWaitInMs : 0; + + const start = performance.now(); + + return async (axios: AxiosInstance = axiosInstance): Promise<any> => { + await setBearerAuthToObject(axiosArgs.options.headers, credentials!); + + const url = configuration.getBasePath() + axiosArgs.url; + + const axiosRequestArgs = { ...axiosArgs.options, responseType: "stream", url: url }; + const wrappedResponse = await attemptHttpRequest(axiosRequestArgs, { + maxRetry, + minWaitInMs, + }, axios); + const response = wrappedResponse?.response; + + const result: any = response?.data; // raw stream + + let attributes: StringIndexable = {}; + + attributes = TelemetryAttributes.fromRequest({ + userAgent: configuration.baseOptions?.headers["User-Agent"], + httpMethod: axiosArgs.options?.method, + url, + resendCount: wrappedResponse?.retries, + start: start, + credentials: credentials, + attributes: methodAttributes, + }); + + attributes = TelemetryAttributes.fromResponse({ + response, + attributes, + }); + + const serverRequestDuration = attributes[TelemetryAttribute.HttpServerRequestDuration]; + if (configuration.telemetry?.metrics?.histogramQueryDuration && typeof serverRequestDuration !== "undefined") { + configuration.telemetry.recorder.histogram( + TelemetryHistograms.queryDuration, + parseInt(attributes[TelemetryAttribute.HttpServerRequestDuration] as string, 10), + TelemetryAttributes.prepare( + attributes, + configuration.telemetry.metrics.histogramQueryDuration.attributes + ) + ); + } + + if (configuration.telemetry?.metrics?.histogramRequestDuration) { + configuration.telemetry.recorder.histogram( + TelemetryHistograms.requestDuration, + attributes[TelemetryAttribute.HttpClientRequestDuration], + TelemetryAttributes.prepare( + attributes, + configuration.telemetry.metrics.histogramRequestDuration.attributes + ) + ); + } + + return result; + }; +}; \ No newline at end of file diff --git a/example/streamed-list-objects/README.md b/example/streamed-list-objects/README.md new file mode 100644 index 
00000000..39c1cb83 --- /dev/null +++ b/example/streamed-list-objects/README.md @@ -0,0 +1,96 @@ +# Streamed List Objects Example + +Demonstrates using `streamedListObjects` to retrieve objects via the streaming API. + +## What is StreamedListObjects? + +The Streamed ListObjects API is very similar to the ListObjects API, with two key differences: + +1. **Streaming Results**: Instead of collecting all objects before returning a response, it streams them to the client as they are collected. +2. **No Pagination Limit**: Returns all results without the 1000-object limit of the standard ListObjects API. + +This makes it ideal for scenarios where you need to retrieve large numbers of objects, especially when querying computed relations. + +## Prerequisites +- OpenFGA server running on `http://localhost:8080` (or set `FGA_API_URL`) + +## Running +```bash +# From repo root +npm run build +cd example/streamed-list-objects +npm install +npm start +``` + +## What it does +- Creates a temporary store +- Writes an authorization model with **computed relations** +- Adds 2000 tuples (1000 owners + 1000 viewers) +- Queries the **computed `can_read` relation** via `streamedListObjects` +- Shows all 2000 results (demonstrating computed relations) +- Shows progress (first 3 objects and every 500th) +- Cleans up the store + +## Authorization Model + +The example demonstrates OpenFGA's **computed relations**: + +``` +type user + +type document + relations + define owner: [user] + define viewer: [user] + define can_read: owner or viewer +``` + +**Why this matters:** +- We write tuples to `owner` and `viewer` (base permissions) +- We query `can_read` (computed from owner OR viewer) + +**Example flow:** +1. Write: `user:anne owner document:1-1000` +2. Write: `user:anne viewer document:1001-2000` +3. Query: `streamedListObjects(user:anne, relation:can_read, type:document)` +4. 
Result: All 2000 documents (because `can_read = owner OR viewer`) + +## Key Features Demonstrated + +### Async Generator Pattern + +The `streamedListObjects` method returns an async generator, which is the idiomatic Node.js way to handle streaming data: + +```javascript +for await (const response of fgaClient.streamedListObjects(request)) { + console.log(`Received: ${response.object}`); +} +``` + +### Early Break and Cleanup + +The streaming implementation properly handles early termination: + +```javascript +for await (const response of fgaClient.streamedListObjects(request)) { + console.log(response.object); + if (someCondition) { + break; // Stream is automatically cleaned up + } +} +``` + +## Benefits Over ListObjects + +- **No Pagination**: Retrieve all objects in a single streaming request +- **Lower Memory**: Objects are processed as they arrive, not held in memory +- **Early Termination**: Can stop streaming at any point without wasting resources +- **Better for Large Results**: Ideal when expecting hundreds or thousands of objects + +## Performance Considerations + +- Streaming starts immediately - no need to wait for all results +- HTTP connection remains open during streaming +- Properly handles cleanup if consumer stops early +- Supports all the same options as `listObjects` (consistency, contextual tuples, etc.) 
diff --git a/example/streamed-list-objects/package-lock.json b/example/streamed-list-objects/package-lock.json new file mode 100644 index 00000000..29d08928 --- /dev/null +++ b/example/streamed-list-objects/package-lock.json @@ -0,0 +1,423 @@ +{ + "name": "streamed-list-objects", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "streamed-list-objects", + "version": "1.0.0", + "license": "Apache-2.0", + "dependencies": { + "@openfga/sdk": "^0.9.0", + "@openfga/syntax-transformer": "^0.2.0" + }, + "engines": { + "node": ">=16.15.0" + } + }, + "node_modules/@openfga/sdk": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/@openfga/sdk/-/sdk-0.9.1.tgz", + "integrity": "sha512-Mv/U4DC9tmX3g1BwXhwPkihy38UVjcoAgn1eqKevTy7p1vlFXlsGEpIpz67eQKRK7pZ2akX/h9rOcfHHkg6N4A==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api": "^1.9.0", + "axios": "^1.12.2", + "jose": "^5.10.0", + "tiny-async-pool": "^2.1.0" + }, + "engines": { + "node": ">=16.15.0" + } + }, + "node_modules/@openfga/syntax-transformer": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@openfga/syntax-transformer/-/syntax-transformer-0.2.0.tgz", + "integrity": "sha512-QXFs0167U1b1gp56Bxhy+YUEmFMBE1dQunU/kU8OWaXkrpb9TC1uAX8QazmMOYrEP2Qx6rBVBypuMmRKM20uJA==", + "license": "Apache-2.0", + "dependencies": { + "ajv": "^8.17.1", + "antlr4": "^4.13.2", + "yaml": "^2.8.0" + } + }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "license": "Apache-2.0", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", 
+ "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/antlr4": { + "version": "4.13.2", + "resolved": "https://registry.npmjs.org/antlr4/-/antlr4-4.13.2.tgz", + "integrity": "sha512-QiVbZhyy4xAZ17UPEuG3YTOt8ZaoeOR1CvEAqrEsDBsOqINslaB147i9xqljZqoyf5S+EUlGStaj+t22LT9MOg==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=16" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/axios": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.2.tgz", + "integrity": "sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + 
}, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": 
"sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + 
"node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": 
"sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/jose": { + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/jose/-/jose-5.10.0.tgz", + "integrity": "sha512-s+3Al/p9g32Iq+oqXxkW//7jk2Vig6FF1CFqzVXoTUXt2qz89YWbL+OwS17NFYEvxC35n0FKeGO2LGYSxeM2Gg==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "license": "MIT" + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mime-db": { + "version": 
"1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "license": "MIT" + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tiny-async-pool": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tiny-async-pool/-/tiny-async-pool-2.1.0.tgz", + "integrity": "sha512-ltAHPh/9k0STRQqaoUX52NH4ZQYAJz24ZAEwf1Zm+HYg3l9OXTWeqWKyYsHu40wF/F0rxd2N2bk5sLvX2qlSvg==", + "license": "MIT" + }, + "node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + } + } + } +} diff --git a/example/streamed-list-objects/package.json b/example/streamed-list-objects/package.json new file mode 100644 index 00000000..2ef7198c 
--- /dev/null +++ b/example/streamed-list-objects/package.json @@ -0,0 +1,19 @@ +{ + "name": "streamed-list-objects", + "private": "true", + "version": "1.0.0", + "description": "Example demonstrating streamedListObjects", + "author": "OpenFGA", + "license": "Apache-2.0", + "scripts": { + "start": "node streamedListObjects.mjs" + }, + "dependencies": { + "@openfga/sdk": "^0.9.0", + "@openfga/syntax-transformer": "^0.2.0" + }, + "engines": { + "node": ">=16.15.0" + } +} + diff --git a/example/streamed-list-objects/streamedListObjects.mjs b/example/streamed-list-objects/streamedListObjects.mjs new file mode 100644 index 00000000..2c69968e --- /dev/null +++ b/example/streamed-list-objects/streamedListObjects.mjs @@ -0,0 +1,90 @@ +import { ClientConfiguration, OpenFgaClient, ConsistencyPreference } from "../../dist/index.js"; +import { transformer } from "@openfga/syntax-transformer"; + +const apiUrl = process.env.FGA_API_URL || "http://localhost:8080"; + +async function main() { + const client = new OpenFgaClient(new ClientConfiguration({ apiUrl })); + + console.log("Creating temporary store"); + const { id: storeId } = await client.createStore({ name: "streamed-list-objects" }); + + const clientWithStore = new OpenFgaClient(new ClientConfiguration({ apiUrl, storeId })); + + const dslString = ` + model + schema 1.1 + + type user + + type document + relations + define owner: [user] + define viewer: [user] + define can_read: owner or viewer + `; + + const model = transformer.transformDSLToJSONObject(dslString); + + console.log("Writing authorization model"); + const { authorization_model_id: authorizationModelId } = await clientWithStore.writeAuthorizationModel(model); + + const fga = new OpenFgaClient(new ClientConfiguration({ apiUrl, storeId, authorizationModelId })); + + console.log("Writing tuples (1000 as owner, 1000 as viewer)"); + + // Write in batches of 100 (OpenFGA limit) + const batchSize = 100; + let totalWritten = 0; + + // Write 1000 documents where anne 
is the owner + for (let batch = 0; batch < 10; batch++) { + const writes = []; + for (let i = 1; i <= batchSize; i++) { + writes.push({ user: "user:anne", relation: "owner", object: `document:${batch * batchSize + i}` }); + } + await fga.write({ writes }); + totalWritten += writes.length; + } + + // Write 1000 documents where anne is a viewer + for (let batch = 0; batch < 10; batch++) { + const writes = []; + for (let i = 1; i <= batchSize; i++) { + writes.push({ user: "user:anne", relation: "viewer", object: `document:${1000 + batch * batchSize + i}` }); + } + await fga.write({ writes }); + totalWritten += writes.length; + } + + console.log(`Wrote ${totalWritten} tuples`); + + console.log("Streaming objects via computed 'can_read' relation..."); + let count = 0; + for await (const response of fga.streamedListObjects( + { user: "user:anne", relation: "can_read", type: "document" }, // can_read is computed: owner OR viewer + { consistency: ConsistencyPreference.HigherConsistency } + )) { + count++; + if (count <= 3 || count % 500 === 0) { + console.log(`- ${response.object}`); + } + } + console.log(`\u2713 Streamed ${count} objects`); + + console.log("Cleaning up..."); + await fga.deleteStore(); + console.log("Done"); +} + +main().catch(err => { + // Avoid logging sensitive data; only display generic info + if (err && err.name === "FgaValidationError") { + console.error("Validation error in configuration. Please check your configuration for errors."); + } else if (err.message && err.message.includes("ECONNREFUSED")) { + console.error("Is OpenFGA server running on", apiUrl, "?"); + } else { + console.error("An error occurred.", err && err.name ? 
`[${err.name}]` : ""); + } + process.exit(1); +}); \ No newline at end of file diff --git a/index.ts b/index.ts index 291e2944..2b94e822 100644 --- a/index.ts +++ b/index.ts @@ -24,5 +24,5 @@ export * from "./telemetry/counters"; export * from "./telemetry/histograms"; export * from "./telemetry/metrics"; export * from "./errors"; - +export { parseNDJSONStream } from "./streaming"; diff --git a/streaming.ts b/streaming.ts new file mode 100644 index 00000000..e5e71fa5 --- /dev/null +++ b/streaming.ts @@ -0,0 +1,185 @@ +/** + * JavaScript and Node.js SDK for OpenFGA + * + * API version: 1.x + * Website: https://openfga.dev + * Documentation: https://openfga.dev/docs + * Support: https://openfga.dev/community + * License: [Apache-2.0](https://github.com/openfga/js-sdk/blob/main/LICENSE) + * + * NOTE: This file was auto generated by OpenAPI Generator (https://openapi-generator.tech). DO NOT EDIT. + */ + +import type { Readable } from "node:stream"; + +// Helper: create async iterable from classic EventEmitter-style Readable streams +const createAsyncIterableFromReadable = (readable: any): AsyncIterable<any> => { + return { + [Symbol.asyncIterator](): AsyncIterator<any> { + const chunkQueue: any[] = []; + const pendings: Array<{ resolve: (v: IteratorResult<any>) => void; reject: (e?: any) => void }> = []; + let ended = false; + let error: any = null; + + const onData = (chunk: any) => { + if (pendings.length > 0) { + const { resolve } = pendings.shift()!; + resolve({ value: chunk, done: false }); + } else { + chunkQueue.push(chunk); + } + }; + + const onEnd = () => { + if (error) return; // Don't process end if error already occurred + ended = true; + while (pendings.length > 0) { + const { resolve } = pendings.shift()!; + resolve({ value: undefined, done: true }); + } + }; + + const onError = (err: any) => { + error = err; + while (pendings.length > 0) { + const { reject } = pendings.shift()!; + reject(err); + } + cleanup(); + }; + + readable.on("data", onData); + 
readable.once("end", onEnd); + readable.once("error", onError); + + const cleanup = () => { + readable.off("data", onData); + readable.off("end", onEnd); + readable.off("error", onError); + }; + + return { + next(): Promise<IteratorResult<any>> { + if (error) { + return Promise.reject(error); + } + if (chunkQueue.length > 0) { + const value = chunkQueue.shift(); + return Promise.resolve({ value, done: false }); + } + if (ended) { + cleanup(); + return Promise.resolve({ value: undefined, done: true }); + } + return new Promise<IteratorResult<any>>((resolve, reject) => { + pendings.push({ resolve, reject }); + }); + }, + return(): Promise<IteratorResult<any>> { + try { + cleanup(); + } finally { + if (readable && typeof readable.destroy === "function") { + readable.destroy(); + } + } + return Promise.resolve({ value: undefined, done: true }); + }, + throw(e?: any): Promise<IteratorResult<any>> { + try { + cleanup(); + } finally { + if (readable && typeof readable.destroy === "function") { + readable.destroy(e); + } + } + return Promise.reject(e); + } + }; + } + }; +}; + +/** + * Parse newline-delimited JSON (NDJSON) from a Node.js readable stream + * @param stream - Node.js readable stream, AsyncIterable, string, or Buffer + * @returns AsyncGenerator that yields parsed JSON objects + */ +export async function* parseNDJSONStream( + stream: Readable | AsyncIterable<any> | string | Uint8Array | Buffer +): AsyncGenerator<any, void, unknown> { + const decoder = new TextDecoder("utf-8"); + let buffer = ""; + + // If stream is actually a string or Buffer-like, handle as whole payload + const isString = typeof stream === "string"; + const isBuffer = typeof Buffer !== "undefined" && Buffer.isBuffer && Buffer.isBuffer(stream); + const isUint8Array = typeof Uint8Array !== "undefined" && stream instanceof Uint8Array; + + if (isString || isBuffer || isUint8Array) { + const text = isString + ? (stream as string) + : decoder.decode(isBuffer ? 
new Uint8Array(stream as Buffer) : (stream as Uint8Array)); + const lines = text.split("\n"); + + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed) { + continue; + } + + try { + yield JSON.parse(trimmed); + } catch (err) { + console.warn("Failed to parse JSON line:", err); + } + } + return; + } + + const isAsyncIterable = stream && typeof (stream as any)[Symbol.asyncIterator] === "function"; + const source: AsyncIterable<any> = isAsyncIterable ? (stream as any) : createAsyncIterableFromReadable(stream as any); + + for await (const chunk of source) { + // Node.js streams can return Buffer or string chunks + // Convert to Uint8Array if needed for TextDecoder + const uint8Chunk = typeof chunk === "string" + ? new TextEncoder().encode(chunk) + : chunk instanceof Buffer + ? new Uint8Array(chunk) + : chunk; + + // Append decoded chunk to buffer + buffer += decoder.decode(uint8Chunk, { stream: true }); + + // Split on newlines + const lines = buffer.split("\n"); + + // Keep the last (potentially incomplete) line in the buffer + buffer = lines.pop() || ""; + + // Parse and yield complete lines + for (const line of lines) { + const trimmed = line.trim(); + if (trimmed) { + try { + yield JSON.parse(trimmed); + } catch (err) { + console.warn("Failed to parse JSON line:", err); + } + } + } + } + + // Flush any remaining decoder state + buffer += decoder.decode(); + + // Handle any remaining data in buffer + if (buffer.trim()) { + try { + yield JSON.parse(buffer); + } catch (err) { + console.warn("Failed to parse final JSON buffer:", err); + } + } +} \ No newline at end of file diff --git a/tests/client.test.ts b/tests/client.test.ts index 77119cfb..82f270a4 100644 --- a/tests/client.test.ts +++ b/tests/client.test.ts @@ -1,4 +1,5 @@ import * as nock from "nock"; +import { Readable } from "node:stream"; import { ClientWriteStatus, @@ -1597,6 +1598,139 @@ describe("OpenFGA Client", () => { }); }); + describe("StreamedListObjects", () => { + it("should stream 
objects and yield them incrementally", async () => { + const objects = ["document:1", "document:2", "document:3"]; + const scope = nocks.streamedListObjects(baseConfig.storeId!, objects); + + expect(scope.isDone()).toBe(false); + + const results: string[] = []; + for await (const response of fgaClient.streamedListObjects({ + user: "user:81684243-9356-4421-8fbf-a4f8d36aa31b", + relation: "can_read", + type: "document", + })) { + results.push(response.object); + } + + expect(scope.isDone()).toBe(true); + expect(results).toHaveLength(3); + expect(results).toEqual(expect.arrayContaining(objects)); + }); + + it("should handle custom headers", async () => { + const objects = ["document:1"]; + + const scope = nock(defaultConfiguration.getBasePath()) + .post(`/stores/${baseConfig.storeId}/streamed-list-objects`) + .reply(function () { + // Verify custom headers were sent + expect(this.req.headers["x-custom-header"]).toBe("custom-value"); + expect(this.req.headers["x-request-id"]).toBe("test-123"); + + // Return NDJSON stream + const ndjsonResponse = objects + .map(obj => JSON.stringify({ result: { object: obj } })) + .join("\n") + "\n"; + + return [200, Readable.from([ndjsonResponse]), { + "Content-Type": "application/x-ndjson" + }]; + }); + + const results: string[] = []; + for await (const response of fgaClient.streamedListObjects({ + user: "user:anne", + relation: "owner", + type: "document", + }, { + headers: { + "X-Custom-Header": "custom-value", + "X-Request-ID": "test-123" + } + })) { + results.push(response.object); + } + + expect(scope.isDone()).toBe(true); + expect(results).toEqual(objects); + }); + + it("should handle errors from the stream", async () => { + const scope = nock(defaultConfiguration.getBasePath()) + .post(`/stores/${baseConfig.storeId}/streamed-list-objects`) + .reply(500, { code: "internal_error", message: "Server error" }); + + await expect(async () => { + for await (const response of fgaClient.streamedListObjects({ + user: "user:anne", + 
relation: "owner", + type: "document", + })) { + // Should not get here + } + }).rejects.toThrow(); + + expect(scope.isDone()).toBe(true); + }); + + it("should handle retry on 429 error", async () => { + const objects = ["document:1"]; + + // Create client with retry enabled + const fgaClientWithRetry = new OpenFgaClient({ + ...baseConfig, + credentials: { method: CredentialsMethod.None }, + retryParams: { maxRetry: 2, minWaitInMs: 10 } + }); + + // First attempt fails with 429 (called exactly once) + const scope1 = nock(defaultConfiguration.getBasePath()) + .post(`/stores/${baseConfig.storeId}/streamed-list-objects`) + .times(1) + .reply(429, { code: "rate_limit_exceeded", message: "Rate limited" }, { + "Retry-After": "1" + }); + + // Second attempt succeeds (retry - called exactly once) + const scope2 = nocks.streamedListObjects(baseConfig.storeId!, objects); + + const results: string[] = []; + for await (const response of fgaClientWithRetry.streamedListObjects({ + user: "user:anne", + relation: "owner", + type: "document", + })) { + results.push(response.object); + } + + // Verify both scopes were called (proves retry happened) + expect(scope1.isDone()).toBe(true); + expect(scope2.isDone()).toBe(true); + expect(results).toEqual(objects); + }); + + it("should support consistency preference", async () => { + const objects = ["document:1"]; + const scope = nocks.streamedListObjects(baseConfig.storeId!, objects); + + const results: string[] = []; + for await (const response of fgaClient.streamedListObjects({ + user: "user:anne", + relation: "owner", + type: "document", + }, { + consistency: ConsistencyPreference.HigherConsistency + })) { + results.push(response.object); + } + + expect(scope.isDone()).toBe(true); + expect(results).toEqual(objects); + }); + }); + describe("ListRelations", () => { it("should properly pass the request and return an allowed API response", async () => { const tuples = [{ diff --git a/tests/helpers/nocks.ts b/tests/helpers/nocks.ts index 
bc461236..11e73a15 100644 --- a/tests/helpers/nocks.ts +++ b/tests/helpers/nocks.ts @@ -1,5 +1,7 @@ import type * as Nock from "nock"; +import { Readable } from "node:stream"; + import { AuthorizationModel, BatchCheckRequest, @@ -244,6 +246,22 @@ export const getNocks = ((nock: typeof Nock) => ({ ) .reply(200, responseBody); }, + streamedListObjects: ( + storeId: string, + objects: string[], + basePath = defaultConfiguration.getBasePath(), + ) => { + // Create NDJSON response (newline-delimited JSON) as a stream + const ndjsonResponse = objects + .map(obj => JSON.stringify({ result: { object: obj } })) + .join("\n") + "\n"; + + return nock(basePath) + .post(`/stores/${storeId}/streamed-list-objects`) + .reply(200, () => Readable.from([ndjsonResponse]), { + "Content-Type": "application/x-ndjson" + }); + }, listUsers: ( storeId: string, responseBody: ListUsersResponse, diff --git a/tests/streaming.test.ts b/tests/streaming.test.ts new file mode 100644 index 00000000..19882e78 --- /dev/null +++ b/tests/streaming.test.ts @@ -0,0 +1,259 @@ +/** + * JavaScript and Node.js SDK for OpenFGA + * + * API version: 1.x + * Website: https://openfga.dev + * Documentation: https://openfga.dev/docs + * Support: https://openfga.dev/community + * License: [Apache-2.0](https://github.com/openfga/js-sdk/blob/main/LICENSE) + * + * NOTE: This file was auto generated by OpenAPI Generator (https://openapi-generator.tech). DO NOT EDIT. 
+ */ + + +import { Readable } from "node:stream"; +import { EventEmitter } from "node:events"; +import { parseNDJSONStream } from "../streaming"; + +describe("Streaming Utilities", () => { + describe("parseNDJSONStream (Node.js)", () => { + it("should parse single line NDJSON", async () => { + const ndjson = '{"result":{"object":"document:1"}}\n'; + const stream = Readable.from([ndjson]); + + const results: any[] = []; + for await (const item of parseNDJSONStream(stream)) { + results.push(item); + } + + expect(results).toHaveLength(1); + expect(results[0]).toEqual({ result: { object: "document:1" } }); + }); + + it("should parse multiple line NDJSON", async () => { + const ndjson = '{"result":{"object":"document:1"}}\n{"result":{"object":"document:2"}}\n{"result":{"object":"document:3"}}\n'; + const stream = Readable.from([ndjson]); + + const results: any[] = []; + for await (const item of parseNDJSONStream(stream)) { + results.push(item); + } + + expect(results).toHaveLength(3); + expect(results[0]).toEqual({ result: { object: "document:1" } }); + expect(results[1]).toEqual({ result: { object: "document:2" } }); + expect(results[2]).toEqual({ result: { object: "document:3" } }); + }); + + it("should handle chunked data across multiple reads", async () => { + // Simulate data coming in chunks that split JSON objects mid-line + const chunks = [ + '{"result":{"object":"document:1"}}\n{"res', + 'ult":{"object":"document:2"}}\n' + ]; + + const stream = Readable.from(chunks); + + const results: any[] = []; + for await (const item of parseNDJSONStream(stream)) { + results.push(item); + } + + expect(results).toHaveLength(2); + expect(results[0]).toEqual({ result: { object: "document:1" } }); + expect(results[1]).toEqual({ result: { object: "document:2" } }); + }); + + it("should handle empty lines", async () => { + const ndjson = '{"result":{"object":"document:1"}}\n\n{"result":{"object":"document:2"}}\n'; + const stream = Readable.from([ndjson]); + + const results: any[] 
= []; + for await (const item of parseNDJSONStream(stream)) { + results.push(item); + } + + expect(results).toHaveLength(2); + }); + + it("should skip invalid JSON lines", async () => { + const consoleWarnSpy = jest.spyOn(console, "warn").mockImplementation(); + + const ndjson = '{"result":{"object":"document:1"}}\ninvalid json\n{"result":{"object":"document:2"}}\n'; + const stream = Readable.from([ndjson]); + + const results: any[] = []; + for await (const item of parseNDJSONStream(stream)) { + results.push(item); + } + + expect(results).toHaveLength(2); + expect(results[0]).toEqual({ result: { object: "document:1" } }); + expect(results[1]).toEqual({ result: { object: "document:2" } }); + expect(consoleWarnSpy).toHaveBeenCalled(); + + consoleWarnSpy.mockRestore(); + }); + + it("should parse when Readable emits Buffer chunks", async () => { + const ndjson = Buffer.from('{"a":1}\n{"b":2}\n'); + const stream = Readable.from([ndjson]); + + const out: any[] = []; + for await (const item of parseNDJSONStream(stream)) { + out.push(item); + } + expect(out).toEqual([{ a: 1 }, { b: 2 }]); + }); + + it("should parse last JSON without trailing newline", async () => { + const stream = Readable.from(['{"a":1}\n{"b":2}']); + + const out: any[] = []; + for await (const item of parseNDJSONStream(stream)) { + out.push(item); + } + expect(out).toEqual([{ a: 1 }, { b: 2 }]); + }); + + it("should skip invalid final JSON buffer and warn", async () => { + const warn = jest.spyOn(console, "warn").mockImplementation(); + const stream = Readable.from(['{"a":1}\n{"b":']); + + const out: any[] = []; + for await (const item of parseNDJSONStream(stream)) { + out.push(item); + } + expect(out).toEqual([{ a: 1 }]); + expect(warn).toHaveBeenCalled(); + warn.mockRestore(); + }); + + it("should parse when given a string input", async () => { + const input = '{"a":1}\n{"b":2}\n'; + const out: any[] = []; + for await (const item of parseNDJSONStream(input as any)) { + out.push(item); + } + 
expect(out).toEqual([{ a: 1 }, { b: 2 }]); + }); + + it("should parse when given a Buffer input", async () => { + const input = Buffer.from('{"a":1}\n{"b":2}\n'); + const out: any[] = []; + for await (const item of parseNDJSONStream(input as any)) { + out.push(item); + } + expect(out).toEqual([{ a: 1 }, { b: 2 }]); + }); + + it("should accept async iterable that yields Uint8Array", async () => { + const src = { + [Symbol.asyncIterator]: async function* () { + yield new TextEncoder().encode('{"a":1}\n{"b":2}\n'); + } + } as any; + + const out: any[] = []; + for await (const item of parseNDJSONStream(src)) { + out.push(item); + } + expect(out).toEqual([{ a: 1 }, { b: 2 }]); + }); + + it("should reject pending iteration when classic emitter errors", async () => { + const emitter = new EventEmitter() as any; + const gen = parseNDJSONStream(emitter); + + const firstPromise = gen.next(); + emitter.emit("data", '{"a":1}\n'); + const first = await firstPromise; + expect(first.value).toEqual({ a: 1 }); + + const pendingNext = gen.next(); + emitter.emit("error", new Error("boom")); + + // Pending next should now reject with the error + await expect(pendingNext).rejects.toThrow("boom"); + + // After error, iterator is exhausted (standard async iterator behavior) + await expect(gen.next()).resolves.toEqual({ value: undefined, done: true }); + }); + + it("should clean up listeners on early cancellation", async () => { + const emitter = new EventEmitter() as any; + const gen = parseNDJSONStream(emitter); + + const p = gen.next(); + emitter.emit("data", '{"a":1}\n'); + const first = await p; + expect(first.value).toEqual({ a: 1 }); + + await gen.return(undefined as any); + expect(emitter.listenerCount("data")).toBe(0); + expect(emitter.listenerCount("end")).toBe(0); + expect(emitter.listenerCount("error")).toBe(0); + }); + + it("should read from buffered queue when data exceeds pending resolvers", async () => { + const emitter = new EventEmitter() as any; + const gen = 
parseNDJSONStream(emitter); + + // Start consumption so listeners are attached and a pending resolver exists + const firstPromise = gen.next(); + + // First chunk fulfills the pending resolver + emitter.emit("data", '{"x":1}\n'); + const first = await firstPromise; + expect(first).toEqual({ value: { x: 1 }, done: false }); + + // Emit another chunk before requesting next; it should be queued + emitter.emit("data", '{"y":2}\n'); + + // Next pull should be served from the buffered queue path + const second = await gen.next(); + expect(second).toEqual({ value: { y: 2 }, done: false }); + + // end stream to complete + emitter.emit("end"); + const done = await gen.next(); + expect(done).toEqual({ value: undefined, done: true }); + }); + + it("should resolve pending next to done when end occurs", async () => { + const emitter = new EventEmitter() as any; + const gen = parseNDJSONStream(emitter); + + const pending = gen.next(); + emitter.emit("end"); + + await expect(pending).resolves.toEqual({ value: undefined, done: true }); + await expect(gen.next()).resolves.toEqual({ value: undefined, done: true }); + }); + + it("should cleanup and reject on iterator throw", async () => { + const emitter = new EventEmitter() as any; + const gen = parseNDJSONStream(emitter); + + const thrown = gen.throw(new Error("stop")); + await expect(thrown).rejects.toThrow("stop"); + + expect(emitter.listenerCount("data")).toBe(0); + expect(emitter.listenerCount("end")).toBe(0); + expect(emitter.listenerCount("error")).toBe(0); + }); + + it("should warn on invalid JSON line in string input path", async () => { + const warn = jest.spyOn(console, "warn").mockImplementation(); + const input = '{"a":1}\nnot json\n{"b":2}\n'; + + const out: any[] = []; + for await (const item of parseNDJSONStream(input as any)) { + out.push(item); + } + expect(out).toEqual([{ a: 1 }, { b: 2 }]); + expect(warn).toHaveBeenCalled(); + warn.mockRestore(); + }); + }); +}); \ No newline at end of file