diff --git a/lib/query-builder.js b/lib/query-builder.js
index 52760cf..979fe92 100644
--- a/lib/query-builder.js
+++ b/lib/query-builder.js
@@ -60,11 +60,7 @@ function addDeferredQueryField (query, fieldName, queryFieldNode) {
 function toArgsAdapterInput (result, path) {
   if (!result) { return [] }

-  if (!Array.isArray(result)) {
-    return [result]
-  }
-
-  let r = result.filter(r => !!r)
+  let r = Array.isArray(result) ? result.filter(r => !!r) : [result]

   if (!path) {
     return r.flat()
@@ -86,7 +82,7 @@ function toArgsAdapterInput (result, path) {
     i++
   }

-  return r.flat()
+  return r.flat(Infinity)
 }

 function buildQuery (query, parentResult) {
diff --git a/lib/result.js b/lib/result.js
index 3acd8b2..ffac6e1 100644
--- a/lib/result.js
+++ b/lib/result.js
@@ -28,27 +28,16 @@ function mergeResult (mainResult, fullPath, queryNode, parentResult) {
     return
   }

-  // traverse result till bottom
-  let r = mainResult[path[0]]
-  let i = 1
-  while (i < path.length) {
-    const t = traverseResult(r, path[i])
-    if (!t) { break }
-    r = t
-    i++
-  }
+  const containerPath = (parentResult.path?.length ? parentResult.path : path).slice(0, -1)
+  const fillPath = path.slice(containerPath.length)

-  // fill the missing result path
-  const fillPath = []
-  for (let j = i; j < path.length; j++) {
-    fillPath.push(path[j])
+  let r = resolveResultPath(mainResult, containerPath)
+  while (!r && containerPath.length) {
+    fillPath.unshift(containerPath.pop())
+    r = resolveResultPath(mainResult, containerPath)
   }

-  if (!r) {
-    // copy reference
-    r = mergingResult
-    return
-  }
+  r ??= mainResult

   const many = parentResult.as && parentResult.many
   let key, parentKey, index
@@ -132,28 +121,18 @@ function copyResultRowList (dst, src, srcIndex, parentKey, keyPath, fillPath) {
   if (!traverseDst?.[parentKey]) { return }
   // TODO !undefined !null

+  const keys = Array.isArray(traverseDst[parentKey]) ? traverseDst[parentKey] : [traverseDst[parentKey]]
   let rowIndexes = []
-  // TODO more performant code
-  // design a different struct to avoid loops
-  if (Array.isArray(traverseDst[parentKey])) {
-    for (let i = 0; i < traverseDst[parentKey].length; i++) {
-      const indexes = srcIndex.map.get(traverseDst[parentKey][i])
-      if (indexes) { rowIndexes = rowIndexes.concat(indexes) }
-    }
-  } else {
-    const indexes = srcIndex.map.get(traverseDst[parentKey])
-    if (indexes) { rowIndexes = indexes }
+  for (const key of keys) {
+    const indexes = srcIndex.map.get(key)
+    if (indexes) rowIndexes = rowIndexes.concat(indexes)
   }

   for (; fillIndex < fillPath.length; fillIndex++) {
     if (!traverseDst[fillPath[fillIndex]]) {
       // TODO get result type from types
       if (fillIndex === fillPath.length - 1) {
-        // TODO more performant code
-        traverseDst[fillPath[fillIndex]] = []
-        for (let i = 0; i < rowIndexes.length; i++) {
-          traverseDst[fillPath[fillIndex]].push(src[rowIndexes[i]])
-        }
+        traverseDst[fillPath[fillIndex]] = rowIndexes.map(i => src[i])
         return
       }
       traverseDst[fillPath[fillIndex]] = {}
@@ -167,37 +146,36 @@
 }

 function resultIndex (result, key) {
-  if (result.length < 1) {
+  if (!result.length) {
     return { list: false, map: new Map() }
   }
+
   const list = Array.isArray(result[0][key])
   const index = new Map()

-  if (list) {
-    for (let i = 0; i < result.length; i++) {
-      for (let j = 0; j < result[i][key].length; j++) {
-        const s = index.get(result[i][key][j])
-        if (s) {
-          index.set(result[i][key][j], s.concat(i))
-          continue
-        }
-        index.set(result[i][key][j], [i])
-      }
-    }
-  } else {
-    for (let i = 0; i < result.length; i++) {
-      const s = index.get(result[i][key])
-      if (s) {
-        index.set(result[i][key], s.concat(i))
-        continue
-      }
-      index.set(result[i][key], [i])
+  for (let i = 0; i < result.length; i++) {
+    const keys = list ? result[i][key] : [result[i][key]]
+    for (const k of keys) {
+      const existing = index.get(k)
+      index.set(k, existing ? existing.concat(i) : [i])
     }
   }

   return { list, map: index }
 }

+function resolveResultPath (result, segments) {
+  if (!segments?.length) return result
+
+  let current = result
+  for (const segment of segments) {
+    if (current == null) return current
+    current = traverseResult(current, segment)
+  }
+
+  return current
+}
+
 module.exports = {
   traverseResult,
   mergeResult,
diff --git a/lib/utils.js b/lib/utils.js
index f20d232..f4f18b1 100644
--- a/lib/utils.js
+++ b/lib/utils.js
@@ -46,7 +46,7 @@ function objectDeepClone (object) {
 function copyObjectByKeys (to, src) {
   const keys = Object.keys(src)
   for (let i = 0; i < keys.length; i++) {
-    if (typeof to[keys[i]] === 'object') {
+    if (isObject(to[keys[i]]) && isObject(src[keys[i]])) {
       copyObjectByKeys(to[keys[i]], src[keys[i]])
     } else {
       to[keys[i]] ??= src[keys[i]]
diff --git a/test/entities/on-subgraphs-3.test.js b/test/entities/on-subgraphs-3.test.js
index ba0980d..17e9df5 100644
--- a/test/entities/on-subgraphs-3.test.js
+++ b/test/entities/on-subgraphs-3.test.js
@@ -43,6 +43,12 @@ function artistsSubgraph () {
         firstName: 'Brian',
         lastName: 'Molko',
         profession: 'Singer'
+      },
+      105: {
+        id: 105,
+        firstName: 'Luciano',
+        lastName: 'Pavarotti',
+        profession: 'Singer'
       }
     }
   }
@@ -60,7 +66,22 @@ function artistsSubgraph () {
   const entities = {
     Artist: {
       resolver: { name: 'artists' },
-      pkey: 'id'
+      pkey: 'id',
+      many: [
+        {
+          type: 'Song',
+          as: 'songs',
+          fkey: 'singerId',
+          pkey: 'id',
+          subgraph: 'songs-subgraph',
+          resolver: {
+            name: 'artistsSongs',
+            argsAdapter: (partialResults) => {
+              return { artistIds: partialResults.map(r => r?.id) }
+            }
+          }
+        }
+      ]
     }
   }

@@ -106,6 +127,11 @@ function songsSubgraphs () {
         id: 3,
         title: 'Vieni via con me',
         singerId: 102
+      },
+      4: {
+        id: 4,
+        title: 'Nessun dorma',
+        singerId: 105
       }
     }
   }
@@ -389,11 +415,139 @@ test('entities on subgraph, scenario #3: entities with 1-1, 1-2-m, m-2-m relatio
       name: 'should run a query with insane nested results',
       query: '{ artists (ids: ["103"]) { songs { singer { songs { singer { songs { title } }} } } } }',
       result: { artists: [{ songs: [{ singer: { songs: [{ singer: { songs: [{ title: 'Every you every me' }, { title: 'The bitter end' }] } }, { singer: { songs: [{ title: 'Every you every me' }, { title: 'The bitter end' }] } }] } }, { singer: { songs: [{ singer: { songs: [{ title: 'Every you every me' }, { title: 'The bitter end' }] } }, { singer: { songs: [{ title: 'Every you every me' }, { title: 'The bitter end' }] } }] } }] }] }
+    },
+
+    {
+      name: 'should handle deeply nested queries without returning null',
+      query: '{ artists (ids: ["105"]) { firstName, songs { singer { firstName, songs { title } } } } }',
+      result: {
+        artists: [{
+          firstName: 'Luciano',
+          songs: [{
+            singer: {
+              firstName: 'Luciano',
+              songs: [{ title: 'Nessun dorma' }]
+            }
+          }]
+        }]
+      }
+    },
+
+    {
+      name: 'should keep singer loops populated alongside sibling fields',
+      query: '{ artists (ids: ["103","105"]) { songs { title singer { firstName profession songs { title } } } } }',
+      result: {
+        artists: [
+          {
+            songs: [
+              {
+                title: 'Every you every me',
+                singer: {
+                  firstName: 'Brian',
+                  profession: 'Singer',
+                  songs: [
+                    { title: 'Every you every me' },
+                    { title: 'The bitter end' }
+                  ]
+                }
+              },
+              {
+                title: 'The bitter end',
+                singer: {
+                  firstName: 'Brian',
+                  profession: 'Singer',
+                  songs: [
+                    { title: 'Every you every me' },
+                    { title: 'The bitter end' }
+                  ]
+                }
+              }
+            ]
+          },
+          {
+            songs: [
+              {
+                title: 'Nessun dorma',
+                singer: {
+                  firstName: 'Luciano',
+                  profession: 'Singer',
+                  songs: [
+                    { title: 'Nessun dorma' }
+                  ]
+                }
+              }
+            ]
+          }
+        ]
+      }
+    },
+
+    {
+      name: 'should preserve first names across alternating singer chains',
+      query: '{ artists (ids: ["103"]) { songs { singer { firstName songs { singer { firstName songs { title } } } } } } }',
+      result: {
+        artists: [
+          {
+            songs: [
+              {
+                singer: {
+                  firstName: 'Brian',
+                  songs: [
+                    {
+                      singer: {
+                        firstName: 'Brian',
+                        songs: [
+                          { title: 'Every you every me' },
+                          { title: 'The bitter end' }
+                        ]
+                      }
+                    },
+                    {
+                      singer: {
+                        firstName: 'Brian',
+                        songs: [
+                          { title: 'Every you every me' },
+                          { title: 'The bitter end' }
+                        ]
+                      }
+                    }
+                  ]
+                }
+              },
+              {
+                singer: {
+                  firstName: 'Brian',
+                  songs: [
+                    {
+                      singer: {
+                        firstName: 'Brian',
+                        songs: [
+                          { title: 'Every you every me' },
+                          { title: 'The bitter end' }
+                        ]
+                      }
+                    },
+                    {
+                      singer: {
+                        firstName: 'Brian',
+                        songs: [
+                          { title: 'Every you every me' },
+                          { title: 'The bitter end' }
+                        ]
+                      }
+                    }
+                  ]
+                }
+              }
+            ]
+          }
+        ]
+      }
     }
   ]

   for (const c of requests) {
-    await t.test(c.name, async (t) => {
+    await t.test(c.name, { skip: c.skip }, async (t) => {
       const result = await graphqlRequest(service, c.query, c.variables)

       assert.deepStrictEqual(result, c.result)
diff --git a/test/utils-copy-object-by-keys.test.js b/test/utils-copy-object-by-keys.test.js
new file mode 100644
index 0000000..832efdc
--- /dev/null
+++ b/test/utils-copy-object-by-keys.test.js
@@ -0,0 +1,134 @@
+'use strict'
+
+const { test } = require('node:test')
+const assert = require('node:assert')
+const { copyObjectByKeys } = require('../lib/utils')
+
+test('copyObjectByKeys', async (t) => {
+  await t.test('should copy simple properties', () => {
+    const to = { id: '1' }
+    const src = { id: '1', name: 'John' }
+
+    copyObjectByKeys(to, src)
+
+    assert.deepStrictEqual(to, { id: '1', name: 'John' })
+  })
+
+  await t.test('should not overwrite existing properties', () => {
+    const to = { id: '1', name: 'John' }
+    const src = { id: '1', name: 'Jane', age: 30 }
+
+    copyObjectByKeys(to, src)
+
+    assert.deepStrictEqual(to, { id: '1', name: 'John', age: 30 })
+  })
+
+  await t.test('should handle nested objects', () => {
+    const to = { id: '1', user: { name: 'John' } }
+    const src = { id: '1', user: { name: 'John', age: 30 } }
+
+    copyObjectByKeys(to, src)
+
+    assert.deepStrictEqual(to, { id: '1', user: { name: 'John', age: 30 } })
+  })
+
+  await t.test('should handle null values in destination without error', () => {
+    const to = { id: '1', user: null }
+    const src = { id: '1', user: { name: 'John', age: 30 } }
+
+    // This should not throw an error
+    assert.doesNotThrow(() => {
+      copyObjectByKeys(to, src)
+    })
+
+    // null should be replaced with the object from src
+    assert.deepStrictEqual(to, { id: '1', user: { name: 'John', age: 30 } })
+  })
+
+  await t.test('should handle null values in source', () => {
+    const to = { id: '1', user: { name: 'John' } }
+    const src = { id: '1', user: null }
+
+    copyObjectByKeys(to, src)
+
+    // null in src should not overwrite existing object in to
+    assert.deepStrictEqual(to, { id: '1', user: { name: 'John' } })
+  })
+
+  await t.test('should handle mixed null and object scenarios', () => {
+    const to = {
+      id: '1',
+      user: null,
+      profile: { name: 'John' }
+    }
+    const src = {
+      id: '1',
+      user: { name: 'Jane' },
+      profile: null
+    }
+
+    copyObjectByKeys(to, src)
+
+    assert.deepStrictEqual(to, {
+      id: '1',
+      user: { name: 'Jane' },
+      profile: { name: 'John' }
+    })
+  })
+
+  await t.test('should handle deeply nested structures with nulls', () => {
+    const to = {
+      artists: [{
+        id: '105',
+        firstName: 'Luciano',
+        songs: [{
+          singer: null
+        }]
+      }]
+    }
+    const src = {
+      artists: [{
+        id: '105',
+        songs: [{
+          singer: {
+            id: '105',
+            songs: [{ title: 'Nessun dorma' }]
+          }
+        }]
+      }]
+    }
+
+    copyObjectByKeys(to, src)
+
+    // firstName should be preserved, singer should be populated
+    assert.strictEqual(to.artists[0].firstName, 'Luciano')
+    assert.deepStrictEqual(to.artists[0].songs[0].singer, {
+      id: '105',
+      songs: [{ title: 'Nessun dorma' }]
+    })
+  })
+
+  await t.test('should not recurse into null in destination', () => {
+    const to = { id: '1', nested: null }
+    const src = { id: '1', nested: { value: 'test' } }
+
+    // Before the fix, this would throw: "Cannot convert undefined or null to object"
+    // or similar error when trying to call Object.keys on null
+    assert.doesNotThrow(() => {
+      copyObjectByKeys(to, src)
+    })
+
+    assert.deepStrictEqual(to, { id: '1', nested: { value: 'test' } })
+  })
+
+  await t.test('should not recurse when both source and destination are not objects', () => {
+    const to = { id: '1', value: 'old' }
+    const src = { id: '1', value: 'new', extra: 'data' }
+
+    copyObjectByKeys(to, src)
+
+    // value should not be overwritten (??= operator)
+    assert.strictEqual(to.value, 'old')
+    assert.strictEqual(to.extra, 'data')
+  })
+})