1 change: 0 additions & 1 deletion .eslintrc
@@ -51,7 +51,6 @@
 "no-trailing-spaces": 2,
 "no-underscore-dangle": 0,
 "no-unneeded-ternary": 1,
-"one-var": 2,
 "quotes": [2, "single", "avoid-escape"],
 "semi": [2, "always"],
 "keyword-spacing": 2,
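For context on this rule removal: ESLint's one-var rule at severity 2 (its default "always" mode) requires the declarations in a scope to be combined into a single var/let/const statement. Dropping it permits the separate declarations used in the rewritten createReadStream below. A rough before/after illustration, using lines taken from this same PR:

// With "one-var": 2, bindings had to share one declaration statement:
const { prefix, values, keys } = options,
  transform = TransformStream(options),
  selects = [];

// With the rule removed, each binding can be its own statement:
const { prefix, values, keys, previous, size } = options;
const transform = TransformStream(options);
const selects = [];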
36 changes: 23 additions & 13 deletions postgres/client.js
@@ -17,7 +17,8 @@ const {
   { isList, isUri } = require('clayutils'),
   TransformStream = require('../services/list-transform-stream'),
   META_PUT_PATCH_FN = patch('meta');
-var knex, log = require('../services/log').setup({ file: __filename });
+var knex,
+  log = require('../services/log').setup({ file: __filename });
 
 /**
  * Connect to the default DB and create the Clay
@@ -241,24 +242,33 @@ function batch(ops) {
 }
 
 /**
- * Return a readable stream of query results
- * from the db
- *
- * @param {Object} options
- * @return {Stream}
+ * Gets a list of components as a readable stream, can handle pagination.
+ * @param {Object} options
+ * @returns {Stream}
  */
 function createReadStream(options) {
-  const { prefix, values, keys } = options,
-    transform = TransformStream(options),
-    selects = [];
+  const { prefix, values, keys, previous, size } = options;
+  const transform = TransformStream(options);
+  const selects = [];
 
   if (keys) selects.push('id');
   if (values) selects.push('data');
 
-  baseQuery(prefix)
-    .select(...selects)
-    .where('id', 'like', `${prefix}%`)
-    .pipe(transform);
+  const query = baseQuery(prefix);
+
+  query.select(...selects);
+  query.where('id', 'like', `${prefix}%`);
+
+  if (previous) {
+    query.where('id', '>', previous);
+  }
+
+  if (size) {
+    query.limit(size);
+    query.orderBy('id', 'asc');
+  }
+
+  query.pipe(transform);
 
   return transform;
 }
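A minimal usage sketch of the new pagination options (illustrative only, not part of this PR; the prefix value and the readPage helper are invented for the example): size caps the page and adds an ascending order on id, while previous acts as a cursor so the next page starts after the last id already seen.

const client = require('./postgres/client');

// Read one page of up to 20 ids that sort after `previous`.
// Assumes the list-transform-stream emits ids when only keys are requested.
function readPage(prefix, previous) {
  return client.createReadStream({
    prefix,        // e.g. 'nymag.com/_uris'
    keys: true,    // select the id column
    values: false, // skip the data column
    size: 20,      // LIMIT 20, ORDER BY id ASC
    previous       // WHERE id > previous (omit for the first page)
  });
}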
51 changes: 45 additions & 6 deletions postgres/client.test.js
@@ -435,12 +435,18 @@ describe('postgres/client', () => {

   describe('createReadStream', () => {
     const pipe = jest.fn(() => ({})),
-      where = jest.fn(() => ({ pipe })),
-      select = jest.fn(() => ({ where })),
-      withSchema = jest.fn(() => ({ select })),
+      where = jest.fn(() => ({})),
+      select = jest.fn(() => ({})),
+      withSchema = jest.fn(() => ({})),
+      limit = jest.fn(() => ({})),
+      orderBy = jest.fn(() => ({})),
       knex = jest.fn(() => ({
         withSchema,
-        select
+        select,
+        where,
+        limit,
+        orderBy,
+        pipe,
       })),
       mockedTransform = {};

@@ -454,7 +460,7 @@
       const options = {
         prefix: 'nymag.com/_uris',
         values: true,
-        keys: true
+        keys: true,
       },
         transform = client.createReadStream(options);

@@ -474,7 +480,7 @@
       const options = {
         prefix: 'nymag.com/_uris',
         values: false,
-        keys: false
+        keys: false,
       },
         transform = client.createReadStream(options);

@@ -486,6 +492,39 @@
       expect(where.mock.calls[0][2]).toBe(`${options.prefix}%`);
       expect(transform).toBe(mockedTransform);
     });
+
+    test('queries with a limit and order when page size is set', () => {
+      TransformStream.mockReturnValueOnce(mockedTransform);
+
+      const options = {
+        prefix: 'nymag.com/_uris',
+        values: false,
+        keys: false,
+        size: 20,
+      };
+
+      client.createReadStream(options);
+
+      expect(limit.mock.calls[0][0]).toBe(20);
+      expect(orderBy.mock.calls.length).toBe(1);
+      expect(orderBy.mock.calls[0]).toEqual(['id', 'asc']);
+    });
+
+    test('queries with where id > previous when previous is set', () => {
+      const options = {
+        prefix: 'nymag.com/_uris',
+        values: false,
+        keys: false,
+        size: 20,
+        previous: 'nymag.com/components/ad/instances/aaa',
+      };
+
+      client.createReadStream(options);
+
+      expect(where.mock.calls[1]).toEqual(['id', '>', options.previous]);
+      expect(orderBy.mock.calls.length).toBe(1);
+      expect(orderBy.mock.calls[0]).toEqual(['id', 'asc']);
+    });
   });
 
   describe('put', () => {
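A note on the mock reshuffle above (an observation, not text from the PR): the old mock chained, with select returning { where } and where returning { pipe }, because the query used to be built as one fluent chain. The rewritten createReadStream calls each builder method as a separate statement on the same query object, so the test now puts select, where, limit, orderBy, and pipe directly on the object knex() returns, and what each mock returns no longer matters. A rough sketch of the difference, with made-up variable names:

// Old shape: each mocked call returned the next link in the chain.
const pipeFn = jest.fn(() => ({})),
  whereFn = jest.fn(() => ({ pipe: pipeFn })),
  selectFn = jest.fn(() => ({ where: whereFn }));

// New shape: every builder method lives on one flat mock, mirroring
// query.select(...); query.where(...); query.pipe(transform); in client.js.
const builder = {
  select: jest.fn(),
  where: jest.fn(),
  limit: jest.fn(),
  orderBy: jest.fn(),
  pipe: jest.fn()
};
const knexMock = jest.fn(() => builder);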