diff --git a/postgres/client.js b/postgres/client.js index 3d55edb..033ffea 100644 --- a/postgres/client.js +++ b/postgres/client.js @@ -5,7 +5,7 @@ const { POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_HOST, POSTGRES_PORT, POSTGRES { parseOrNot, wrapInObject, decode } = require('../services/utils'), { findSchemaAndTable, wrapJSONStringInObject } = require('../services/utils'), knexLib = require('knex'), - { isList, isUri } = require('clayutils'), + { isList, isUri, isPage } = require('clayutils'), TransformStream = require('../services/list-transform-stream'), META_PUT_PATCH_FN = patch('meta'); var knex, log = require('../services/log').setup({ file: __filename }); @@ -130,10 +130,17 @@ function columnToValueMap(column, value, obj = {}) { */ function put(key, value) { const { schema, table } = findSchemaAndTable(key), - map = columnToValueMap('id', key); // create the value map + map = columnToValueMap('id', key), // create the value map + isPublicEntity = isPage(key) || isList(key) || isUri(key), + parsedValue = parseOrNot(value); + + if (isPublicEntity && parsedValue.siteSlug) { + // add site id column to map + columnToValueMap('site_id', parsedValue.siteSlug, map); + } // add data to the map - columnToValueMap('data', wrapInObject(key, parseOrNot(value)), map); + columnToValueMap('data', wrapInObject(key, parsedValue.data || parsedValue), map); let url; @@ -144,7 +151,6 @@ function put(key, value) { columnToValueMap('url', url, map); } - return onConflictPut(map, schema, table) .then(() => map.data); } @@ -207,15 +213,23 @@ function del(key) { * @param {[type]} ops [description] * @return {[type]} [description] */ +/* eslint complexity: 0 */ function batch(ops) { var commands = [], url; - for (let i = 0; i < ops.length; i++) { + for (let i = 0, opsLength = ops.length; i < opsLength; i++) { let { key, value } = ops[i], { table, schema } = findSchemaAndTable(key), - map = columnToValueMap('id', key); + map = columnToValueMap('id', key), + isPublicEntity = 
isPage(key) || isList(key) || isUri(key), + parsedValue = isPublicEntity ? parseOrNot(value) : { data: value }; + + if (isPublicEntity && parsedValue.siteSlug) { + // add site id column to map + columnToValueMap('site_id', parsedValue.siteSlug, map); + } - columnToValueMap('data', wrapJSONStringInObject(key, value), map); + columnToValueMap('data', wrapJSONStringInObject(key, parsedValue.data), map); // add url column to map if putting a uri if (isUri(key)) { diff --git a/postgres/client.test.js b/postgres/client.test.js index 0ce693b..7c7b5db 100644 --- a/postgres/client.test.js +++ b/postgres/client.test.js @@ -566,6 +566,27 @@ describe('postgres/client', () => { expect(data).toEqual(data); }); }); + + test('inserts a row with a column for site id', () => { + const key = 'nymag.com/_pages/sample-article', + tableName = 'pages', + putData = { data, siteSlug: 'nymag' }; + + client.setClient(knex); + + return client.put(key, putData).then((data) => { + expect(table.mock.calls.length).toBe(1); + expect(table.mock.calls[0][0]).toBe(tableName); + expect(insert.mock.calls.length).toBe(1); + expect(insert.mock.calls[0][0]).toEqual({ id: key, data, site_id: 'nymag' }); + expect(queryBuilder.mock.calls.length).toBe(1); + expect(update.mock.calls.length).toBe(1); + expect(raw.mock.calls.length).toBe(1); + expect(raw.mock.calls[0][0]).toBe('? ON CONFLICT (id) DO ? 
returning *'); + expect(raw.mock.calls[0][1]).toEqual(['insert sql', 'update sql']); + expect(putData.data).toEqual(data); + }); + }); }); describe('putMeta', () => { @@ -674,11 +695,11 @@ describe('postgres/client', () => { ops = [ { key: 'nymag.com/_uris/someinstance', - value: 'nymag.com/_pages/someinstance' + value: { data: 'nymag.com/_pages/someinstance', siteSlug: 'nymag' } }, { key: 'nymag.com/_uris/someotherinstance', - value: 'nymag.com/_pages/someotherinstance' + value: { data: 'nymag.com/_pages/someotherinstance', siteSlug: 'nymag' } } ]; @@ -694,7 +715,7 @@ describe('postgres/client', () => { for (let index = 0; index < results.length; index++) { expect(table.mock.calls[index][0]).toBe('uris'); - expect(insert.mock.calls[index][0]).toEqual({ id: ops[index].key, data: results[index], url: decode(ops[index].key.split('/_uris/').pop()) }); + expect(insert.mock.calls[index][0]).toEqual({ id: ops[index].key, data: results[index], url: decode(ops[index].key.split('/_uris/').pop()), site_id: 'nymag' }); expect(raw.mock.calls[index][0]).toBe('? ON CONFLICT (id) DO ? returning *'); expect(raw.mock.calls[index][1]).toEqual(['insert sql', 'update sql']); } diff --git a/postgres/index.js b/postgres/index.js index 6b7322b..9dce850 100644 --- a/postgres/index.js +++ b/postgres/index.js @@ -42,6 +42,7 @@ function createTables() { return bluebird.all(getComponents().map(component => client.createTable(`components.${component}`))) .then(() => bluebird.all(getLayouts().map(layout => client.createTableWithMeta(`layouts.${layout}`)))) .then(() => client.createTableWithMeta('pages')) + .then(() => client.raw('CREATE TABLE IF NOT EXISTS ?? 
( id TEXT PRIMARY KEY NOT NULL, data TEXT NOT NULL, url TEXT );', ['uris'])) .then(() => createRemainingTables()); } @@ -59,24 +60,22 @@ function setup(testPostgresHost) { } return client.connect() - .then(() => { - return migrate( - { - database: POSTGRES_DB, - user: POSTGRES_USER, - password: POSTGRES_PASSWORD, - host: postgresHost, - port: POSTGRES_PORT - }, - path.join(__dirname, '../services/migrations') - ); - }) - .then(() => { - log('info', 'Migrations Complete'); - }) - .then(() => createTables()) + .then(() => client.createSchema('components')) + .then(() => client.createSchema('layouts')) + .then(createTables) + .then(() => migrate( + { + database: POSTGRES_DB, + user: POSTGRES_USER, + password: POSTGRES_PASSWORD, + host: postgresHost, + port: POSTGRES_PORT + }, + path.join(__dirname, '../services/migrations') + )) + .then(() => log('info', 'Migrations Complete')) .then(() => ({ server: `${postgresHost}:${POSTGRES_PORT}` })) - .catch(logGenericError); + .catch(logGenericError(__filename)); } module.exports.setup = setup; diff --git a/redis/index.js b/redis/index.js index c5f84a8..4611cc0 100644 --- a/redis/index.js +++ b/redis/index.js @@ -4,7 +4,8 @@ const bluebird = require('bluebird'), Redis = require('ioredis'), { REDIS_URL, REDIS_HASH } = require('../services/constants'), { isPublished, isUri, isUser } = require('clayutils'), - { notFoundError, logGenericError } = require('../services/errors'); + { notFoundError, logGenericError } = require('../services/errors'), + { parseOrNot } = require('../services/utils'); var log = require('../services/log').setup({ file: __filename }); /** @@ -50,7 +51,11 @@ function shouldProcess(key) { function put(key, value) { if (!shouldProcess(key)) return bluebird.resolve(); - return module.exports.client.hsetAsync(REDIS_HASH, key, value); + const data = isUri(key) + ? 
parseOrNot(value).data || value + : value; + + return module.exports.client.hsetAsync(REDIS_HASH, key, data); } /** @@ -69,7 +74,7 @@ function get(key) { } /** - * [batch description] + * Makes a batch operation to redis * @param {[type]} ops * @return {[type]} */ @@ -84,8 +89,12 @@ function batch(ops) { let { key, value } = ops[i]; if (shouldProcess(key)) { + const data = isUri(key) + ? parseOrNot(value).data || value + : value; + batch.push(key); - batch.push(value); + batch.push(data); } } diff --git a/services/migrations/001_create_components_schema.sql b/services/migrations/001_create_components_schema.sql deleted file mode 100644 index 042d10e..0000000 --- a/services/migrations/001_create_components_schema.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE SCHEMA IF NOT EXISTS components; diff --git a/services/migrations/001_create_timestamp_trigger.sql b/services/migrations/001_create_timestamp_trigger.sql new file mode 100644 index 0000000..cc917c3 --- /dev/null +++ b/services/migrations/001_create_timestamp_trigger.sql @@ -0,0 +1,7 @@ +CREATE OR REPLACE FUNCTION trigger_set_timestamp() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = NOW(); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; diff --git a/services/migrations/002_add_created_updated_columns.sql b/services/migrations/002_add_created_updated_columns.sql new file mode 100644 index 0000000..a62a5da --- /dev/null +++ b/services/migrations/002_add_created_updated_columns.sql @@ -0,0 +1,15 @@ +ALTER TABLE IF EXISTS uris +ADD COLUMN IF NOT EXISTS created_at TIMESTAMPTZ, +ADD COLUMN IF NOT EXISTS updated_at TIMESTAMPTZ; + +ALTER TABLE IF EXISTS lists +ADD COLUMN IF NOT EXISTS created_at TIMESTAMPTZ, +ADD COLUMN IF NOT EXISTS updated_at TIMESTAMPTZ; + +ALTER TABLE IF EXISTS pages +ADD COLUMN IF NOT EXISTS created_at TIMESTAMPTZ, +ADD COLUMN IF NOT EXISTS updated_at TIMESTAMPTZ; + +ALTER TABLE IF EXISTS users +ADD COLUMN IF NOT EXISTS created_at TIMESTAMPTZ, +ADD COLUMN IF NOT EXISTS updated_at TIMESTAMPTZ; diff --git 
a/services/migrations/002_create_layouts_schema.sql b/services/migrations/002_create_layouts_schema.sql deleted file mode 100644 index 8a3e1dc..0000000 --- a/services/migrations/002_create_layouts_schema.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE SCHEMA IF NOT EXISTS layouts; diff --git a/services/migrations/003_add_update_trigger.sql b/services/migrations/003_add_update_trigger.sql new file mode 100644 index 0000000..8df32ff --- /dev/null +++ b/services/migrations/003_add_update_trigger.sql @@ -0,0 +1,19 @@ +CREATE TRIGGER set_timestamp +BEFORE UPDATE ON lists +FOR EACH ROW +EXECUTE PROCEDURE trigger_set_timestamp(); + +CREATE TRIGGER set_timestamp +BEFORE UPDATE ON uris +FOR EACH ROW +EXECUTE PROCEDURE trigger_set_timestamp(); + +CREATE TRIGGER set_timestamp +BEFORE UPDATE ON pages +FOR EACH ROW +EXECUTE PROCEDURE trigger_set_timestamp(); + +CREATE TRIGGER set_timestamp +BEFORE UPDATE ON users +FOR EACH ROW +EXECUTE PROCEDURE trigger_set_timestamp(); diff --git a/services/migrations/003_create_pages_table.sql b/services/migrations/003_create_pages_table.sql deleted file mode 100644 index 8bb3792..0000000 --- a/services/migrations/003_create_pages_table.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE TABLE IF NOT EXISTS "pages" ( id TEXT PRIMARY KEY NOT NULL, data JSONB, meta JSONB ); diff --git a/services/migrations/004_add_created_at_values_pages.sql b/services/migrations/004_add_created_at_values_pages.sql new file mode 100644 index 0000000..a5f80dc --- /dev/null +++ b/services/migrations/004_add_created_at_values_pages.sql @@ -0,0 +1,7 @@ +UPDATE pages +SET created_at = subquery.created_at +FROM (SELECT id, meta ->> 'createdAt' as created_at + FROM pages + WHERE meta IS NOT NULL + AND meta ->> 'createdAt' IS NOT NULL) AS subquery +WHERE pages.id = subquery.id; diff --git a/services/migrations/004_create_uris_table.sql b/services/migrations/004_create_uris_table.sql deleted file mode 100644 index 2d74761..0000000 --- a/services/migrations/004_create_uris_table.sql +++ 
/dev/null @@ -1 +0,0 @@ -CREATE TABLE IF NOT EXISTS "uris" ( id TEXT PRIMARY KEY NOT NULL, data TEXT NOT NULL, url TEXT ); diff --git a/services/migrations/005_add_updated_at_values_pages.sql b/services/migrations/005_add_updated_at_values_pages.sql new file mode 100644 index 0000000..f76fd0f --- /dev/null +++ b/services/migrations/005_add_updated_at_values_pages.sql @@ -0,0 +1,7 @@ +UPDATE pages +SET updated_at = subquery.updated_at +FROM (SELECT id, meta ->> 'updateTime' as updated_at + FROM pages + WHERE meta IS NOT NULL + AND meta ->> 'updateTime' IS NOT NULL) AS subquery +WHERE pages.id = subquery.id; diff --git a/services/migrations/006_add_default_created_updated_values.sql b/services/migrations/006_add_default_created_updated_values.sql new file mode 100644 index 0000000..3d7f53b --- /dev/null +++ b/services/migrations/006_add_default_created_updated_values.sql @@ -0,0 +1,15 @@ +ALTER TABLE IF EXISTS lists +ALTER COLUMN created_at SET DEFAULT NOW(), +ALTER COLUMN updated_at SET DEFAULT NOW(); + +ALTER TABLE IF EXISTS uris +ALTER COLUMN created_at SET DEFAULT NOW(), +ALTER COLUMN updated_at SET DEFAULT NOW(); + +ALTER TABLE IF EXISTS pages +ALTER COLUMN created_at SET DEFAULT NOW(), +ALTER COLUMN updated_at SET DEFAULT NOW(); + +ALTER TABLE IF EXISTS users +ALTER COLUMN created_at SET DEFAULT NOW(), +ALTER COLUMN updated_at SET DEFAULT NOW(); diff --git a/services/migrations/007_add_site_id_column.sql b/services/migrations/007_add_site_id_column.sql new file mode 100644 index 0000000..2627379 --- /dev/null +++ b/services/migrations/007_add_site_id_column.sql @@ -0,0 +1,8 @@ +ALTER TABLE IF EXISTS lists +ADD COLUMN IF NOT EXISTS site_id VARCHAR(255); + +ALTER TABLE IF EXISTS pages +ADD COLUMN IF NOT EXISTS site_id VARCHAR(255); + +ALTER TABLE IF EXISTS uris +ADD COLUMN IF NOT EXISTS site_id VARCHAR(255); diff --git a/services/migrations/008_add_site_id_values_pages.sql 
b/services/migrations/008_add_site_id_values_pages.sql new file mode 100644 index 0000000..05ef51d --- /dev/null +++ b/services/migrations/008_add_site_id_values_pages.sql @@ -0,0 +1,7 @@ +UPDATE pages +SET site_id = subquery.site_slug +FROM (SELECT id, meta ->> 'siteSlug' as site_slug + FROM pages + WHERE meta IS NOT NULL + AND meta ->> 'siteSlug' IS NOT NULL) AS subquery +WHERE pages.id = subquery.id; diff --git a/services/migrations/009_add_site_id_values_uris.sql b/services/migrations/009_add_site_id_values_uris.sql new file mode 100644 index 0000000..8e28e10 --- /dev/null +++ b/services/migrations/009_add_site_id_values_uris.sql @@ -0,0 +1,7 @@ +UPDATE uris +SET site_id = subquery.site_slug +FROM (SELECT id, meta ->> 'siteSlug' as site_slug + FROM pages + WHERE meta IS NOT NULL + AND meta ->> 'siteSlug' IS NOT NULL) AS subquery +WHERE uris.data = subquery.id;