diff --git a/lib/canned.js b/lib/canned.js
index cf3f271..75995e9 100644
--- a/lib/canned.js
+++ b/lib/canned.js
@@ -8,8 +8,13 @@ var querystring = require('querystring')
 var url = require('url')
 var cannedUtils = require('./utils')
 var lookup = require('./lookup')
+var ResponseParser = require('./response-parser')
+var RequestParser = require('./request-parser')
 var _ = require('lodash')
 
+var responseParser = new ResponseParser();
+var requestParser = new RequestParser();
+
 function Canned(dir, options) {
   this.logger = options.logger
   this.wildcard = options.wildcard || 'any'
@@ -90,16 +95,6 @@ function getContentType(mimetype){
   return Response.content_types[mimetype]
 }
 
-function stringifyValues(object) {
-  _.each(object, function(value, key) {
-    if (typeof value === "object") {
-      stringifyValues(value);
-    } else {
-      object[key] = String(value)
-    }
-  })
-}
-
 function isContentTypeJson(request) {
   var isJson = false;
   if (request.headers && request.headers['content-type']) {
@@ -108,42 +103,28 @@
   return isJson;
 }
 
-
 Canned.prototype.parseMetaData = function(response) {
-  var metaData = {}
+  var metaData = {request: {}},
+      lines;
+
+  // convert CR+LF => LF+LF, CR => LF, fixes line breaks causing issues in windows
   response = response.replace("\r", "\n");
-  var lines = response.split("\n")
-  var that = this
-  var optionsMatch = new RegExp(/\/\/!.*[statusCode|contentType|customHeaders]/g)
-  var requestMatch = new RegExp(/\/\/! [body|params|header]+: ([\w {}":\[\]\-\+\%,@.\/]*)/g)
+  // we only care about special comment lines
+  lines = response.split("\n").filter(function(line) {
+    return line.indexOf("//!") === 0;
+  })
   lines.forEach(function(line) {
-    if(line.indexOf("//!") === 0) { // special comment line
-      var matchedRequest = requestMatch.exec(line)
-      if(matchedRequest) {
-        metaData.request = JSON.parse(matchedRequest[1])
-        stringifyValues(metaData.request);
-        return
-      }
-      var matchedOptions = optionsMatch.exec(line)
-      if(matchedOptions) {
-        try {
-          line = line.replace("//!", '')
-          var content = line.split(',').map(function (s) {
-            var parts = s.split(':');
-            parts[0] = '"' + parts[0].trim() + '"'
-            return parts.join(':')
-          }).join(',')
-          var opts = JSON.parse('{' + content + '}')
-          cannedUtils.extend(metaData, opts)
-        } catch(e) {
-          that._log('Invalid file header format try //! statusCode: 201')
-        }
-        return
-      }
+    // extract any request attributes
+    var requestAttrs = requestParser.parse(line);
+    if (Object.keys(requestAttrs).length > 0) {
+      cannedUtils.extend(metaData.request, requestAttrs);
     }
+
+    var responseAttrs = responseParser.parse(line);
+    // extract any response attributes
+    cannedUtils.extend(metaData, responseAttrs)
   })
 
   return metaData
@@ -160,7 +141,7 @@ Canned.prototype.getSelectedResponse = function(responses, content, headers) {
     customHeaders: metaData.customHeaders
   }
 
-  stringifyValues(content);
+  cannedUtils.stringifyValues(content);
 
   responses.forEach(function(response) {
     var metaData = that.parseMetaData(response)
@@ -314,7 +295,7 @@ Canned.prototype.responder = function(body, req, res) {
       }
     }
   }
-  
+
   var paths = lookup(httpObj.pathname.join('/'), that.wildcard);
   paths.splice(0,1); // The first path is the default
   responseHandler = function (err, resp) {
@@ -397,4 +378,4 @@ Canned.prototype.responseFilter = function (req, res) {
   }
 }
 
-module.exports = Canned;
\ No newline at end of file
+module.exports = Canned;
diff --git a/lib/request-parser.js b/lib/request-parser.js
new file mode 100644
index 0000000..f73d595
--- /dev/null
+++ b/lib/request-parser.js
@@ -0,0 +1,33 @@
+var cannedUtils = require('./utils');
+var requestMatch = /\/\/!.*(?:body|params|header):\s+([\w {}":\[\]\-\+\%,@.\/]*)/g;
+
+var RequestParser = function() {}
+
+function parseRequestOptions(line) {
+  var match,
+      requestItems = {};
+
+  while (match = requestMatch.exec(line)) {
+    try {
+      cannedUtils.recursiveMerge(requestItems, JSON.parse(match[1]));
+    } catch (e) {
+      console.log(e);
+      //@todo some logging
+    }
+  }
+
+  return requestItems;
+}
+
+function parseEntry(lines) {
+  var result = {};
+  lines.split('\n').forEach(function(line) {
+    cannedUtils.recursiveMerge(result, parseRequestOptions(line));
+  });
+  return result;
+}
+
+RequestParser.prototype.parse = parseRequestOptions;
+RequestParser.prototype.parseEntry = parseEntry;
+
+module.exports = RequestParser;
diff --git a/lib/response-parser.js b/lib/response-parser.js
new file mode 100644
index 0000000..5165902
--- /dev/null
+++ b/lib/response-parser.js
@@ -0,0 +1,53 @@
+var cannedUtils = require('./utils');
+
+var responseMatch = /\/\/!.*(?:statusCode|contentType|customHeaders)/g;
+
+/**
+ * The ResponseParser is responsible for collecting the intended return
+ * status code, content type and header declarations.
+ */
+function ResponseParser() {}
+
+/**
+ * parseLine takes a single line from a file and extracts
+ * JSON data if possible. Returns an object.
+ */
+function parseLine(line) {
+  var match,
+      response = {};
+
+  while (responseMatch.exec(line)) {
+    try {
+      // drop the magic comment
+      line = line.replace("//!", '').trim();
+
+      var content = line.split(',').map(function (s) {
+        var parts = s.split(':');
+        if (parts[0].trim().slice(-1) !== '"') {
+          parts[0] = '"' + parts[0].trim() + '"'
+        }
+        return parts.join(':')
+      }).join(',')
+
+      response = cannedUtils.recursiveMerge(response, JSON.parse('{' + content + '}'))
+    } catch(e) {
+      console.log(e);
+      //@todo pass in log and get cracking
+    }
+  }
+
+  return response;
+}
+
+function parseEntry(lines) {
+  var result = {};
+  lines.split('\n').forEach(function(line) {
+    cannedUtils.recursiveMerge(result, parseLine(line));
+  });
+  return result;
+}
+
+ResponseParser.prototype.parse = parseLine;
+ResponseParser.prototype.parseEntry = parseEntry;
+
+module.exports = ResponseParser;
diff --git a/lib/utils.js b/lib/utils.js
index 5334365..71e8731 100644
--- a/lib/utils.js
+++ b/lib/utils.js
@@ -1,5 +1,7 @@
 "use strict";
 
+var _ = require('lodash')
+
 var utils = module.exports = {}
 
 utils.escapeRegexSpecialChars = function (text) {
@@ -18,6 +20,31 @@ utils.extend = function (target) {
   return target
 }
 
+/**
+ * Recursively merge an object onto target, preserving existing keys.
+ * Modified from http://stackoverflow.com/a/383245/771564
+ */
+utils.recursiveMerge = function(target, other) {
+  if (!other) {
+    return target;
+  }
+
+  for (var prop in other) {
+    try {
+      // Property in destination object set; update its value.
+      if ( other[prop].constructor == Object ) {
+        target[prop] = utils.recursiveMerge(target[prop], other[prop]);
+      } else {
+        target[prop] = other[prop];
+      }
+    } catch(e) {
+      // Property in destination object not set; create it and set its value.
+      target[prop] = other[prop];
+    }
+  }
+  return target;
+}
+
 utils.removeJSLikeComments = function (text) {
   return text.replace(/\/\*.+?\*\/|\/\/\s.*(?=[\n\r])/g, '')
 }
@@ -27,3 +54,13 @@ utils.removeSpecialComments = function (data) {
     return line.indexOf("//!") !== 0
   }).join("\n").trim()
 }
+
+utils.stringifyValues = function(object) {
+  _.each(object, function(value, key) {
+    if (typeof value === "object") {
+      utils.stringifyValues(value);
+    } else {
+      object[key] = String(value)
+    }
+  })
+}
diff --git a/spec/canned.spec.js b/spec/canned.spec.js
index 56d5f07..321f4d2 100644
--- a/spec/canned.spec.js
+++ b/spec/canned.spec.js
@@ -556,15 +556,104 @@ describe('canned', function () {
       expect(parsedMeta).toEqual({
         request: {
           serialkey: 'abc'
-        },
-        params: {
-          serialkey: '12121'
         }
       });
       done();
     })
   })
 
+  describe("parsing metadata", function() {
+    var Canned, can;
+
+    beforeEach(function() {
+      Canned = require('../lib/canned')
+      can = new Canned('./spec/test_responses', {});
+    })
+
+    it("Should accept statusCode", function(done) {
+      var mock_text = '//! statusCode: 418';
+      var parsedMeta = can.parseMetaData(mock_text);
+
+      expect(parsedMeta).toEqual({
+        statusCode: 418
+      });
+      done();
+    })
+
+    it("Should accept customHeaders", function(done) {
+      var mock_text = '//! statusCode: 418\n' +
+        '//! customHeaders: {"Authorization": "Bearer xyz"}';
+      var parsedMeta = can.parseMetaData(mock_text);
+
+      expect(parsedMeta).toEqual({
+        statusCode: 418,
+        customHeaders: {
+          Authorization: 'Bearer xyz'
+        }
+      });
+      done();
+    })
+
+    it("Should accept request body", function(done) {
+      var mock_text = '//! statusCode: 418\n' +
+        '//! customHeaders: {"Authorization": "Bearer xyz"}\n' +
+        '//! customHeaders: {"Location": "Wimbledon Common"}\n' +
+        '//! body: {"colour": "green"}';
+      var parsedMeta = can.parseMetaData(mock_text);
+
+      expect(parsedMeta).toEqual({
+        statusCode: 418,
+        customHeaders: {
+          Authorization: 'Bearer xyz',
+          Location: 'Wimbledon Common'
+        },
+        request: {
+          colour: 'green'
+        }
+      });
+      done();
+    })
+
+    it("Should accept request body with a single customHeaders line", function(done) {
+      var Canned = require('../lib/canned')
+      var can = new Canned('./spec/test_responses', {});
+      var mock_text = '//! statusCode: 418\n' +
+        '//! customHeaders: {"Authorization": "Bearer xyz"}\n' +
+        '//! body: {"colour": "green"}';
+      var parsedMeta = can.parseMetaData(mock_text);
+
+      expect(parsedMeta).toEqual({
+        statusCode: 418,
+        customHeaders: {
+          Authorization: 'Bearer xyz'
+        },
+        request: {
+          colour: 'green'
+        }
+      });
+      done();
+    })
+
+    it("Should apply the latest request params, body or header specified", function(done) {
+      var mock_text = '//! statusCode: 418\n' +
+        '//! customHeaders: {"Authorization": "Bearer xyz"}\n' +
+        '//! body: {"colour": "green"}\n' +
+        '//! params: {"count": 126}';
+      var parsedMeta = can.parseMetaData(mock_text);
+
+      expect(parsedMeta).toEqual({
+        statusCode: 418,
+        customHeaders: {
+          Authorization: 'Bearer xyz'
+        },
+        request: {
+          count: '126',
+        }
+      });
+      done();
+    })
+  })
+
   describe("variable POST responses", function() {
     var req, data
     beforeEach(function() {
diff --git a/spec/request-parser.spec.js b/spec/request-parser.spec.js
new file mode 100644
index 0000000..e764622
--- /dev/null
+++ b/spec/request-parser.spec.js
@@ -0,0 +1,41 @@
+var RequestParser = require('../lib/request-parser');
+
+describe("request parser", function() {
+  var parser;
+
+  beforeEach(function() {
+    parser = new RequestParser();
+  })
+
+  it("extracts a request body from a magic comment line", function(done) {
+    var fake_content = '//! body: {"frog_count": 200}';
+    var res = parser.parse(fake_content);
+    expect(res).toEqual({
+      frog_count: 200
+    });
+    done();
+  })
+
+  it("returns an empty object if there are no params", function(done) {
+    var fake_file_content = '//! statusCode: 418\n' +
+      '//! customHeaders: {"Authorization": "13r098asflj"}\n' +
+      '//! customHeaders: {"Location": "https://example.com"}';
+
+    var res = parser.parseEntry(fake_file_content);
+    expect(res).toEqual({});
+    done();
+  })
+
+  it("Should accept request body", function(done) {
+    var mock_text = '//! statusCode: 418\n' +
+      '//! customHeaders: {"Authorization": "Bearer xyz"}\n' +
+      '//! customHeaders: {"Location": "Wimbledon Common"}\n' +
+      '//! body: {"colour": "green"}';
+    var parsedMeta = parser.parseEntry(mock_text);
+
+    expect(parsedMeta).toEqual({
+      colour: "green"
+    });
+    done();
+  })
+})
diff --git a/spec/response-parser.spec.js b/spec/response-parser.spec.js
new file mode 100644
index 0000000..675ed98
--- /dev/null
+++ b/spec/response-parser.spec.js
@@ -0,0 +1,62 @@
+var ResponseParser = require('../lib/response-parser');
+
+describe("response parser", function() {
+  var parser;
+
+  beforeEach(function() {
+    parser = new ResponseParser();
+  })
+
+  it("extracts a statusCode", function(done) {
+    var fake_content = "//! statusCode: 200";
+    var res = parser.parse(fake_content);
+    expect(res).toEqual({
+      statusCode: 200
+    });
+    done();
+  })
+
+  it("extracts customHeaders", function(done) {
+    var fake_content = '//! customHeaders: {"Authorization": "13r098asflj"}';
+    var res = parser.parse(fake_content);
+    expect(res).toEqual({
+      customHeaders: {
+        Authorization: "13r098asflj"
+      }
+    });
+    done();
+  })
+
+  it("combines for ultimate badassery", function(done) {
+    var fake_file_content = '//! statusCode: 418\n' +
+      '//! customHeaders: {"Authorization": "13r098asflj"}\n' +
+      '//! customHeaders: {"Location": "https://example.com"}';
+
+    var res = parser.parseEntry(fake_file_content);
+    expect(res).toEqual({
+      statusCode: 418,
+      customHeaders: {
+        Authorization: "13r098asflj",
+        Location: "https://example.com"
+      }
+    });
+    done();
+  })
+
+  it("ignores request body lines", function(done) {
+    var mock_text = '//! statusCode: 418\n' +
+      '//! customHeaders: {"Authorization": "Bearer xyz"}\n' +
+      '//! customHeaders: {"Location": "Wimbledon Common"}\n' +
+      '//! body: {"colour": "green"}';
+    var parsedMeta = parser.parseEntry(mock_text);
+
+    expect(parsedMeta).toEqual({
+      statusCode: 418,
+      customHeaders: {
+        Authorization: 'Bearer xyz',
+        Location: 'Wimbledon Common'
+      }
+    });
+    done();
+  })
+})
diff --git a/spec/utils.spec.js b/spec/utils.spec.js
new file mode 100644
index 0000000..965b737
--- /dev/null
+++ b/spec/utils.spec.js
@@ -0,0 +1,25 @@
+var utils = require('../lib/utils');
+
+describe('recursiveMerge', function() {
+  it("adds properties to the target object", function(done) {
+    var first = {},
+        second = { "horses": "best of all the animals" };
+
+    utils.recursiveMerge(first, second);
+
+    expect(first).toEqual(second);
+
+    done();
+  })
+
+  it("merges properties on the target object without clobbering", function(done) {
+    var first = {"animals": {"frogs": "jump a lot"}},
+        second = { "animals" : { "horses": "best of all the animals" } };
+
+    utils.recursiveMerge(first, second);
+
+    expect(first).toEqual({"animals": {"frogs": "jump a lot", "horses": "best of all the animals"}});
+
+    done();
+  })
+})