diff --git a/.gitignore b/.gitignore index b3425b4..4c136ac 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ package-lock.json .jshintrc # an extra local test in my checkout test-jimp.js +gcs-credentials-uploadfstest.json diff --git a/lib/storage/azure.js b/lib/storage/azure.js index 512e81e..e25619f 100644 --- a/lib/storage/azure.js +++ b/lib/storage/azure.js @@ -8,6 +8,7 @@ var utils = require('../utils.js'); var defaultGzipBlacklist = require('../../defaultGzipBlacklist'); var verbose = false; var _ = require('lodash'); +var regexpQuote = require('regexp-quote'); var DEFAULT_MAX_AGE_IN_SECONDS = 500; var DEFAULT_MAX_CACHE = 2628000; @@ -449,9 +450,49 @@ module.exports = function() { }); }, - getUrl: function(path) { + catalog: function(prefix, callback) { + var continuationToken; + var list = []; + return pass(); + function pass() { + const blobSvc = self.blobSvcs[0]; + return blobSvc.svc.listBlobsSegmentedWithPrefix(blobSvc.container, prefix, continuationToken, function(err, data) { + if (err) { + return callback(err); + } + data.entries.forEach(function(item) { + if (item.name.charAt(0) !== '/') { + item.name = '/' + item.name; + } + var matches = item.name.match(/^(.*)?-disabled-[0-9a-f]+$/); + if (matches && (utils.getDisabledPath(matches[1], self.options.disabledFileKey) === item.name)) { + list.push({ + path: matches[1].replace(new RegExp('^' + regexpQuote(prefix)), ''), + size: item.contentLength, + disabled: true, + updatedAt: item.lastModified + }); + } else { + list.push({ + path: item.name.replace(new RegExp('^' + regexpQuote(prefix)), ''), + size: item.contentLength, + updatedAt: item.lastModified, + disabled: false + }); + } + }); + if (data.continuationToken) { + continuationToken = data.continuationToken; + return pass(); + } + return callback(null, list); + }); + } + }, + + getUrl: function() { var blob = self.blobSvcs[0]; - var url = blob.svc.getUrl(blob.container, path); + var url = blob.svc.getUrl(blob.container, ''); return url; }, 
diff --git a/lib/storage/gcs.js b/lib/storage/gcs.js index ec64889..08a49a3 100644 --- a/lib/storage/gcs.js +++ b/lib/storage/gcs.js @@ -6,6 +6,8 @@ var storage = require('@google-cloud/storage'); var extname = require('path').extname; var _ = require('lodash'); +var regexpQuote = require('regexp-quote'); +var async = require('async'); var utils = require('../utils'); var path = require('path'); @@ -120,6 +122,47 @@ module.exports = function() { client.bucket(bucketName).file(path).makePrivate({}, callback); }, + catalog: function(prefix, callback) { + var list = []; + var rawList = []; + return async.series([ + getList, + getDisabled + ], function(err) { + if (err) { + return callback(err); + } + return callback(null, list); + }); + function getList(callback) { + return client.bucket(bucketName).getFiles({ + prefix + }, function(err, files) { + if (err) { + return callback(err); + } + rawList = files; + return callback(null); + }); + } + function getDisabled(callback) { + return async.eachLimit(rawList, 5, function(item, callback) { + return client.bucket(bucketName).file(item.name).isPublic(function(err, p) { + if (err) { + return callback(err); + } + list.push({ + path: item.name.replace(new RegExp('^' + regexpQuote(prefix)), ''), + size: item.metadata.size, + updatedAt: item.metadata.updated, + disabled: !p + }); + return callback(null); + }); + }, callback); + } + }, + getUrl: function (path) { noProtoEndpoint = endpoint.replace(/^https?:\/\//i, ''); const url = (https ? 'https://' : 'http://') + bucketName + '.' 
+ noProtoEndpoint; diff --git a/lib/storage/local.js b/lib/storage/local.js index 199aefe..f646c60 100644 --- a/lib/storage/local.js +++ b/lib/storage/local.js @@ -209,6 +209,65 @@ module.exports = function() { // Exported for unit testing only _testCopyFile: function(path1, path2, options, callback) { return copyFile(path1, path2, options, callback); + }, + + catalog: function(prefix, callback) { + var list = []; + return spelunk(prefix, function(err) { + if (err) { + return callback(err); + } + list.sort(function(a, b) { + if (a.path < b.path) { + return -1; + } else if (a.path > b.path) { + return 1; + } else { + return 0; + } + }); + return callback(null, list); + }); + function spelunk(folder, callback) { + return fs.readdir(uploadsPath + folder, function(err, files) { + if (err) { + return callback(err); + } + return async.eachLimit(files, 5, function(file, callback) { + var path = folder + '/' + file; + return fs.stat(uploadsPath + path, function(err, stats) { + var matches; + if (err) { + return callback(err); + } + if (stats.isDirectory()) { + return spelunk(path, callback); + } else { + if (self.options.disabledFileKey) { + matches = file.match(/^(.*)?-disabled-[0-9a-f]+$/); + if (matches && (utils.getDisabledPath(folder + '/' + matches[1], self.options.disabledFileKey) === path)) { + list.push({ + path: folder + '/' + matches[1], + disabled: true + }); + } else { + list.push({ + path: folder + '/' + file, + disabled: false + }); + } + } else { + list.push({ + path: folder + '/' + file, + disabled: (stats.mode & parseInt('777', 8)) === self.getDisablePermissions() + }); + } + return callback(null); + } + }); + }, callback); + }); + } } }; diff --git a/lib/storage/s3.js b/lib/storage/s3.js index b463d74..c93d377 100644 --- a/lib/storage/s3.js +++ b/lib/storage/s3.js @@ -7,6 +7,8 @@ var fs = require('fs'); var AWS = require('aws-sdk'); var extname = require('path').extname; var _ = require('lodash'); +var regexpQuote = require('regexp-quote'); +var async = require('async'); const utils
= require('../utils'); module.exports = function() { @@ -174,6 +176,60 @@ }, callback); }, + catalog: function(prefix, callback) { + var continuationToken; + var list = []; + return pass(); + function pass() { + var args = { + Bucket: bucket + }; + if (prefix) { + args.Prefix = prefix; + } + if (continuationToken) { + args.ContinuationToken = continuationToken; + } + return client.listObjectsV2(args, function(err, data) { + if (err) { + return callback(err); + } + list = list.concat(data.Contents.map(function(item) { + return { + path: item.Key.replace(new RegExp('^' + regexpQuote(prefix)), ''), + size: item.Size, + updatedAt: item.LastModified + }; + })); + if (data.IsTruncated) { + continuationToken = data.NextContinuationToken; + return pass(); + } + // Boo, this is slow, we should switch to supporting disabledFileKey + // for s3 so we can avoid this + return async.eachLimit(list, 5, function(file, callback) { + return client.getObjectAcl({ + Bucket: bucket, + Key: utils.removeLeadingSlash(self.options, file.path) + }, function(err, acl) { + if (err) { + return callback(err); + } + file.disabled = !_.find(acl.Grants || [], function(grant) { + return grant.Grantee && (grant.Grantee.URI === 'http://acs.amazonaws.com/groups/global/AllUsers') && (grant.Permission === 'READ'); + }); + return callback(null); + }); + }, function(err) { + if (err) { + return callback(err); + } + return callback(null, list); + }); + }); + } + }, + getUrl: function (path) { let url; noProtoEndpoint = endpoint.replace(/^https?:\/\//i, ''); diff --git a/package.json b/package.json index 08e50db..eec77fe 100644 --- a/package.json +++ b/package.json @@ -21,12 +21,17 @@ "author": "Apostrophe Technologies, Inc.", "license": "MIT", "dependencies": { + "@google-cloud/storage": "^4.7.0", "async": "^1.0.0", + "aws-sdk": "^2.713.0", + "azure-storage": "^2.8.2", "bluebird": "^3.7.2", "es6-promise": "^4.1.0", "fs-extra": "^5.0.0", "gm": "^1.9.0", "gzipme": "^0.1.1", + "mkdirp": "~0.3.4", + "regexp-quote": "0.0.0",
"jimp": "^0.13.0", "lodash": "^4.17.21", "request": "^2.88.2", @@ -39,6 +44,12 @@ "azure-storage": "^2.8.2" }, "devDependencies": { + "eslint-config-punkave": "^1.0.10", + "eslint-config-standard": "^11.0.0-beta.0", + "eslint-plugin-import": "^2.22.0", + "eslint-plugin-node": "^5.2.1", + "eslint-plugin-promise": "^3.7.0", + "eslint-plugin-standard": "^3.1.0", "eslint": "^7.26.0", "eslint-config-apostrophe": "^3.4.0", "mocha": "^8.4.0", diff --git a/test/azure.js b/test/azure.js index fd7788c..117dd75 100644 --- a/test/azure.js +++ b/test/azure.js @@ -6,7 +6,7 @@ var uploadfs = require('../uploadfs.js')(); // A JPEG is not a good default because it is exempt from GZIP so // we get less coverage. -Tom var srcFile = process.env.AZURE_TEST_FILE || 'test.txt'; -var infilePath = 'one/two/three/'; +var infilePath = '/one/two/three/'; var infile = infilePath + srcFile; var _ = require('lodash'); @@ -94,6 +94,16 @@ describe('UploadFS Azure', function() { }); }); + it('catalog should work', function (done) { + return uploadfs.catalog(function(e, list) { + assert(!e); + assert(_.find(list, function(file) { + return (file.path === infile) && (!file.disabled); + })); + done(); + }); + }); + it('Azure test copyOut should work', function(done) { _getOutfile(infile, done); }); @@ -122,6 +132,16 @@ describe('UploadFS Azure', function() { }, 5000); }); + it('catalog should show file as disabled', function (done) { + return uploadfs.catalog(function(e, list) { + assert(!e); + assert(_.find(list, function(file) { + return (file.path === infile) && (file.disabled); + })); + done(); + }); + }); + it('Azure enable should work', function(done) { uploadfs.enable(infile, function(e, val) { if (e) { diff --git a/test/gcs.js b/test/gcs.js index be4f353..4f1fb2e 100644 --- a/test/gcs.js +++ b/test/gcs.js @@ -1,6 +1,7 @@ /* global describe, it */ const assert = require('assert'); const request = require('request'); +const _ = require('lodash'); describe('UploadFS GCS', function () { 
this.timeout(20000); @@ -58,6 +59,16 @@ describe('UploadFS GCS', function () { }); }); + it('catalog should work', function (done) { + return uploadfs.catalog(function(e, list) { + assert(!e); + assert(_.find(list, function(file) { + return (file.path === dstPath) && (!file.disabled); + })); + done(); + }); + }); + it('CopyIn file should be available via gcs', function (done) { const url = uploadfs.getUrl() + '/one/two/three/test.txt'; const og = fs.readFileSync('test.txt', 'utf8'); @@ -95,6 +106,15 @@ describe('UploadFS GCS', function () { cb(null); }); }, + catalog: cb => { + return uploadfs.catalog(function(e, list) { + assert(!e); + assert(_.find(list, function(file) { + return (file.path === dstPath) && (file.disabled); + })); + return cb(null); + }); + }, enable: cb => { uploadfs.enable(dstPath, e => { assert(!e, 'uploadfs enable should not fail'); diff --git a/test/local.js b/test/local.js index 1e20a06..6566ef4 100644 --- a/test/local.js +++ b/test/local.js @@ -1,6 +1,7 @@ /* global describe, it */ var Mode = require('stat-mode'); var assert = require('assert'); +var _ = require('lodash'); var path = require('path'); describe('UploadFS Local', function () { @@ -54,6 +55,16 @@ describe('UploadFS Local', function () { }); }); + it('catalog should work', function (done) { + return uploadfs.catalog(function(e, list) { + assert(!e); + assert(_.find(list, function(file) { + return (file.path === '/test_copy.txt') && (!file.disabled); + })); + done(); + }); + }); + it('copyOut should work for local filesystem', done => { return uploadfs.copyOut('/test_copy.txt', 'copy-out-test.txt', e => { assert(!e); @@ -96,7 +107,7 @@ describe('UploadFS Local', function () { return async.series({ disable: cb => { - assert(fs.existsSync(infile), 'copyIn file exissts'); + assert(fs.existsSync(infile), 'copyIn file exists'); uploadfs.disable(srcFile, e => { var stats = fs.statSync(infile); @@ -106,6 +117,17 @@ describe('UploadFS Local', function () { return cb(null); }); }, + 
catalog: cb => { + return uploadfs.catalog(function(e, list) { + if (e) { + return cb(e); + } + assert(_.find(list, function(file) { + return (file.path === '/test_copy.txt') && (file.disabled); + })); + return cb(null); + }); + }, enable: cb => { uploadfs.enable(srcFile, e => { var stats = fs.statSync(infile); diff --git a/test/s3.js b/test/s3.js index 00a960c..53b039b 100644 --- a/test/s3.js +++ b/test/s3.js @@ -1,6 +1,7 @@ /* global describe, it */ const assert = require('assert'); const request = require('request'); +const _ = require('lodash'); describe('UploadFS S3', function () { this.timeout(50000); @@ -73,6 +74,16 @@ }); }); + it('catalog should work', function (done) { + return uploadfs.catalog(function(e, list) { + assert(!e); + assert(_.find(list, function(file) { + return (file.path === dstPath) && (!file.disabled); + })); + done(); + }); + }); + it('S3 CopyOut should work', done => { const cpOutPath = 'copy-out-test.txt'; return uploadfs.copyOut(dstPath, cpOutPath, e => { @@ -101,6 +112,19 @@ }); }, + catalog: cb => { + uploadfs.catalog((e, list) => { + if (e) { + return cb(e); + } + if (!_.find(list, function(item) { + return (item.path === dstPath) && item.disabled; + })) { + return cb('catalog does not show test.txt as disabled'); + } + return cb(null); + }); + }, enable: cb => { uploadfs.enable(dstPath, e => { assert(!e, 'uploadfs enable should not fail'); diff --git a/uploadfs.js b/uploadfs.js index 886deec..4f03ee7 100644 --- a/uploadfs.js +++ b/uploadfs.js @@ -386,11 +386,11 @@ function Uploadfs() { }); }; - self.getUrl = function (options, callback) { + self.getUrl = function () { if (self.cdn && self.cdn.enabled) { return self.cdn.url; } - return self._storage.getUrl(options, callback) + self.prefix; + return self._storage.getUrl() + self.prefix; }; self.remove = function (path, callback) { @@ -438,6 +438,19 @@ return
self._storage.disable(path, callback); }; + /** + * + * Callback receives `(null, list)` where `list` is an array + * of all files with `path` and `disabled` properties. This + * method respects the `prefix` option, so only files matching + * the prefix are included. + * @param {Function} callback + * invoked with `(err, list)` on completion + */ + self.catalog = function (callback) { + return self._storage.catalog(self.prefix, callback); + }; + /** * Identify a local image file. Normally you don't need to call * this yourself, it is mostly used by copyImageIn. But you may find it