diff --git a/README.md b/README.md
index 9e5ff96..def9b44 100644
--- a/README.md
+++ b/README.md
@@ -44,7 +44,7 @@ and Parse Server [issue #1582](https://github.com/ParsePlatform/parse-server/issues/1582).
 ## Installation
 
 1. Clone the repo: `git clone git@github.com:parse-server-modules/parse-files-utils.git`
-2. cd into repo: `cd parse-file-utils`
+2. cd into repo: `cd parse-files-utils`
 3. Install dependencies: `npm install`
 
 ## Usage
diff --git a/config.example.js b/config.example.js
index aee49f7..61d475e 100644
--- a/config.example.js
+++ b/config.example.js
@@ -25,6 +25,11 @@ module.exports = {
   gcs_keyFilename: "credentials.json",
   gcs_bucket: "BUCKET_NAME",
 
+  // For Azure configuration
+  azure_account: "STORAGE_ACCOUNT_NAME",
+  azure_container: "BLOB_CONTAINER",
+  azure_accessKey: "ACCESS_KEY",
+
   // Or set filesAdapter to a Parse Server file adapter
   // filesAdapter: new FileAdapter({
   //   filesSubDirectory: './downloaded_files'
diff --git a/lib/index.js b/lib/index.js
index 8c51345..620b5fb 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -73,7 +73,7 @@ function onlyFiles(schemas) {
   return schemas.map(function(schema) {
     var fileFields = Object.keys(schema.fields).filter(function(key){
       var value = schema.fields[key];
-      return value.type == "File";
+      return value.type == "File" || value.type == "Array";
     });
     if (fileFields.length > 0) {
       return {
@@ -102,6 +102,79 @@ function getAllObjects(baseQuery) {
   return next();
 }
 
+// Cases covered:
+// i   - It is a single File
+// ii  - It is a single Array with no files
+// iii - It is a single Array with only files
+// iv  - There are multiple Arrays with only files
+// v   - There are multiple Arrays with no files
+// vi  - There are multiple Arrays with file and non-file elements
+
+var ARRAY_WITH_FILES = 1;
+var ARRAY_NO_FILES = 2;
+var FILE = 3;
+
+// Classify one object against its schema's file-bearing fields and collect
+// file records found inside Array-typed fields.
+function check(element, schema) {
+  var toRet = FILE;
+  var hasArray = false;
+  var hasFile = false;
+  var data = [];
+  var fields = schema.fields;
+
+  fields.forEach(function(key) {
+    if(Array.isArray(element.get(key))) {
+      hasArray = true;
+      element.get(key).forEach(function(item) {
+        if(item instanceof Parse.File) {
+          hasFile = true;
+          var fName = item.name();
+          var fUrl = item.url();
+          data.push({
+            className: schema.className,
+            objectId: element.id, // id of the owning object (a Parse.File has no id)
+            fieldName: key,
+            fileName: fName,
+            url: fUrl
+          });
+        }
+      });
+    }
+  });
+
+  if(hasArray && hasFile) toRet = ARRAY_WITH_FILES;
+  else if(hasArray) toRet = ARRAY_NO_FILES;
+
+  var returnObj = {
+    code: toRet,
+    data: data,
+  };
+
+  return returnObj;
+}
+
+// Build the list of file records for one query result: files inside Arrays,
+// or (FILE case) one record per schema field, 'DELETE' marking missing files.
+function checkArrays(result, schema) {
+  var status = check(result, schema);
+  if(status.code === ARRAY_WITH_FILES) {
+    return status.data
+  }
+  else if(status.code === ARRAY_NO_FILES) {
+    return []
+  }
+  else {
+    return schema.fields.map(function(field){
+      var fName = result.get(field) ? result.get(field).name() : 'DELETE';
+      var fUrl = result.get(field) ? result.get(field).url() : 'DELETE';
+      return {
+        className: schema.className,
+        objectId: result.id,
+        fieldName: field,
+        fileName: fName,
+        url: fUrl
+      }
+    })
+  }
+}
+
 function getObjectsWithFilesFromSchema(schema) {
   var query = new Parse.Query(schema.className);
   query.select(schema.fields.concat('createdAt'));
@@ -116,17 +189,7 @@ function getObjectsWithFilesFromSchema(schema) {
   return getAllObjects(query).then(function(results) {
     return results.reduce(function(current, result){
       return current.concat(
-        schema.fields.map(function(field){
-          var fName = result.get(field) ? result.get(field).name() : 'DELETE';
-          var fUrl = result.get(field) ? result.get(field).url() : 'DELETE';
-          return {
-            className: schema.className,
-            objectId: result.id,
-            fieldName: field,
-            fileName: fName,
-            url: fUrl
-          }
-        })
+        checkArrays(result, schema)
       );
     }, []);
   });
diff --git a/lib/transfer.js b/lib/transfer.js
index 3b62e73..0d8cee5 100644
--- a/lib/transfer.js
+++ b/lib/transfer.js
@@ -142,7 +142,7 @@ function _processFiles(files, handler) {
       }, callback)
       .then(callback, callback);
     } else {
-      callback();
+      process.nextTick(callback);
     }
   }, function(error) {
     if (error) {