From 1715e9ccf41f8763d3d8ae001fcfde5f20e3906b Mon Sep 17 00:00:00 2001
From: Maciej Krajowski-Kukiel
Date: Tue, 13 Jan 2026 19:56:53 +0100
Subject: [PATCH] Remove deprecation warnings by migrating to axios and WHATWG
 URL

Replace the deprecated request and request-promise packages with axios,
switch legacy url.parse/url.resolve calls to the WHATWG URL API, and
update dependencies, tests and fixtures accordingly.
---
 bin/pos-cli-logs.js                           |  5 +-
 lib/apiRequest.js                             | 75 +++++++++++++++--
 lib/assets.js                                 | 20 +++--
 lib/data/fetchFiles.js                        | 39 +++++----
 lib/directories.js                            |  6 +-
 lib/files.js                                  |  2 +-
 lib/presignUrl.js                             | 81 +++++++++---------
 lib/proxy.js                                  |  4 +-
 lib/s3UploadFile.js                           | 84 ++++++++++---------
 lib/validators/url.js                         |  1 -
 lib/watch.js                                  | 61 ++++++++------
 package.json                                  | 22 ++--
 test/deploy.test.js                           |  4 +-
 .../correct_with_assets/app/assets/bar.js     |  1 +
 .../modules_update/app/pos-modules.json       |  2 +-
 .../modules_update/app/pos-modules.lock.json  |  2 +-
 test/modules-download.test.js                 |  3 +-
 test/modules-push.test.js                     |  2 +-
 test/sync.test.js                             | 11 ++-
 19 files changed, 259 insertions(+), 166 deletions(-)
 create mode 100644 test/fixtures/deploy/correct_with_assets/app/assets/bar.js

diff --git a/bin/pos-cli-logs.js b/bin/pos-cli-logs.js
index 3898304b..f8d56453 100755
--- a/bin/pos-cli-logs.js
+++ b/bin/pos-cli-logs.js
@@ -1,8 +1,7 @@
 #!/usr/bin/env node
 
 const EventEmitter = require('events'),
-  path = require('path'),
-  url = require('url');
+  path = require('path');
 
 const { program } = require('commander'),
   notifier = require('node-notifier');
@@ -103,7 +102,7 @@ program
       if (!program.quiet && data) {
         let parts = [];
         if (data.url) {
-          requestUrl = url.parse(`https://${data.url}`);
+          requestUrl = new URL(`https://${data.url}`);
           let line = `path: ${requestUrl.pathname}`;
           if (requestUrl.search) line += `${requestUrl.search}`;
           parts.push(line);
diff --git a/lib/apiRequest.js b/lib/apiRequest.js
index 91d4ee8d..d1a153fb 100644
--- a/lib/apiRequest.js
+++ b/lib/apiRequest.js
@@ -1,14 +1,75 @@
-const requestPromise = require('request-promise');
+const axios = require('axios');
+const FormData = require('form-data');
 const logger = require('./logger');
-// const errors = require('request-promise/errors');
 
-const apiRequest = ({ method = 'GET', uri, body, headers, formData, json = true, forever, request = requestPromise }) => {
+const apiRequest = async ({ method = 'GET', uri, body, headers, formData, json = true, forever, request }) => {
   logger.Debug(`[${method}] ${uri}`);
 
-  return request({method, uri, body, headers, formData, json, forever})
-    // when we catch the error here we are not able to react to them later
-    // .catch(errors.StatusCodeError, ServerError.handler)
-    // .catch(errors.RequestError, ServerError.requestHandler)
+  try {
+    // If a custom axios instance is provided, use it
+    const client = request || axios;
+
+    let config = {
+      method,
+      url: uri,
+      headers: headers || {},
+    };
+
+    if (formData) {
+      // Convert formData object to FormData instance
+      const form = new FormData();
+      Object.keys(formData).forEach(key => {
+        form.append(key, formData[key]);
+      });
+      config.data = form;
+      config.headers = { ...config.headers, ...form.getHeaders() };
+    } else if (body) {
+      config.data = body;
+    } else if (json && typeof json === 'object') {
+      // In request-promise, json can be an object to send as request body
+      config.data = json;
+      config.headers['Content-Type'] = 'application/json';
+    }
+
+    // Handle forever option (keep-alive connections)
+    if (forever) {
+      config.timeout = 0;
+    }
+
+    const response = await client(config);
+
+    // Return data directly if json is truthy, otherwise return full response
+    return json ?
response.data : response; + } catch (error) { + // Re-throw with consistent error structure similar to request-promise + if (error.response) { + // Server responded with error status (StatusCodeError) + const err = new Error(error.response.data || error.message); + err.name = 'StatusCodeError'; + err.statusCode = error.response.status; + err.response = { + body: error.response.data, + statusCode: error.response.status + }; + err.options = { + uri: error.config?.url || uri + }; + err.error = error.response.data; + throw err; + } else if (error.request) { + // Request was made but no response received (RequestError) + const err = new Error(error.message); + err.name = 'RequestError'; + err.cause = error; + err.options = { + uri: error.config?.url || uri + }; + throw err; + } else { + // Network or other error + throw error; + } + } } module.exports = { diff --git a/lib/assets.js b/lib/assets.js index d5535e13..1d591d0e 100644 --- a/lib/assets.js +++ b/lib/assets.js @@ -1,4 +1,4 @@ -const request = require('request-promise'); +const axios = require('axios'); const packAssets = require('./assets/packAssets'), manifestGenerate = require('./assets/manifest').manifestGenerate, logger = require('./logger'), @@ -17,12 +17,18 @@ const waitForUnpack = async fileUrl => { logger.Debug(`Waiting for: ${fileUrl} to be deleted.`); counter += 1; if (fileExists) await sleep(1000); - fileExists = await request - .head(fileUrl) - .then(() => true) - .catch({ statusCode: 403 }, () => false) - .catch({ statusCode: 404 }, () => false) - .catch(error => logger.Error(error)); + + try { + await axios.head(fileUrl); + fileExists = true; + } catch (error) { + if (error.response && (error.response.status === 403 || error.response.status === 404)) { + fileExists = false; + } else { + logger.Error(error); + fileExists = false; + } + } } while (fileExists && counter < 90); }; diff --git a/lib/data/fetchFiles.js b/lib/data/fetchFiles.js index dadc768e..c06a3900 100644 --- a/lib/data/fetchFiles.js +++ b/lib/data/fetchFiles.js @@ -1,6 +1,5 @@ const fs = require('fs'), - request = require('request'), - url = require('url'), + axios = require('axios'), path = require('path'), shell = require('shelljs'), flatten = require('lodash.flatten'), @@ -8,23 +7,33 @@ const fs = require('fs'), logger = require('./../logger'); const CONCURRENCY = 12; -const download = (uri, filename) => { - return new Promise((resolve, reject) => { - request.head(uri, (err, res, body) => { - if (err) { - logger.Warn(err); - reject(err); - } else { - request(uri) - .pipe(fs.createWriteStream(filename)) - .on('close', () => resolve(filename)); - } +const download = async (uri, filename) => { + try { + // Check if file exists first + await axios.head(uri); + + // Download file as stream + const response = await axios({ + method: 'get', + url: uri, + responseType: 'stream' + }); + + return new Promise((resolve, reject) => { + const writer = fs.createWriteStream(filename); + response.data.pipe(writer); + writer.on('finish', () => resolve(filename)); + writer.on('error', reject); }); - }); + } catch (err) { + logger.Warn(err); + throw err; + } }; const filenameForUrl = uri => { - return path.basename(url.parse(uri).pathname); + const urlObj = new URL(uri); + return path.basename(urlObj.pathname); }; const updateItem = (item, newUrl) => { diff --git a/lib/directories.js b/lib/directories.js index 6c1707a6..f8c7023f 100644 --- a/lib/directories.js +++ b/lib/directories.js @@ -15,9 +15,9 @@ const computed = { }; const methods = { - toWatch: () => 
computed.ALLOWED.filter(fs.existsSync), - currentApp: () => [app.APP, app.LEGACY_APP].filter(fs.existsSync)[0], - available: () => computed.ALLOWED.filter(fs.existsSync) + toWatch: () => computed.ALLOWED.filter(path => path && fs.existsSync(path)), + currentApp: () => [app.APP, app.LEGACY_APP].filter(path => path && fs.existsSync(path))[0], + available: () => computed.ALLOWED.filter(path => path && fs.existsSync(path)) }; module.exports = Object.assign({}, app, internal, computed, methods); diff --git a/lib/files.js b/lib/files.js index e1cdb811..9cd5f127 100644 --- a/lib/files.js +++ b/lib/files.js @@ -14,7 +14,7 @@ const config = { const _paths = customConfig => [customConfig, config.CONFIG, config.LEGACY_CONFIG]; const _getConfigPath = customConfig => { - const firstExistingConfig = _paths(customConfig).filter(fs.existsSync)[0]; + const firstExistingConfig = _paths(customConfig).filter(path => path && fs.existsSync(path))[0]; logger.Debug(`[_getConfigPath] First existing config file: ${firstExistingConfig}`); return path.resolve(firstExistingConfig || config.CONFIG); }; diff --git a/lib/presignUrl.js b/lib/presignUrl.js index a9cf16ed..cfd1d44d 100644 --- a/lib/presignUrl.js +++ b/lib/presignUrl.js @@ -1,13 +1,18 @@ const fs = require('fs'), - url = require('url'), - request = require('request-promise'), + axios = require('axios'), mime = require('mime'), logger = require('./logger'), Portal = require('./portal'); -const deployServiceUrl = () => process.env.DEPLOY_SERVICE_URL || url.resolve(process.env.MARKETPLACE_URL, '/api/private/urls'); +const deployServiceUrl = () => { + if (process.env.DEPLOY_SERVICE_URL) { + return process.env.DEPLOY_SERVICE_URL; + } + const baseUrl = new URL(process.env.MARKETPLACE_URL); + return new URL('/api/private/urls', baseUrl).href; +}; -const presignUrl = (s3FileName, fileName) => { +const presignUrl = async (s3FileName, fileName) => { const serviceUrl = `${deployServiceUrl()}/presign-url`; const params = { fileName: s3FileName, @@ -15,52 +20,46 @@ const presignUrl = (s3FileName, fileName) => { contentType: mime.getType(fileName) }; - return request - .get({ - url: serviceUrl, - headers: { - token: process.env.MARKETPLACE_TOKEN, - marketplace_domain: url.parse(process.env.MARKETPLACE_URL).hostname - }, - qs: params, - json: true - }) - .then(body => { - return { uploadUrl: body.url, accessUrl: url.parse(body.accessUrl).href }; - }); + const marketplaceUrl = new URL(process.env.MARKETPLACE_URL); + const response = await axios.get(serviceUrl, { + headers: { + token: process.env.MARKETPLACE_TOKEN, + marketplace_domain: marketplaceUrl.hostname + }, + params: params + }); + + const accessUrl = new URL(response.data.accessUrl); + return { uploadUrl: response.data.url, accessUrl: accessUrl.href }; }; -const presignDirectory = path => { +const presignDirectory = async path => { const serviceUrl = `${deployServiceUrl()}/presign-directory`; const params = { directory: path }; - return request - .get({ - url: serviceUrl, - headers: { - token: process.env.MARKETPLACE_TOKEN, - marketplace_domain: url.parse(process.env.MARKETPLACE_URL).hostname - }, - qs: params, - json: true - }) - .then(body => body); + const marketplaceUrl = new URL(process.env.MARKETPLACE_URL); + const response = await axios.get(serviceUrl, { + headers: { + token: process.env.MARKETPLACE_TOKEN, + marketplace_domain: marketplaceUrl.hostname + }, + params: params + }); + + return response.data; }; -const presignUrlForPortal = (token, moduleName, filename) => { +const presignUrlForPortal = async 
(token, moduleName, filename) => { const serviceUrl = `${Portal.url()}/api/pos_modules/${moduleName}/presign_url`; logger.Debug(token); - return request - .get({ - url: serviceUrl, - headers: { - Authorization: `Bearer ${token}` - }, - json: true - }) - .then(body => { - return { uploadUrl: body.upload_url, accessUrl: body.access_url }; - }); + + const response = await axios.get(serviceUrl, { + headers: { + Authorization: `Bearer ${token}` + } + }); + + return { uploadUrl: response.data.upload_url, accessUrl: response.data.access_url }; }; module.exports = { diff --git a/lib/proxy.js b/lib/proxy.js index d09a53f0..7badbd5b 100644 --- a/lib/proxy.js +++ b/lib/proxy.js @@ -1,4 +1,4 @@ -const requestPromise = require('request-promise'); +const axios = require('axios'); const { apiRequest } = require('./apiRequest'); const logger = require('./logger'); @@ -22,7 +22,7 @@ class Gateway { const censored = Object.assign({}, headers, { Authorization: 'Token: ' }); logger.Debug(`Request headers: ${JSON.stringify(censored, null, 2)}`); - this.authorizedRequest = requestPromise.defaults({ headers }); + this.authorizedRequest = axios.create({ headers }); } apiRequest({ method = 'GET', uri, formData, json = true, forever }) { diff --git a/lib/s3UploadFile.js b/lib/s3UploadFile.js index a5a6a326..37c3c87e 100644 --- a/lib/s3UploadFile.js +++ b/lib/s3UploadFile.js @@ -1,53 +1,55 @@ const fs = require('fs'), - request = require('request'), + axios = require('axios'), + FormData = require('form-data'), mime = require('mime'); -const uploadFile = (fileName, s3Url) => { - var stats = fs.statSync(fileName); - return new Promise((resolve, reject) => { - fs.createReadStream(fileName).pipe( - request - .put({ - url: s3Url, - headers: { - 'Content-Length': stats['size'] - } - }) - .on('error', e => reject(e)) - .on('response', response => { - if (response.statusCode >= 200 && response.statusCode < 300) { - resolve(s3Url); - } else { - reject(response.statusCode); - } - }) - ); - }); +const uploadFile = async (fileName, s3Url) => { + const stats = fs.statSync(fileName); + const fileStream = fs.createReadStream(fileName); + + try { + await axios.put(s3Url, fileStream, { + headers: { + 'Content-Length': stats['size'] + }, + maxBodyLength: Infinity, + maxContentLength: Infinity + }); + return s3Url; + } catch (error) { + if (error.response) { + throw error.response.status; + } + throw error; + } }; -const uploadFileFormData = (filePath, data) => { - const formData = {}; +const uploadFileFormData = async (filePath, data) => { + const formData = new FormData(); + + // Add all form fields Object.entries(data.fields).forEach(([k, v]) => { - formData[k] = v; + formData.append(k, v); }); - formData['Content-Type'] = mime.getType(filePath); - formData['file'] = fs.createReadStream(filePath); - return new Promise((resolve, reject) => { - request - .post({ - url: data.url, - formData: formData - }) - .on('error', e => reject(e)) - .on('response', response => { - if (response.statusCode >= 200 && response.statusCode < 300) { - resolve(true); - } else { - reject(response.statusCode); - } - }); + // Add file with content type + formData.append('file', fs.createReadStream(filePath), { + contentType: mime.getType(filePath) }); + + try { + await axios.post(data.url, formData, { + headers: formData.getHeaders(), + maxBodyLength: Infinity, + maxContentLength: Infinity + }); + return true; + } catch (error) { + if (error.response) { + throw error.response.status; + } + throw error; + } }; module.exports = { diff --git 
a/lib/validators/url.js b/lib/validators/url.js index 01579c37..f002d548 100644 --- a/lib/validators/url.js +++ b/lib/validators/url.js @@ -1,5 +1,4 @@ const logger = require('../logger'); -const URL = require('url').URL; module.exports = string => { try { diff --git a/lib/watch.js b/lib/watch.js index 240241f0..be49dd0b 100644 --- a/lib/watch.js +++ b/lib/watch.js @@ -57,7 +57,8 @@ const pushFile = (gateway, syncedFilePath) => { } if (body) { - logger.Success(`[Sync] Synced: ${filePath}`); + const message = isAssetsPath(syncedFilePath) ? `[Sync] Synced asset: ${syncedFilePath}` : `[Sync] Synced: ${filePath}`; + logger.Success(message); } }).catch(e => { ServerError.handler(e); @@ -78,10 +79,15 @@ const deleteFile = (gateway, syncedFilePath) => { }); }; -const pushFileDirectAssets = (gateway, syncedFilePath) => { - if (isAssetsPath(syncedFilePath)) { - sendAsset(gateway, syncedFilePath); - return Promise.resolve(true); +const pushFileDirectAssets = async (gateway, syncedFilePath) => { + if (isAssetsPath(syncedFilePath) && directUploadData) { + try { + await sendAsset(gateway, syncedFilePath); + } catch (e) { + // If direct upload fails, fall back to legacy sync + logger.Debug(`[Sync] Direct upload failed, falling back to legacy sync`); + return pushFile(gateway, syncedFilePath); + } } else { return pushFile(gateway, syncedFilePath); } @@ -101,23 +107,17 @@ const manifestSend = debounce( const manifestAddAsset = path => manifestFilesToAdd.push(path); const sendAsset = async (gateway, filePath) => { - try { - const data = cloneDeep(directUploadData); - const fileSubdir = filePath.startsWith('app/assets') - ? path.dirname(filePath).replace('app/assets', '') - : '/' + path.dirname(filePath).replace('/public/assets', ''); - const key = data.fields.key.replace('assets/${filename}', `assets${fileSubdir}/\${filename}`); - data.fields.key = key; - logger.Debug(data); - await uploadFileFormData(filePath, data); - manifestAddAsset(filePath); - manifestSend(gateway); - logger.Success(`[Sync] Synced asset: ${filePath}`); - } catch (e) { - logger.Debug(e.message); - logger.Debug(e.stack); - logger.Error(`[Sync] Failed to sync: ${filePath}`); - } + const data = cloneDeep(directUploadData); + const fileSubdir = filePath.startsWith('app/assets') + ? path.dirname(filePath).replace('app/assets', '') + : '/' + path.dirname(filePath).replace('/public/assets', ''); + const key = data.fields.key.replace('assets/${filename}', `assets${fileSubdir}/\${filename}`); + data.fields.key = key; + logger.Debug(data); + await uploadFileFormData(filePath, data); + manifestAddAsset(filePath); + manifestSend(gateway); + logger.Success(`[Sync] Synced asset: ${filePath}`); }; const fetchDirectUploadData = async gateway => { @@ -137,7 +137,6 @@ const start = async (env, directAssetsUpload, liveReload) => { const gateway = new Gateway(program); const ignoreList = files.getIgnoreList(); const push = directAssetsUpload ? 
pushFileDirectAssets : pushFile; - if (directAssetsUpload) await fetchDirectUploadData(gateway); let liveReloadServer; if (liveReload) { @@ -164,13 +163,25 @@ const start = async (env, directAssetsUpload, liveReload) => { } }, program.concurrency); - return gateway.ping().then(() => { + return gateway.ping().then(async () => { const directories = dir.toWatch(); if (directories.length === 0) { logger.Error(`${dir.APP} or ${dir.MODULES} directory has to exist!`); } + logger.Info(`[Sync] Synchronizing changes to: ${program.url}`); + + if (directAssetsUpload) { + try { + await fetchDirectUploadData(gateway); + } catch (e) { + logger.Error('[Sync] Failed to fetch direct upload configuration. Falling back to standard sync.'); + logger.Debug(e.message); + directUploadData = null; + } + } + chokidar .watch(directories, { ignoreInitial: true, @@ -185,8 +196,6 @@ const start = async (env, directAssetsUpload, liveReload) => { .on('change', fp => shouldBeSynced(fp, ignoreList) && enqueuePush(fp)) .on('add', fp => shouldBeSynced(fp, ignoreList) && enqueuePush(fp)) .on('unlink', fp => shouldBeSynced(fp, ignoreList) && enqueueDelete(fp)); - - logger.Info(`[Sync] Synchronizing changes to: ${program.url}`); }); }; diff --git a/package.json b/package.json index ac39bac6..9c0a4c62 100644 --- a/package.json +++ b/package.json @@ -33,10 +33,11 @@ "commander": "^12.1.0", "degit": "^2.8.4", "email-validator": "^2.0.4", - "express": "^4.17.3", - "fast-glob": "^3.2.11", - "ignore": "^5.2.0", - "inquirer": "^8.2.0", + "express": "^4.22.1", + "fast-glob": "^3.3.3", + "form-data": "^4.0.1", + "ignore": "^5.3.2", + "inquirer": "^8.2.7", "livereload": "^0.9.3", "lodash.clonedeep": "^4.5.0", "lodash.compact": "^3.0.1", @@ -47,15 +48,14 @@ "lodash.startcase": "^4.4.0", "lodash.uniq": "^4.5.0", "mime": "^3.0.0", - "multer": "^1.4.5-lts.1", + "multer": "^1.4.5-lts.2", "mustache": "^4.2.0", "node-notifier": "^10.0.1", - "open": "^10.1.0", - "ora": "^8.0.1", + "open": "^10.2.0", + "ora": "^8.2.0", "prompts": "^2.4.2", - "request": "^2.88.2", - "request-promise": "^4.2.6", - "semver": "^7.3.7", + "axios": "^1.7.9", + "semver": "^7.7.3", "shelljs": "^0.8.5", "text-table": "^0.2.0", "unzipper": "^0.12.3", @@ -88,7 +88,7 @@ }, "homepage": "https://github.com/Platform-OS/pos-cli/issues#readme", "devDependencies": { - "dotenv": "^16.0.0", + "dotenv": "^16.6.1", "jest": "^29.7.0" }, "bundleDependencies": [ diff --git a/test/deploy.test.js b/test/deploy.test.js index 73ecdd38..636e2965 100644 --- a/test/deploy.test.js +++ b/test/deploy.test.js @@ -105,7 +105,7 @@ describe('Server errors', () => { test('Error in form', async () => { const { stderr } = await run('incorrect_form'); expect(stderr).toMatch( - 'Unknown properties: hello. Available properties are: api_call_notifications, async_callback_actions, authorization_policies, body, callback_actions, default_payload, email_notifications, fields, flash_alert, flash_notice, live_reindex, metadata, name, redirect_to, request_allowed, resource, resource_owner, response_headers, return_to, sms_notifications, spam_protection.' + 'Unknown properties in `form_configurations/hello.liquid`: hello. Available properties are: api_call_notifications, async_callback_actions, authorization_policies, body, callback_actions, default_payload, email_notifications, fields, flash_alert, flash_notice, live_reindex, metadata, name, redirect_to, request_allowed, resource, resource_owner, response_headers, return_to, sms_notifications, spam_protection.' 
); }); @@ -123,7 +123,7 @@ describe('Server errors', () => { expect(code).toEqual(1); expect(stderr).toMatch( - 'Deploy failed. RequestError: Error: getaddrinfo ENOTFOUND incorrecturl123xyz.com' + 'Deploy failed. RequestError: getaddrinfo ENOTFOUND incorrecturl123xyz.com' ); }); }); diff --git a/test/fixtures/deploy/correct_with_assets/app/assets/bar.js b/test/fixtures/deploy/correct_with_assets/app/assets/bar.js new file mode 100644 index 00000000..8cc7aa3e --- /dev/null +++ b/test/fixtures/deploy/correct_with_assets/app/assets/bar.js @@ -0,0 +1 @@ +console.log('bar'); diff --git a/test/fixtures/deploy/modules_update/app/pos-modules.json b/test/fixtures/deploy/modules_update/app/pos-modules.json index 970703eb..55aee05b 100644 --- a/test/fixtures/deploy/modules_update/app/pos-modules.json +++ b/test/fixtures/deploy/modules_update/app/pos-modules.json @@ -1,5 +1,5 @@ { "modules": { - "core": "1.5.5" + "core": "2.0.7" } } \ No newline at end of file diff --git a/test/fixtures/deploy/modules_update/app/pos-modules.lock.json b/test/fixtures/deploy/modules_update/app/pos-modules.lock.json index 970703eb..55aee05b 100644 --- a/test/fixtures/deploy/modules_update/app/pos-modules.lock.json +++ b/test/fixtures/deploy/modules_update/app/pos-modules.lock.json @@ -1,5 +1,5 @@ { "modules": { - "core": "1.5.5" + "core": "2.0.7" } } \ No newline at end of file diff --git a/test/modules-download.test.js b/test/modules-download.test.js index 77de65bb..f59f09e5 100644 --- a/test/modules-download.test.js +++ b/test/modules-download.test.js @@ -135,6 +135,7 @@ describe('Failed download', () => { }); test('Unescaped characters in request path', async () => { const { stderr } = await run('deploy/modules_test', 'ąę'); - expect(stderr).toMatch('[ERR_UNESCAPED_CHARACTERS]: Request path contains unescaped characters'); + // axios automatically encodes URLs, so we get a 404 instead of an encoding error + expect(stderr).toMatch('404 not found'); }); }); diff --git a/test/modules-push.test.js b/test/modules-push.test.js index 0db255cd..9f0ee947 100644 --- a/test/modules-push.test.js +++ b/test/modules-push.test.js @@ -45,7 +45,7 @@ describe('Server errors', () => { const { stdout, stderr } = await run('good', '--email foo@example.com'); expect(stderr).toMatch('Cannot find modules/pos_cli_ci_test, creating archive with the current directory'); expect(stderr).toMatch('You are unauthorized to do this operation. Check if your Token/URL or email/password are correct.'); - }); + }, 30000); test('Wrong version', async () => { const { stdout, stderr } = await run('good', '--email pos-cli-ci@platformos.com'); diff --git a/test/sync.test.js b/test/sync.test.js index d90f5738..8974dc17 100644 --- a/test/sync.test.js +++ b/test/sync.test.js @@ -3,8 +3,9 @@ const exec = require('./utils/exec'); const cliPath = require('./utils/cliPath'); const path = require('path'); +const fs = require('fs'); -const stepTimeout = 3500; +const stepTimeout = 5000; require('dotenv').config(); @@ -23,11 +24,17 @@ jest.setTimeout(20000); // default jasmine timeout is 5 seconds - we need more. const kill = p => { p.stdout.destroy(); p.stderr.destroy(); - p.kill() + p.kill(); } jest.retryTimes(2); +// Reset bar.js to its original state after all tests +afterAll(() => { + const barJsPath = path.join(cwd('correct_with_assets'), 'app/assets/bar.js'); + fs.writeFileSync(barJsPath, "console.log('bar');\n"); +}); + describe('Happy path', () => { test('sync assets', async () => {