diff --git a/.env b/.env
new file mode 100644
index 0000000..ad2b912
--- /dev/null
+++ b/.env
@@ -0,0 +1,17 @@
+# CSV Publisher Configuration
+# This file is for local testing only!
+# $ node csvPublisher.js --publishArtifacts=true
+
+# Platform URL
+GALLOPER_URL=
+
+# Project ID
+project_id=
+
+# Authentication token
+token=
+# Results bucket name
+RESULTS_BUCKET=
+
+# Report ID (will be used as zip file name - e.g., test_results.zip)
+REPORT_ID=
diff --git a/csvPublisher.js b/csvPublisher.js
new file mode 100644
index 0000000..797ec4a
--- /dev/null
+++ b/csvPublisher.js
@@ -0,0 +1,336 @@
+#!/usr/bin/env node
+
+// Optional dotenv import - only if available (for local testing)
+try {
+  await import('dotenv/config');
+} catch (e) {
+  // dotenv not available, continue without it
+}
+
+import fs from 'fs';
+import path from 'path';
+import { fileURLToPath } from 'url';
+import archiver from 'archiver';
+
+const __filename = fileURLToPath(import.meta.url);
+
+/**
+ * CSV Publisher - Zips all files from /reports folder and publishes to Carrier platform
+ */
+class CSVPublisher {
+  constructor(config) {
+    // Use environment variables or .env values as defaults
+    this.url = config.url || process.env.GALLOPER_URL || '';
+    this.projectId = config.projectId || process.env.project_id || '';
+    this.authToken = config.authToken || process.env.token || '';
+    this.bucket = config.bucket || process.env.RESULTS_BUCKET || '';
+    this.reportId = config.reportId || process.env.REPORT_ID || '';
+    this.verbose = config.verbose || false;
+  }
+
+  /**
+   * Validate configuration
+   */
+  validate() {
+    const errors = [];
+
+    if (!this.url) errors.push('url (GALLOPER_URL)');
+    if (!this.projectId) errors.push('project_id');
+    if (!this.authToken) errors.push('auth_token (token)');
+    if (!this.bucket) errors.push('bucket (RESULTS_BUCKET)');
+    if (!this.reportId) errors.push('reportId (REPORT_ID)');
+
+    if (errors.length > 0) {
+      throw new Error(`Missing required configuration: ${errors.join(', ')}`);
+    }
+
+    return true;
+  }
+
+  /**
+   * Zip all files from reports folder
+   */
+  async zipReportsFolder() {
+    const reportsDir = path.join(process.cwd(), 'reports');
+
+    // Validate reports folder exists
+    if (!fs.existsSync(reportsDir)) {
+      throw new Error(`Reports folder not found: ${reportsDir}`);
+    }
+
+    // Check if there are files in reports folder
+    const files = fs.readdirSync(reportsDir).filter(file => {
+      const filePath = path.join(reportsDir, file);
+      return fs.statSync(filePath).isFile();
+    });
+
+    if (files.length === 0) {
+      throw new Error('No files found in /reports folder to zip');
+    }
+
+    if (this.verbose) {
+      console.log(`šŸ“ Found ${files.length} files in /reports folder`);
+      files.forEach(file => console.log(` - ${file}`));
+    }
+
+    // Ensure reportId has .zip extension
+    const zipFileName = this.reportId.endsWith('.zip') ? this.reportId : `${this.reportId}.zip`;
+    const zipFilePath = path.join(process.cwd(), zipFileName);
+
+    // Remove existing zip file if it exists
+    if (fs.existsSync(zipFilePath)) {
+      fs.unlinkSync(zipFilePath);
+      if (this.verbose) {
+        console.log(`šŸ—‘ļø Removed existing zip file: ${zipFileName}`);
+      }
+    }
+
+    if (this.verbose) {
+      console.log(`šŸ“¦ Creating zip file: ${zipFileName}`);
+    }
+
+    return new Promise((resolve, reject) => {
+      try {
+        // Create write stream for the zip file
+        const output = fs.createWriteStream(zipFilePath);
+        const archive = archiver('zip', {
+          zlib: { level: 9 } // Maximum compression
+        });
+
+        // Handle stream events
+        output.on('close', () => {
+          const zipStats = fs.statSync(zipFilePath);
+          if (this.verbose) {
+            console.log(`āœ… Zip file created successfully (${zipStats.size} bytes)`);
+          }
+          resolve(zipFilePath);
+        });
+
+        output.on('error', (err) => {
+          reject(new Error(`Failed to write zip file: ${err.message}`));
+        });
+
+        archive.on('error', (err) => {
+          reject(new Error(`Failed to create zip archive: ${err.message}`));
+        });
+
+        archive.on('warning', (err) => {
+          if (err.code === 'ENOENT') {
+            console.warn('Archive warning:', err);
+          } else {
+            reject(err);
+          }
+        });
+
+        // Pipe archive data to the file
+        archive.pipe(output);
+
+        // Add all files from reports directory
+        archive.directory(reportsDir, false);
+
+        // Finalize the archive
+        archive.finalize();
+
+      } catch (error) {
+        reject(new Error(`Failed to create zip file: ${error.message}`));
+      }
+    });
+  }
+
+  /**
+   * Publish zip file to Carrier platform
+   */
+  async publish() {
+    try {
+      this.validate();
+
+      if (this.verbose) {
+        console.log('šŸš€ Starting reports artifact publishing...');
+        console.log(`šŸ†” Report ID: ${this.reportId}`);
+        console.log(`🪣 Bucket: ${this.bucket}`);
+        console.log(`šŸ¢ Project ID: ${this.projectId}`);
+        console.log(`🌐 URL: ${this.url}`);
+      }
+
+      // Step 1: Zip all files from reports folder
+      const zipFilePath = await this.zipReportsFolder();
+      const zipFileName = path.basename(zipFilePath);
+
+      // Step 2: Upload zip file
+      const fileStats = fs.statSync(zipFilePath);
+      if (this.verbose) {
+        console.log(`šŸ“¤ Uploading: ${zipFileName} (${fileStats.size} bytes)`);
+      }
+
+      // Construct upload URL
+      const uploadUrl = `${this.url.replace(/\/$/, '')}/api/v1/artifacts/artifacts/default/${this.projectId}/${this.bucket}?integration_id=1&is_local=false`;
+
+      // Read file and create form data
+      const fileBuffer = fs.readFileSync(zipFilePath);
+      const fileBlob = new Blob([fileBuffer], { type: 'application/zip' });
+
+      const formData = new FormData();
+      formData.append('file', fileBlob, zipFileName);
+
+      // Make upload request
+      const response = await fetch(uploadUrl, {
+        method: 'POST',
+        headers: {
+          'Authorization': `Bearer ${this.authToken}`
+        },
+        body: formData
+      });
+
+      const responseBody = await response.text();
+
+      // Step 3: Clean up - remove zip file after upload
+      if (fs.existsSync(zipFilePath)) {
+        fs.unlinkSync(zipFilePath);
+        if (this.verbose) {
+          console.log(`šŸ—‘ļø Cleaned up temporary zip file: ${zipFileName}`);
+        }
+      }
+
+      if (response.status === 200) {
+        if (this.verbose) {
+          console.log('āœ… Zip file uploaded successfully');
+          console.log(`šŸ”— File location: ${this.url}/api/v1/artifacts/artifact/default/${this.projectId}/${this.bucket}/${zipFileName}`);
+        }
+
+        return {
+          success: true,
+          fileName: zipFileName,
+          bucket: this.bucket,
+          projectId: this.projectId,
+          fileUrl: `${this.url}/api/v1/artifacts/artifact/default/${this.projectId}/${this.bucket}/${zipFileName}`
+        };
+      } else {
+        throw new Error(`Upload failed. Status: ${response.status}, Response: ${responseBody}`);
+      }
+
+    } catch (error) {
+      if (this.verbose) {
+        console.error(`āŒ Error: ${error.message}`);
+      }
+      return {
+        success: false,
+        error: error.message
+      };
+    }
+  }
+}
+
+/**
+ * Parse command line arguments
+ */
+function parseArguments() {
+  const args = {};
+  process.argv.slice(2).forEach(arg => {
+    const [key, value] = arg.split('=');
+    if (key.startsWith('--')) {
+      const cleanKey = key.substring(2).toLowerCase();
+      args[cleanKey] = value === 'true' ? true : value === 'false' ? false : value || true;
+    }
+  });
+  return args;
+}
+
+/**
+ * Display usage information
+ */
+function showUsage() {
+  console.log(`
+šŸš€ CSV Publisher - Zip and Publish reports folder contents
+
+Usage: node csvPublisher.js --publishArtifacts=true/false
+
+Options:
+  --publishArtifacts=true/false  Enable/disable publishing (required)
+
+Configuration (via environment variables):
+  GALLOPER_URL    Platform URL (e.g., https://platform.getcarrier.io)
+  project_id      Project identifier
+  token           Authentication token
+  RESULTS_BUCKET  Target bucket name
+  REPORT_ID       Zip file name (e.g., 33a83cbd-9cfe-4385-87c2-37ae2762a7e3)
+
+How it works:
+  1. Zips all files from /reports folder
+  2. Names the zip file using REPORT_ID value (e.g., REPORT_ID.zip)
+  3. Uploads to the specified bucket
+  4. Cleans up temporary zip file
+
+For local testing:
+  Create a .env file in the project root with the above variables
+
+Examples:
+  # Publish reports (using environment variables or .env)
+  node csvPublisher.js --publishArtifacts=true
+
+  # Skip publishing
+  node csvPublisher.js --publishArtifacts=false
+
+  # In Docker container (environment variables are automatically available)
+  # REPORT_ID=33a83cbd-9cfe-4385-87c2-37ae2762a7e3
+  node csvPublisher.js --publishArtifacts=true
+  # Creates: 33a83cbd-9cfe-4385-87c2-37ae2762a7e3.zip
+`);
+}
+
+/**
+ * Main CLI execution function
+ */
+async function runCLI() {
+  const args = parseArguments();
+
+  // Show help if requested
+  if (args.help || args.h) {
+    showUsage();
+    process.exit(0);
+  }
+
+  // Check required argument
+  if (args.publishartifacts === undefined) {
+    console.error('āŒ Error: --publishArtifacts=true/false argument is required');
+    showUsage();
+    process.exit(1);
+  }
+
+  // Skip if publishArtifacts is false
+  if (args.publishartifacts === false) {
+    console.log('ā„¹ļø Reports publishing is disabled (publishArtifacts=false)');
+    process.exit(0);
+  }
+
+  try {
+    const config = {
+      verbose: true
+    };
+
+    const publisher = new CSVPublisher(config);
+    const result = await publisher.publish();
+
+    if (result.success) {
+      console.log(`\nšŸŽ‰ Successfully published reports as: ${result.fileName}`);
+      console.log(`šŸ”— Access URL: ${result.fileUrl}`);
+      process.exit(0);
+    } else {
+      console.error(`\nāŒ Publishing failed: ${result.error}`);
+      process.exit(1);
+    }
+
+  } catch (error) {
+    console.error(`šŸ’„ Fatal error: ${error.message}`);
+    process.exit(1);
+  }
+}
+
+// Check if this file is being run directly (not imported)
+if (process.argv[1] === __filename) {
+  runCLI().catch(error => {
+    console.error('šŸ’„ Unexpected error:', error);
+    process.exit(1);
+  });
+}
+
+export { CSVPublisher };
diff --git a/package-lock.json b/package-lock.json
index 283f75b..ccf7908 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,8 +9,10 @@
       "version": "12.8.2",
       "license": "Apache-2.0",
       "dependencies": {
+        "archiver": "^7.0.1",
         "chai": "^6.2.0",
         "csv-parser": "^3.2.0",
+        "dotenv": "^16.4.5",
         "es-main": "^1.4.0",
         "express": "^5.1.0",
         "lh-pptr-framework": "1.2.8",
@@ -1659,6 +1661,18 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
+    "node_modules/dotenv": {
+      "version": "16.6.1",
+      "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz",
+      "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==",
+      "license": "BSD-2-Clause",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://dotenvx.com"
+      }
+    },
     "node_modules/dunder-proto": {
       "version": "1.0.1",
       "license": "MIT",
diff --git a/package.json b/package.json
index 9c06c6b..b263534 100644
--- a/package.json
+++ b/package.json
@@ -14,6 +14,7 @@
     "lighthouse": "^12.8.2",
     "puppeteer": "^24.19.0",
     "chai": "^6.2.0",
+    "dotenv": "^16.4.5",
     "es-main": "^1.4.0",
     "express": "^5.1.0",
     "logform": "^2.7.0",
@@ -21,7 +22,8 @@
     "puppeteer-har": "^1.1.2",
     "winston": "^3.18.3",
     "csv-parser": "^3.2.0",
-    "request": "^2.88.2"
+    "request": "^2.88.2",
+    "archiver": "^7.0.1"
   },
   "license": "Apache-2.0"
 }
diff --git a/test/demo.test.steps.js b/test/demo.test.steps.js
index 44d75e5..96c1b57 100644
--- a/test/demo.test.steps.js
+++ b/test/demo.test.steps.js
@@ -22,7 +22,19 @@ const customBeforeHook = async () => {
 before(customBeforeHook);
 beforeEach(beforeEachHook);
 afterEach(afterEachHook);
-after(afterHook);
+
+after(async function() {
+  await afterHook.call(this);
+
+  // Check if publishArtifacts flag is passed via command line
+  const publishArtifacts = process.argv.includes('--publishArtifacts=true');
+
+  if (publishArtifacts) {
+    const { CSVPublisher } = await import('../csvPublisher.js');
+    const publisher = new CSVPublisher({ verbose: true });
+    await publisher.publish();
+  }
+});
 
 it(`[N]_${Home.getURL()}`, async function () {
   await Home.navigationValidate(browser, this)