diff --git a/app.js b/app.js
index 38b399f..53261c8 100644
--- a/app.js
+++ b/app.js
@@ -11,18 +11,60 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
-
 'use strict';
+const { Storage } = require('@google-cloud/storage');
+const bucketName = 'documents_database';
+const storage = new Storage();
 
 // [START gae_flex_quickstart]
 const express = require('express');
 const app = express();
 
+async function downloadFile(fileName, destFileName) {
+  const options = {
+    destination: destFileName,
+  };
+
+  const bucket = storage.bucket(bucketName);
+  // Downloads the object from the bucket to the local destination path
+  await bucket.file(fileName).download(options);
+
+  console.log(
+    `gs://${bucketName}/${fileName} downloaded to ${destFileName}.`
+  );
+}
+
+
 app.get('/', (req, res) => {
   res.status(200).send('Hello, world!').end();
 });
 
+app.get('/download', async (req, res) => {
+  const { filename, destination } = req.query;
+
+  if (!filename) {
+    return res.status(400).send('Filename is required.');
+  }
+
+  const file = storage.bucket(bucketName).file(filename);
+
+  try {
+    const exists = await file.exists();
+    if (exists[0]) {
+      const destFileName = destination ? destination : `./${filename}`;
+      await downloadFile(filename, destFileName);
+      res.status(200).send(`File ${filename} downloaded successfully to ${destFileName}.`);
+    } else {
+      res.status(404).send(`File ${filename} not found.`);
+    }
+  } catch (err) {
+    console.error('Error:', err);
+    res.status(500).send('Internal Server Error');
+  }
+});
+
+
 // Start the server
 const PORT = parseInt(process.env.PORT) || 8083;
 app.listen(PORT, () => {
@@ -30,5 +72,5 @@ app.listen(PORT, () => {
   console.log('Press Ctrl+C to quit.');
 });
 // [END gae_flex_quickstart]
-
+
 module.exports = app;
diff --git a/fisier_exemplu.txt b/fisier_exemplu.txt
new file mode 100644
index 0000000..6d12755
--- /dev/null
+++ b/fisier_exemplu.txt
@@ -0,0 +1,3 @@
+exemplu text
+123
+a b c
\ No newline at end of file
diff --git a/node_modules/.bin/fxparser b/node_modules/.bin/fxparser
new file mode 100644
index 0000000..a423d59
--- /dev/null
+++ b/node_modules/.bin/fxparser
@@ -0,0 +1,12 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../fast-xml-parser/src/cli/cli.js" "$@"
+else
+  exec node "$basedir/../fast-xml-parser/src/cli/cli.js" "$@"
+fi
diff --git a/node_modules/.bin/fxparser.cmd b/node_modules/.bin/fxparser.cmd
new file mode 100644
index 0000000..043b763
--- /dev/null
+++ b/node_modules/.bin/fxparser.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\fast-xml-parser\src\cli\cli.js" %*
diff --git a/node_modules/.bin/fxparser.ps1 b/node_modules/.bin/fxparser.ps1
new file mode 100644
index 0000000..3e7252a
--- /dev/null
+++ b/node_modules/.bin/fxparser.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if
(Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../fast-xml-parser/src/cli/cli.js" $args + } else { + & "$basedir/node$exe" "$basedir/../fast-xml-parser/src/cli/cli.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../fast-xml-parser/src/cli/cli.js" $args + } else { + & "node$exe" "$basedir/../fast-xml-parser/src/cli/cli.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.bin/uuid b/node_modules/.bin/uuid new file mode 100644 index 0000000..c3ec003 --- /dev/null +++ b/node_modules/.bin/uuid @@ -0,0 +1,12 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../uuid/dist/bin/uuid" "$@" +else + exec node "$basedir/../uuid/dist/bin/uuid" "$@" +fi diff --git a/node_modules/.bin/uuid.cmd b/node_modules/.bin/uuid.cmd new file mode 100644 index 0000000..0f2376e --- /dev/null +++ b/node_modules/.bin/uuid.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\uuid\dist\bin\uuid" %* diff --git a/node_modules/.bin/uuid.ps1 b/node_modules/.bin/uuid.ps1 new file mode 100644 index 0000000..7804628 --- /dev/null +++ b/node_modules/.bin/uuid.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } else { + & "$basedir/node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } else { + & "node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index 94bcbae..21e663a 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -10,6 +10,72 @@ "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, + "node_modules/@google-cloud/paginator": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.0.tgz", + "integrity": "sha512-87aeg6QQcEPxGCOthnpUjvw4xAZ57G7pL8FS0C4e/81fr3FjkpUpibf1s2v5XGyGhUVGF4Jfg7yEcxqn2iUw1w==", + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/projectify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz", + "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==", + "engines": { + "node": ">=14.0.0" + } + }, + 
"node_modules/@google-cloud/promisify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz", + "integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/storage": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.9.0.tgz", + "integrity": "sha512-PlFl7g3r91NmXtZHXsSEfTZES5ysD3SSBWmX4iBdQ2TFH7tN/Vn/IhnVELCHtgh1vc+uYPZ7XvRYaqtDCdghIA==", + "dependencies": { + "@google-cloud/paginator": "^5.0.0", + "@google-cloud/projectify": "^4.0.0", + "@google-cloud/promisify": "^4.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "compressible": "^2.0.12", + "duplexify": "^4.1.3", + "ent": "^2.2.0", + "fast-xml-parser": "^4.3.0", + "gaxios": "^6.0.2", + "google-auth-library": "^9.6.3", + "mime": "^3.0.0", + "mime-types": "^2.0.8", + "p-limit": "^3.0.1", + "retry-request": "^7.0.0", + "teeny-request": "^9.0.0", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/storage/node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", @@ -44,12 +110,73 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/@types/caseless": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", + "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==" + }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", "dev": true }, + "node_modules/@types/node": { + "version": "20.12.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.2.tgz", + "integrity": "sha512-zQ0NYO87hyN6Xrclcqp7f8ZbXNbRfoGWNcMvHTPQp9UUrwI0mI7XBz+cu7/W6/VClYo2g63B0cjull/srU7LgQ==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/request": { + "version": "2.48.12", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.12.tgz", + "integrity": "sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw==", + "dependencies": { + "@types/caseless": "*", + "@types/node": "*", + "@types/tough-cookie": "*", + "form-data": "^2.5.0" + } + }, + "node_modules/@types/request/node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", 
+ "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==" + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, "node_modules/accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -62,6 +189,38 @@ "node": ">= 0.6" } }, + "node_modules/agent-base": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/agent-base/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/agent-base/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, "node_modules/ansi-colors": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", @@ -119,17 +278,32 @@ "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, + "node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "engines": { + "node": ">=8" + } + }, "node_modules/asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", "dev": true }, + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dependencies": { + "retry": "0.13.1" + } + }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dev": true + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/balanced-match": { "version": "1.0.2", @@ -137,6 +311,33 @@ "integrity": 
"sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bignumber.js": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz", + "integrity": "sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==", + "engines": { + "node": "*" + } + }, "node_modules/binary-extensions": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", @@ -199,6 +400,11 @@ "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" + }, "node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -342,7 +548,6 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, "dependencies": { "delayed-stream": "~1.0.0" }, @@ -359,6 +564,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "dependencies": { + "mime-db": ">= 1.43.0 < 2" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -463,7 +679,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "dev": true, "engines": { "node": ">=0.4.0" } @@ -504,6 +719,25 @@ "node": ">=0.3.1" } }, + "node_modules/duplexify": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", + "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.2" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": 
"https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -523,6 +757,19 @@ "node": ">= 0.8" } }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/ent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", + "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" + }, "node_modules/es-define-property": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", @@ -576,6 +823,14 @@ "node": ">= 0.6" } }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/express": { "version": "4.19.2", "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", @@ -617,12 +872,38 @@ "node": ">= 0.10.0" } }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, "node_modules/fast-safe-stringify": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", "dev": true }, + "node_modules/fast-xml-parser": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.3.6.tgz", + "integrity": "sha512-M2SovcRxD4+vC493Uc2GZVcZaj66CCJhWurC4viynVSTvrpErCShNcDz1lAho6n9REQKvL/ll4A4/fw6Y9z8nw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, "node_modules/fill-range": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", @@ -749,6 +1030,32 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gaxios": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.3.0.tgz", + "integrity": "sha512-p+ggrQw3fBwH2F5N/PAI4k/G/y1art5OxKpb2J2chwNNHM4hHuAOtivjPuirMF4KNKwTTUal/lPfL2+7h2mEcg==", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gcp-metadata": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", + "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -807,6 +1114,22 @@ "node": ">= 6" } }, + "node_modules/google-auth-library": { + "version": 
"9.7.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.7.0.tgz", + "integrity": "sha512-I/AvzBiUXDzLOy4iIZ2W+Zq33W4lcukQv1nl7C8WUA6SQwyQwUwu3waNmWNAvzds//FG8SZ+DnKnW/2k6mQS8A==", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.1.1", + "gcp-metadata": "^6.1.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14" + } + }, "node_modules/gopd": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", @@ -818,6 +1141,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gtoken": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", + "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -910,6 +1245,84 @@ "node": ">= 0.8" } }, + "node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/http-proxy-agent/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/http-proxy-agent/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/http-proxy-agent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/https-proxy-agent": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/https-proxy-agent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, "node_modules/iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", @@ -1004,6 +1417,17 @@ "node": ">=8" } }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-unicode-supported": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", @@ -1070,6 +1494,33 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, + "node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -1321,6 +1772,25 @@ "node": ">= 0.6" } }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -1353,7 +1823,6 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, "dependencies": { "wrappy": "1" } @@ -1362,7 +1831,6 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, "dependencies": { "yocto-queue": "^0.1.0" }, @@ -1497,6 +1965,19 @@ "node": ">= 0.8" } }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + 
"node": ">= 6" + } + }, "node_modules/readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", @@ -1518,6 +1999,27 @@ "node": ">=0.10.0" } }, + "node_modules/retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/retry-request": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz", + "integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==", + "dependencies": { + "@types/request": "^2.48.8", + "extend": "^3.0.2", + "teeny-request": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, "node_modules/rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", @@ -1738,6 +2240,27 @@ "node": ">= 0.8" } }, + "node_modules/stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "dependencies": { + "stubs": "^3.0.0" + } + }, + "node_modules/stream-shift": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", + "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==" + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, "node_modules/string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", @@ -1776,6 +2299,16 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strnum": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" + }, + "node_modules/stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + }, "node_modules/superagent": { "version": "8.1.2", "resolved": "https://registry.npmjs.org/superagent/-/superagent-8.1.2.tgz", @@ -1857,6 +2390,77 @@ "node": ">=8" } }, + "node_modules/teeny-request": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", + "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.9", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/teeny-request/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, 
+ "node_modules/teeny-request/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/teeny-request/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/teeny-request/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/teeny-request/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/test-exclude": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", @@ -1933,6 +2537,11 @@ "node": ">=0.6" } }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -1945,6 +2554,11 @@ "node": ">= 0.6" } }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, "node_modules/unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", @@ -1953,6 +2567,11 @@ "node": ">= 0.8" } }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -1961,6 +2580,14 @@ "node": ">= 0.4.0" } }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/v8-to-istanbul": { "version": "9.2.0", "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz", @@ -1983,6 +2610,20 @@ "node": ">= 0.8" } }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": 
"sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -2024,8 +2665,7 @@ "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "node_modules/y18n": { "version": "5.0.8", @@ -2088,7 +2728,6 @@ "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true, "engines": { "node": ">=10" }, diff --git a/node_modules/@google-cloud/paginator/CHANGELOG.md b/node_modules/@google-cloud/paginator/CHANGELOG.md new file mode 100644 index 0000000..1ad97fe --- /dev/null +++ b/node_modules/@google-cloud/paginator/CHANGELOG.md @@ -0,0 +1,248 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/nodejs-paginator?activeTab=versions + +## [5.0.0](https://github.com/googleapis/nodejs-paginator/compare/v4.0.1...v5.0.0) (2023-08-09) + + +### ⚠ BREAKING CHANGES + +* update to Node 14 ([#346](https://github.com/googleapis/nodejs-paginator/issues/346)) + +### Miscellaneous Chores + +* Update to Node 14 ([#346](https://github.com/googleapis/nodejs-paginator/issues/346)) ([262ad70](https://github.com/googleapis/nodejs-paginator/commit/262ad70d3cc5e1aa8a67ece54c04920b24ceea09)) + +## [4.0.1](https://github.com/googleapis/nodejs-paginator/compare/v4.0.0...v4.0.1) (2022-09-09) + + +### Bug Fixes + +* Remove pip install statements ([#1546](https://github.com/googleapis/nodejs-paginator/issues/1546)) ([#329](https://github.com/googleapis/nodejs-paginator/issues/329)) ([697567b](https://github.com/googleapis/nodejs-paginator/commit/697567bdd86226b740304734b9562a2f2241a96f)) + +## [4.0.0](https://github.com/googleapis/nodejs-paginator/compare/v3.0.7...v4.0.0) (2022-05-17) + + +### ⚠ BREAKING CHANGES + +* update library to use Node 12 (#325) + +### Build System + +* update library to use Node 12 ([#325](https://github.com/googleapis/nodejs-paginator/issues/325)) ([02887ae](https://github.com/googleapis/nodejs-paginator/commit/02887ae2b370bff18cae7fe1d434ecdf663b5748)) + +### [3.0.7](https://github.com/googleapis/nodejs-paginator/compare/v3.0.6...v3.0.7) (2022-02-14) + + +### Bug Fixes + +* update signature of end to comply with update node types definition ([#311](https://github.com/googleapis/nodejs-paginator/issues/311)) ([79e6fbd](https://github.com/googleapis/nodejs-paginator/commit/79e6fbdae5008d874613d2919a6cf723708fc919)) + +### [3.0.6](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.5...v3.0.6) (2021-09-09) + + +### Bug Fixes + +* **build:** switch primary branch to main ([#287](https://www.github.com/googleapis/nodejs-paginator/issues/287)) 
([1b796f3](https://www.github.com/googleapis/nodejs-paginator/commit/1b796f3377174354a62b7475d16f52213197f650)) + +### [3.0.5](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.4...v3.0.5) (2020-09-02) + + +### Bug Fixes + +* add configs by running synthtool ([#241](https://www.github.com/googleapis/nodejs-paginator/issues/241)) ([643593a](https://www.github.com/googleapis/nodejs-paginator/commit/643593ae9ffb8febff69a7bdae19239f5bcb1266)) + +### [3.0.4](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.3...v3.0.4) (2020-08-06) + + +### Bug Fixes + +* destroy ResourceStream with pre-flight error ([#236](https://www.github.com/googleapis/nodejs-paginator/issues/236)) ([d57beb4](https://www.github.com/googleapis/nodejs-paginator/commit/d57beb424d875a7bf502d458cc208f1bbe47a42a)) + +### [3.0.3](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.2...v3.0.3) (2020-07-24) + + +### Bug Fixes + +* move gitattributes files to node templates ([#234](https://www.github.com/googleapis/nodejs-paginator/issues/234)) ([30e881c](https://www.github.com/googleapis/nodejs-paginator/commit/30e881ce7415749b93b6b7e4e71745ea3fb248b6)) + +### [3.0.2](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.1...v3.0.2) (2020-07-06) + + +### Bug Fixes + +* update node issue template ([#221](https://www.github.com/googleapis/nodejs-paginator/issues/221)) ([088153c](https://www.github.com/googleapis/nodejs-paginator/commit/088153c4fca6d53e2e5ef4bb42365ce5493b913d)) + +### [3.0.1](https://www.github.com/googleapis/nodejs-paginator/compare/v3.0.0...v3.0.1) (2020-05-20) + + +### Bug Fixes + +* apache license URL ([#468](https://www.github.com/googleapis/nodejs-paginator/issues/468)) ([#211](https://www.github.com/googleapis/nodejs-paginator/issues/211)) ([f343b7f](https://www.github.com/googleapis/nodejs-paginator/commit/f343b7f7e184fd1b453f20ac1463d17520aac7ad)) + +## [3.0.0](https://www.github.com/googleapis/nodejs-paginator/compare/v2.0.3...v3.0.0) (2020-03-25) + + +### ⚠ BREAKING CHANGES + +* **dep:** upgrade gts 2.0.0 (#194) +* **deps:** deprecated node 8 to 10; upgrade typescript + +### Miscellaneous Chores + +* **dep:** upgrade gts 2.0.0 ([#194](https://www.github.com/googleapis/nodejs-paginator/issues/194)) ([4eaf9be](https://www.github.com/googleapis/nodejs-paginator/commit/4eaf9bed1fcfd0f10e877ff15c1d0e968e3356c8)) +* **deps:** deprecated node 8 to 10; upgrade typescript ([f6434ab](https://www.github.com/googleapis/nodejs-paginator/commit/f6434ab9cacb6ab804c070f19c38b6072ca326b5)) + +### [2.0.3](https://www.github.com/googleapis/nodejs-paginator/compare/v2.0.2...v2.0.3) (2019-12-05) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([e06e1b0](https://www.github.com/googleapis/nodejs-paginator/commit/e06e1b0a2e2bb1cf56fc806c1703b8b5e468b954)) + +### [2.0.2](https://www.github.com/googleapis/nodejs-paginator/compare/v2.0.1...v2.0.2) (2019-11-13) + + +### Bug Fixes + +* **docs:** add jsdoc-region-tag plugin ([#155](https://www.github.com/googleapis/nodejs-paginator/issues/155)) ([b983799](https://www.github.com/googleapis/nodejs-paginator/commit/b98379905848fd179c6268aff3e1cfaf2bf76663)) + +### [2.0.1](https://www.github.com/googleapis/nodejs-paginator/compare/v2.0.0...v2.0.1) (2019-08-25) + + +### Bug Fixes + +* **deps:** use the latest extend ([#141](https://www.github.com/googleapis/nodejs-paginator/issues/141)) ([61b383e](https://www.github.com/googleapis/nodejs-paginator/commit/61b383e)) + +## 
[2.0.0](https://www.github.com/googleapis/nodejs-paginator/compare/v1.0.2...v2.0.0) (2019-07-12) + + +### ⚠ BREAKING CHANGES + +* rewrite streaming logic (#136) + +### Code Refactoring + +* rewrite streaming logic ([#136](https://www.github.com/googleapis/nodejs-paginator/issues/136)) ([641d82d](https://www.github.com/googleapis/nodejs-paginator/commit/641d82d)) + +### [1.0.2](https://www.github.com/googleapis/nodejs-paginator/compare/v1.0.1...v1.0.2) (2019-06-26) + + +### Bug Fixes + +* **docs:** link to reference docs section on googleapis.dev ([#132](https://www.github.com/googleapis/nodejs-paginator/issues/132)) ([be231be](https://www.github.com/googleapis/nodejs-paginator/commit/be231be)) + +### [1.0.1](https://www.github.com/googleapis/nodejs-paginator/compare/v1.0.0...v1.0.1) (2019-06-14) + + +### Bug Fixes + +* **docs:** move to new client docs URL ([#129](https://www.github.com/googleapis/nodejs-paginator/issues/129)) ([689f483](https://www.github.com/googleapis/nodejs-paginator/commit/689f483)) + +## [1.0.0](https://www.github.com/googleapis/nodejs-paginator/compare/v0.2.0...v1.0.0) (2019-05-03) + + +### Bug Fixes + +* **deps:** update dependency arrify to v2 ([#109](https://www.github.com/googleapis/nodejs-paginator/issues/109)) ([9f06c83](https://www.github.com/googleapis/nodejs-paginator/commit/9f06c83)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#115](https://www.github.com/googleapis/nodejs-paginator/issues/115)) ([0921076](https://www.github.com/googleapis/nodejs-paginator/commit/0921076)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#115) + +## v0.2.0 + +03-08-2019 12:15 PST + +### New Features +- feat: handle promise based functions ([#91](https://github.com/googleapis/nodejs-paginator/pull/91)) +- refactor(ts): create generic for object streams ([#101](https://github.com/googleapis/nodejs-paginator/pull/101)) + +### Dependencies +- chore(deps): update dependency through2 to v3 ([#53](https://github.com/googleapis/nodejs-paginator/pull/53)) +- chore(deps): update dependency @types/is to v0.0.21 ([#55](https://github.com/googleapis/nodejs-paginator/pull/55)) +- chore(deps): update dependency gts to ^0.9.0 ([#57](https://github.com/googleapis/nodejs-paginator/pull/57)) +- fix: Pin @types/sinon to last compatible version ([#61](https://github.com/googleapis/nodejs-paginator/pull/61)) +- refactor: trim a few dependencies ([#60](https://github.com/googleapis/nodejs-paginator/pull/60)) +- chore(deps): update dependency @types/sinon to v5.0.7 ([#62](https://github.com/googleapis/nodejs-paginator/pull/62)) +- chore(deps): update dependency @types/sinon to v7 ([#81](https://github.com/googleapis/nodejs-paginator/pull/81)) +- chore(deps): update dependency mocha to v6 + +### Documentation +- docs: add lint/fix example to contributing guide ([#85](https://github.com/googleapis/nodejs-paginator/pull/85)) +- chore: move CONTRIBUTING.md to root ([#87](https://github.com/googleapis/nodejs-paginator/pull/87)) +- docs: update links in contrib guide ([#94](https://github.com/googleapis/nodejs-paginator/pull/94)) +- docs: update contributing path in README ([#88](https://github.com/googleapis/nodejs-paginator/pull/88)) + +### Internal / Testing Changes +- chore: include build in eslintignore ([#49](https://github.com/googleapis/nodejs-paginator/pull/49)) +- chore: update CircleCI config ([#52](https://github.com/googleapis/nodejs-paginator/pull/52)) +- chore: use latest npm on Windows ([#54](https://github.com/googleapis/nodejs-paginator/pull/54)) +- 
chore: update eslintignore config ([#56](https://github.com/googleapis/nodejs-paginator/pull/56)) +- chore: add synth.metadata +- fix(build): fix system key decryption ([#64](https://github.com/googleapis/nodejs-paginator/pull/64)) +- chore: update license file ([#68](https://github.com/googleapis/nodejs-paginator/pull/68)) +- chore(build): update prettier config ([#69](https://github.com/googleapis/nodejs-paginator/pull/69)) +- chore: nyc ignore build/test by default ([#71](https://github.com/googleapis/nodejs-paginator/pull/71)) +- chore: always nyc report before calling codecov ([#72](https://github.com/googleapis/nodejs-paginator/pull/72)) +- build: add Kokoro configs for autorelease ([#75](https://github.com/googleapis/nodejs-paginator/pull/75)) +- fix(build): fix Kokoro release script ([#76](https://github.com/googleapis/nodejs-paginator/pull/76)) +- chore: fix publish.sh permission +x ([#77](https://github.com/googleapis/nodejs-paginator/pull/77)) +- chore: update nyc and eslint configs ([#79](https://github.com/googleapis/nodejs-paginator/pull/79)) +- chore(build): inject yoshi automation key ([#80](https://github.com/googleapis/nodejs-paginator/pull/80)) +- build: check broken links in generated docs ([#82](https://github.com/googleapis/nodejs-paginator/pull/82)) +- build: ignore googleapis.com in doc link check ([#84](https://github.com/googleapis/nodejs-paginator/pull/84)) +- build: test using @grpc/grpc-js in CI ([#89](https://github.com/googleapis/nodejs-paginator/pull/89)) +- build: create docs test npm scripts ([#90](https://github.com/googleapis/nodejs-paginator/pull/90)) +- build: use linkinator for docs test ([#93](https://github.com/googleapis/nodejs-paginator/pull/93)) +- build: update release configuration +- build: fix types for sinon ([#98](https://github.com/googleapis/nodejs-paginator/pull/98)) +- build: use node10 to run samples-test, system-test etc ([#97](https://github.com/googleapis/nodejs-paginator/pull/97)) +- build: Add docuploader credentials to node publish jobs ([#99](https://github.com/googleapis/nodejs-paginator/pull/99)) + +## v0.1.2 + +### Bug fixes +- fix: call limiter.makeRequest() instead of original method ([#43](https://github.com/googleapis/nodejs-paginator/pull/43)) + +### Internal / Testing Changes +- chore: update issue templates ([#42](https://github.com/googleapis/nodejs-paginator/pull/42)) +- chore: remove old issue template ([#40](https://github.com/googleapis/nodejs-paginator/pull/40)) +- build: run tests on node11 ([#39](https://github.com/googleapis/nodejs-paginator/pull/39)) +- chores(build): run codecov on continuous builds ([#36](https://github.com/googleapis/nodejs-paginator/pull/36)) +- chores(build): do not collect sponge.xml from windows builds ([#37](https://github.com/googleapis/nodejs-paginator/pull/37)) +- chore: update new issue template ([#35](https://github.com/googleapis/nodejs-paginator/pull/35)) +- chore(deps): update dependency sinon to v7 ([#31](https://github.com/googleapis/nodejs-paginator/pull/31)) +- build: fix codecov uploading on Kokoro ([#32](https://github.com/googleapis/nodejs-paginator/pull/32)) +- Update kokoro config ([#29](https://github.com/googleapis/nodejs-paginator/pull/29)) +- Update CI config ([#27](https://github.com/googleapis/nodejs-paginator/pull/27)) +- Don't publish sourcemaps ([#25](https://github.com/googleapis/nodejs-paginator/pull/25)) +- build: prevent system/sample-test from leaking credentials +- Update kokoro config ([#23](https://github.com/googleapis/nodejs-paginator/pull/23)) +- 
test: remove appveyor config ([#22](https://github.com/googleapis/nodejs-paginator/pull/22)) +- Update CI config ([#21](https://github.com/googleapis/nodejs-paginator/pull/21)) +- Enable prefer-const in the eslint config ([#20](https://github.com/googleapis/nodejs-paginator/pull/20)) +- Enable no-var in eslint ([#19](https://github.com/googleapis/nodejs-paginator/pull/19)) +- Update CI config ([#18](https://github.com/googleapis/nodejs-paginator/pull/18)) + +## v0.1.1 + +### Internal / Testing Changes +- Add synth script and update CI config (#14) +- chore(deps): update dependency nyc to v13 (#12) +- chore: ignore package-lock.json (#11) +- chore(deps): lock file maintenance (#10) +- chore: update renovate config (#9) +- remove that whitespace (#8) +- chore(deps): lock file maintenance (#7) +- chore(deps): update dependency typescript to v3 (#6) +- chore: assert.deelEqual => assert.deepStrictEqual (#5) +- chore: move mocha options to mocha.opts (#4) diff --git a/node_modules/@google-cloud/paginator/LICENSE b/node_modules/@google-cloud/paginator/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/node_modules/@google-cloud/paginator/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@google-cloud/paginator/README.md b/node_modules/@google-cloud/paginator/README.md new file mode 100644 index 0000000..e85b650 --- /dev/null +++ b/node_modules/@google-cloud/paginator/README.md @@ -0,0 +1,136 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Cloud Common Paginator: Node.js Client](https://github.com/googleapis/nodejs-paginator) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/@google-cloud/paginator.svg)](https://www.npmjs.org/package/@google-cloud/paginator) + + + + +A result paging utility used by Google node.js modules + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/nodejs-paginator/blob/main/CHANGELOG.md). 
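Editor's note: as a quick orientation before the reference links and the Quickstart below, the sketch that follows shows how a library class with a callback-style, `nextQuery`-reporting method might be auto-paginated and streamified with this package. The `Catalog` class and its data are invented for illustration; only the `paginator.extend()` and `paginator.streamify()` calls reflect the API documented in this package.

```javascript
const {paginator} = require('@google-cloud/paginator');

// Illustrative class: a paginated method must accept (query, callback)
// and report (err, results, nextQuery), with nextQuery === null on the last page.
class Catalog {
  getItems(query, callback) {
    const lastPage = query.pageToken === 2;
    const results = lastPage ? ['c'] : ['a', 'b'];
    const nextQuery = lastPage ? null : {pageToken: 2};
    setImmediate(() => callback(null, results, nextQuery));
  }
}

// Auto-paginate getItems (the original is kept as getItems_) and add a streaming variant.
paginator.extend(Catalog, ['getItems']);
Catalog.prototype.getItemsStream = paginator.streamify('getItems');

const catalog = new Catalog();
catalog.getItems({}, (err, items) => console.log(items)); // ['a', 'b', 'c']
catalog.getItemsStream({}).on('data', item => console.log(item)); // 'a', 'b', 'c'
```

After `extend()`, callers receive every page buffered into a single callback, while the original page-by-page method remains available under the `getItems_` name.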
+ +* [Google Cloud Common Paginator Node.js Client API Reference][client-docs] + +* [github.com/googleapis/nodejs-paginator](https://github.com/googleapis/nodejs-paginator) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install @google-cloud/paginator +``` + + +### Using the client library + +```javascript +const {paginator} = require('@google-cloud/paginator'); +console.log(paginator); + +``` + + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-paginator/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. + +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Quickstart | [source code](https://github.com/googleapis/nodejs-paginator/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-paginator&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | +| Streamify | [source code](https://github.com/googleapis/nodejs-paginator/blob/main/samples/streamify.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-paginator&page=editor&open_in_editor=samples/streamify.js,samples/README.md) | + + + +The [Google Cloud Common Paginator Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://github.com/nodejs/release#release-schedule). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install @google-cloud/paginator@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. 
+ + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-paginator/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). + +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/nodejs-paginator/blob/main/LICENSE) + +[client-docs]: https://cloud.google.com/nodejs/docs/reference/paginator/latest + +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/@google-cloud/paginator/build/src/index.d.ts b/node_modules/@google-cloud/paginator/build/src/index.d.ts new file mode 100644 index 0000000..7a090bd --- /dev/null +++ b/node_modules/@google-cloud/paginator/build/src/index.d.ts @@ -0,0 +1,131 @@ +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/// +import { TransformOptions } from 'stream'; +import { ResourceStream } from './resource-stream'; +export interface ParsedArguments extends TransformOptions { + /** + * Query object. This is most commonly an object, but to make the API more + * simple, it can also be a string in some places. + */ + query?: ParsedArguments; + /** + * Callback function. + */ + callback?: Function; + /** + * Auto-pagination enabled. + */ + autoPaginate?: boolean; + /** + * Maximum API calls to make. + */ + maxApiCalls?: number; + /** + * Maximum results to return. + */ + maxResults?: number; + pageSize?: number; + streamOptions?: ParsedArguments; +} +/*! Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +export declare class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. + * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. + */ + extend(Class: Function, methodNames: string | string[]): void; + /** + * Wraps paginated API calls in a readable object stream. 
+ * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + streamify(methodName: string): (this: { + [index: string]: Function; + }, ...args: any[]) => ResourceStream; + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. + */ + parseArguments_(args: any[]): ParsedArguments; + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments: ParsedArguments, originalMethod: Function): any; + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. + */ + runAsStream_(parsedArguments: ParsedArguments, originalMethod: Function): ResourceStream; +} +declare const paginator: Paginator; +export { paginator }; +export { ResourceStream }; diff --git a/node_modules/@google-cloud/paginator/build/src/index.js b/node_modules/@google-cloud/paginator/build/src/index.js new file mode 100644 index 0000000..45f47e4 --- /dev/null +++ b/node_modules/@google-cloud/paginator/build/src/index.js @@ -0,0 +1,206 @@ +"use strict"; +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ResourceStream = exports.paginator = exports.Paginator = void 0; +/*! + * @module common/paginator + */ +const arrify = require("arrify"); +const extend = require("extend"); +const resource_stream_1 = require("./resource-stream"); +Object.defineProperty(exports, "ResourceStream", { enumerable: true, get: function () { return resource_stream_1.ResourceStream; } }); +/*! Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. + * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. + */ + // tslint:disable-next-line:variable-name + extend(Class, methodNames) { + methodNames = arrify(methodNames); + methodNames.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + // map the original method to a private member + Class.prototype[methodName + '_'] = originalMethod; + // overwrite the original to auto-paginate + /* eslint-disable @typescript-eslint/no-explicit-any */ + Class.prototype[methodName] = function (...args) { + const parsedArguments = paginator.parseArguments_(args); + return paginator.run_(parsedArguments, originalMethod.bind(this)); + }; + }); + } + /** + * Wraps paginated API calls in a readable object stream. + * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + streamify(methodName) { + return function ( + /* eslint-disable @typescript-eslint/no-explicit-any */ + ...args) { + const parsedArguments = paginator.parseArguments_(args); + const originalMethod = this[methodName + '_'] || this[methodName]; + return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); + }; + } + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + parseArguments_(args) { + let query; + let autoPaginate = true; + let maxApiCalls = -1; + let maxResults = -1; + let callback; + const firstArgument = args[0]; + const lastArgument = args[args.length - 1]; + if (typeof firstArgument === 'function') { + callback = firstArgument; + } + else { + query = firstArgument; + } + if (typeof lastArgument === 'function') { + callback = lastArgument; + } + if (typeof query === 'object') { + query = extend(true, {}, query); + // Check if the user only asked for a certain amount of results. 
+ if (query.maxResults && typeof query.maxResults === 'number') { + // `maxResults` is used API-wide. + maxResults = query.maxResults; + } + else if (typeof query.pageSize === 'number') { + // `pageSize` is Pub/Sub's `maxResults`. + maxResults = query.pageSize; + } + if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { + maxApiCalls = query.maxApiCalls; + delete query.maxApiCalls; + } + // maxResults is the user specified limit. + if (maxResults !== -1 || query.autoPaginate === false) { + autoPaginate = false; + } + } + const parsedArguments = { + query: query || {}, + autoPaginate, + maxApiCalls, + maxResults, + callback, + }; + parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); + delete parsedArguments.streamOptions.autoPaginate; + delete parsedArguments.streamOptions.maxResults; + delete parsedArguments.streamOptions.pageSize; + return parsedArguments; + } + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments, originalMethod) { + const query = parsedArguments.query; + const callback = parsedArguments.callback; + if (!parsedArguments.autoPaginate) { + return originalMethod(query, callback); + } + const results = new Array(); + const promise = new Promise((resolve, reject) => { + paginator + .runAsStream_(parsedArguments, originalMethod) + .on('error', reject) + .on('data', (data) => results.push(data)) + .on('end', () => resolve(results)); + }); + if (!callback) { + return promise.then(results => [results]); + } + promise.then(results => callback(null, results), (err) => callback(err)); + } + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. 
+ */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + runAsStream_(parsedArguments, originalMethod) { + return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); + } +} +exports.Paginator = Paginator; +const paginator = new Paginator(); +exports.paginator = paginator; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/paginator/build/src/resource-stream.d.ts b/node_modules/@google-cloud/paginator/build/src/resource-stream.d.ts new file mode 100644 index 0000000..c12d7a7 --- /dev/null +++ b/node_modules/@google-cloud/paginator/build/src/resource-stream.d.ts @@ -0,0 +1,40 @@ +/*! + * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/// +import { Transform, Writable } from 'stream'; +import { ParsedArguments } from './'; +interface ResourceEvents { + addListener(event: 'data', listener: (data: T) => void): this; + emit(event: 'data', data: T): boolean; + on(event: 'data', listener: (data: T) => void): this; + once(event: 'data', listener: (data: T) => void): this; + prependListener(event: 'data', listener: (data: T) => void): this; + prependOnceListener(event: 'data', listener: (data: T) => void): this; + removeListener(event: 'data', listener: (data: T) => void): this; +} +export declare class ResourceStream extends Transform implements ResourceEvents { + _ended: boolean; + _maxApiCalls: number; + _nextQuery: {} | null; + _reading: boolean; + _requestFn: Function; + _requestsMade: number; + _resultsToSend: number; + constructor(args: ParsedArguments, requestFn: Function); + end(...args: any[]): ReturnType extends Writable ? this : void; + _read(): void; +} +export {}; diff --git a/node_modules/@google-cloud/paginator/build/src/resource-stream.js b/node_modules/@google-cloud/paginator/build/src/resource-stream.js new file mode 100644 index 0000000..0ec90c0 --- /dev/null +++ b/node_modules/@google-cloud/paginator/build/src/resource-stream.js @@ -0,0 +1,80 @@ +"use strict"; +/*! + * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ResourceStream = void 0; +const stream_1 = require("stream"); +class ResourceStream extends stream_1.Transform { + constructor(args, requestFn) { + const options = Object.assign({ objectMode: true }, args.streamOptions); + super(options); + this._ended = false; + this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; + this._nextQuery = args.query; + this._reading = false; + this._requestFn = requestFn; + this._requestsMade = 0; + this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; + } + /* eslint-disable @typescript-eslint/no-explicit-any */ + end(...args) { + this._ended = true; + return super.end(...args); + } + _read() { + if (this._reading) { + return; + } + this._reading = true; + // Wrap in a try/catch to catch input linting errors, e.g. + // an invalid BigQuery query. These errors are thrown in an + // async fashion, which makes them un-catchable by the user. + try { + this._requestFn(this._nextQuery, (err, results, nextQuery) => { + if (err) { + this.destroy(err); + return; + } + this._nextQuery = nextQuery; + if (this._resultsToSend !== Infinity) { + results = results.splice(0, this._resultsToSend); + this._resultsToSend -= results.length; + } + let more = true; + for (const result of results) { + if (this._ended) { + break; + } + more = this.push(result); + } + const isFinished = !this._nextQuery || this._resultsToSend < 1; + const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; + if (isFinished || madeMaxCalls) { + this.end(); + } + if (more && !this._ended) { + setImmediate(() => this._read()); + } + this._reading = false; + }); + } + catch (e) { + this.destroy(e); + } + } +} +exports.ResourceStream = ResourceStream; +//# sourceMappingURL=resource-stream.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/paginator/package.json b/node_modules/@google-cloud/paginator/package.json new file mode 100644 index 0000000..8454e97 --- /dev/null +++ b/node_modules/@google-cloud/paginator/package.json @@ -0,0 +1,57 @@ +{ + "name": "@google-cloud/paginator", + "version": "5.0.0", + "description": "A result paging utility used by Google node.js modules", + "main": "build/src/index.js", + "types": "build/src/index.d.ts", + "repository": "googleapis/nodejs-paginator", + "scripts": { + "test": "c8 mocha build/test", + "compile": "tsc -p .", + "fix": "gts fix", + "prelint": "cd samples; npm link ../; npm install", + "lint": "gts check", + "prepare": "npm run compile", + "pretest": "npm run compile", + "docs": "compodoc src/", + "presystem-test": "npm run compile", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "system-test": "mocha build/system-test", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "clean": "gts clean", + "precompile": "gts clean" + }, + "keywords": [], + "files": [ + "build/src", + "!build/src/**/*.map" + ], + "author": "Google Inc.", + "license": "Apache-2.0", + "devDependencies": { + "@compodoc/compodoc": "^1.1.7", + "@types/extend": "^3.0.0", + "@types/mocha": "^9.0.0", + "@types/node": "^20.4.9", + "@types/proxyquire": "^1.3.28", + "@types/sinon": "^10.0.0", + "@types/uuid": "^9.0.0", + "c8": "^8.0.0", + "codecov": "^3.0.4", + "gts": "^5.0.0", + "linkinator": "^4.0.0", + "mocha": "^9.2.2", + "proxyquire": "^2.0.1", + "sinon": "^15.0.0", + "typescript": "^5.1.6", + "uuid": "^9.0.0" + }, + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + 
"node": ">=14.0.0" + } +} diff --git a/node_modules/@google-cloud/projectify/CHANGELOG.md b/node_modules/@google-cloud/projectify/CHANGELOG.md new file mode 100644 index 0000000..328e4f3 --- /dev/null +++ b/node_modules/@google-cloud/projectify/CHANGELOG.md @@ -0,0 +1,196 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/@google-cloud/projectify?activeTab=versions +## [4.0.0](https://github.com/googleapis/nodejs-projectify/compare/v3.0.0...v4.0.0) (2023-08-09) + + +### ⚠ BREAKING CHANGES + +* upgrade to Node 14 ([#318](https://github.com/googleapis/nodejs-projectify/issues/318)) + +### Bug Fixes + +* Remove pip install statements ([#1546](https://github.com/googleapis/nodejs-projectify/issues/1546)) ([#304](https://github.com/googleapis/nodejs-projectify/issues/304)) ([94cfff6](https://github.com/googleapis/nodejs-projectify/commit/94cfff665b7c6b8916b5c59e1c7a3cca7ff29303)) + + +### Miscellaneous Chores + +* Upgrade to Node 14 ([#318](https://github.com/googleapis/nodejs-projectify/issues/318)) ([6e9da4d](https://github.com/googleapis/nodejs-projectify/commit/6e9da4db77fab7ed6876e755a72156960b376d57)) + +## [3.0.0](https://github.com/googleapis/nodejs-projectify/compare/v2.1.1...v3.0.0) (2022-05-20) + + +### ⚠ BREAKING CHANGES + +* update library to use Node 12 (#299) + +### Build System + +* update library to use Node 12 ([#299](https://github.com/googleapis/nodejs-projectify/issues/299)) ([83b63ca](https://github.com/googleapis/nodejs-projectify/commit/83b63ca8cb89086a8535a9fc8abd39e95f0cecd4)) + +### [2.1.1](https://www.github.com/googleapis/nodejs-projectify/compare/v2.1.0...v2.1.1) (2021-09-09) + + +### Bug Fixes + +* **build:** switch primary branch to main ([#267](https://www.github.com/googleapis/nodejs-projectify/issues/267)) ([9e8d6e4](https://www.github.com/googleapis/nodejs-projectify/commit/9e8d6e48c080806b42164d7be0bd11197996f245)) + +## [2.1.0](https://www.github.com/googleapis/nodejs-projectify/compare/v2.0.1...v2.1.0) (2021-06-10) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#245](https://www.github.com/googleapis/nodejs-projectify/issues/245)) ([30f0499](https://www.github.com/googleapis/nodejs-projectify/commit/30f0499ade5f140774c3aa672b44fd3538e72309)) + +### [2.0.1](https://www.github.com/googleapis/nodejs-projectify/compare/v2.0.0...v2.0.1) (2020-07-06) + + +### Bug Fixes + +* update node issue template ([#197](https://www.github.com/googleapis/nodejs-projectify/issues/197)) ([3406f2a](https://www.github.com/googleapis/nodejs-projectify/commit/3406f2aa431ed04541585b63c330c04270c602aa)) + +## [2.0.0](https://www.github.com/googleapis/nodejs-projectify/compare/v1.0.4...v2.0.0) (2020-03-24) + + +### ⚠ BREAKING CHANGES + +* typescript@3.7 introduced some breaking changes +* drop Node 8 from engines field (#172) + +### Features + +* drop Node 8 from engines field ([#172](https://www.github.com/googleapis/nodejs-projectify/issues/172)) ([3eac424](https://www.github.com/googleapis/nodejs-projectify/commit/3eac424bfb1ee47144a77888dc68db687988945e)) + + +### Build System + +* update to latest version of gts/typescript ([#171](https://www.github.com/googleapis/nodejs-projectify/issues/171)) ([30f90cc](https://www.github.com/googleapis/nodejs-projectify/commit/30f90cc172da6ed9394da91869556bf5eef42434)) + +### [1.0.4](https://www.github.com/googleapis/nodejs-projectify/compare/v1.0.3...v1.0.4) (2019-12-05) + + +### Bug Fixes + +* **publish:** publication failed to reach npm 
([#141](https://www.github.com/googleapis/nodejs-projectify/issues/141)) ([5406ba5](https://www.github.com/googleapis/nodejs-projectify/commit/5406ba5e1d43a228a19072023c1baebce34190af)) + +### [1.0.3](https://www.github.com/googleapis/nodejs-projectify/compare/v1.0.2...v1.0.3) (2019-12-05) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([6c95307](https://www.github.com/googleapis/nodejs-projectify/commit/6c953070139a77d30c4ce5b7dee1443874046906)) + +### [1.0.2](https://www.github.com/googleapis/nodejs-projectify/compare/v1.0.1...v1.0.2) (2019-11-14) + + +### Bug Fixes + +* **docs:** add jsdoc-region-tag plugin ([#135](https://www.github.com/googleapis/nodejs-projectify/issues/135)) ([59301e7](https://www.github.com/googleapis/nodejs-projectify/commit/59301e7cfa855add4894dd9c46870e61fffa7413)) + +### [1.0.1](https://www.github.com/googleapis/nodejs-projectify/compare/v1.0.0...v1.0.1) (2019-06-26) + + +### Bug Fixes + +* **docs:** link to reference docs section on googleapis.dev ([#119](https://www.github.com/googleapis/nodejs-projectify/issues/119)) ([90a009f](https://www.github.com/googleapis/nodejs-projectify/commit/90a009f)) + +## [1.0.0](https://www.github.com/googleapis/nodejs-projectify/compare/v0.3.3...v1.0.0) (2019-05-02) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#103](https://www.github.com/googleapis/nodejs-projectify/issues/103)) ([0149650](https://www.github.com/googleapis/nodejs-projectify/commit/0149650)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#103) + +## v0.3.3 + +03-12-2019 12:27 PDT + +This patch release contains a few updates to the docs. That's all! + +### Documentation +- docs: update links in contrib guide ([#86](https://github.com/googleapis/nodejs-projectify/pull/86)) +- docs: update contributing path in README ([#82](https://github.com/googleapis/nodejs-projectify/pull/82)) +- docs: move CONTRIBUTING.md to root ([#81](https://github.com/googleapis/nodejs-projectify/pull/81)) +- docs: add lint/fix example to contributing guide ([#79](https://github.com/googleapis/nodejs-projectify/pull/79)) + +### Internal / Testing Changes +- build: Add docuploader credentials to node publish jobs ([#90](https://github.com/googleapis/nodejs-projectify/pull/90)) +- build: use node10 to run samples-test, system-test etc ([#89](https://github.com/googleapis/nodejs-projectify/pull/89)) +- build: update release configuration +- chore(deps): update dependency mocha to v6 +- build: use linkinator for docs test ([#85](https://github.com/googleapis/nodejs-projectify/pull/85)) +- build: create docs test npm scripts ([#84](https://github.com/googleapis/nodejs-projectify/pull/84)) +- build: test using @grpc/grpc-js in CI ([#83](https://github.com/googleapis/nodejs-projectify/pull/83)) +- build: ignore googleapis.com in doc link check ([#78](https://github.com/googleapis/nodejs-projectify/pull/78)) +- build: check for 404s in the docs ([#77](https://github.com/googleapis/nodejs-projectify/pull/77)) +- chore(build): inject yoshi automation key ([#75](https://github.com/googleapis/nodejs-projectify/pull/75)) +- chore: update nyc and eslint configs ([#74](https://github.com/googleapis/nodejs-projectify/pull/74)) +- chore: fix publish.sh permission +x ([#72](https://github.com/googleapis/nodejs-projectify/pull/72)) +- fix(build): fix Kokoro release script ([#71](https://github.com/googleapis/nodejs-projectify/pull/71)) +- build: add Kokoro configs for autorelease ([#70](https://github.com/googleapis/nodejs-projectify/pull/70)) +- chore: 
always nyc report before calling codecov ([#67](https://github.com/googleapis/nodejs-projectify/pull/67)) +- chore: nyc ignore build/test by default ([#66](https://github.com/googleapis/nodejs-projectify/pull/66)) +- chore(build): update prettier config ([#64](https://github.com/googleapis/nodejs-projectify/pull/64)) +- chore: update license file ([#63](https://github.com/googleapis/nodejs-projectify/pull/63)) +- fix(build): fix system key decryption ([#59](https://github.com/googleapis/nodejs-projectify/pull/59)) +- chore: add synth.metadata + +## v0.3.2 + +### Bug fixes +- fix: do not replace projectId on stream objects ([#53](https://github.com/googleapis/nodejs-projectify/pull/53)) + +### Dependencies +- chore(deps): update dependency gts to ^0.9.0 ([#52](https://github.com/googleapis/nodejs-projectify/pull/52)) + +### Internal / Testing Changes +- chore: update eslintignore config ([#51](https://github.com/googleapis/nodejs-projectify/pull/51)) +- chore: use latest npm on Windows ([#50](https://github.com/googleapis/nodejs-projectify/pull/50)) +- chore: update CircleCI config ([#49](https://github.com/googleapis/nodejs-projectify/pull/49)) +- chore: include build in eslintignore ([#46](https://github.com/googleapis/nodejs-projectify/pull/46)) + +## v0.3.1 + +### Implementation Changes +- fix: replaceProjectId should not fail when passed a Buffer ([#43](https://github.com/googleapis/nodejs-projectify/pull/43)) + +### Dependencies +- chore(deps): update dependency nyc to v13 ([#13](https://github.com/googleapis/nodejs-projectify/pull/13)) +- chore(deps): lock file maintenance ([#11](https://github.com/googleapis/nodejs-projectify/pull/11)) +- chore(deps): lock file maintenance ([#8](https://github.com/googleapis/nodejs-projectify/pull/8)) +- chore(deps): update dependency typescript to v3 ([#7](https://github.com/googleapis/nodejs-projectify/pull/7)) +- chore(deps): update dependency gts to ^0.8.0 ([#2](https://github.com/googleapis/nodejs-projectify/pull/2)) +- chore(deps): lock file maintenance ([#4](https://github.com/googleapis/nodejs-projectify/pull/4)) +- chore(deps): lock file maintenance ([#3](https://github.com/googleapis/nodejs-projectify/pull/3)) + +### Internal / Testing Changes +- chore: update issue templates ([#40](https://github.com/googleapis/nodejs-projectify/pull/40)) +- chore: remove old issue template ([#38](https://github.com/googleapis/nodejs-projectify/pull/38)) +- build: run tests on node11 ([#37](https://github.com/googleapis/nodejs-projectify/pull/37)) +- chores(build): run codecov on continuous builds ([#34](https://github.com/googleapis/nodejs-projectify/pull/34)) +- chores(build): do not collect sponge.xml from windows builds ([#35](https://github.com/googleapis/nodejs-projectify/pull/35)) +- chore: update new issue template ([#33](https://github.com/googleapis/nodejs-projectify/pull/33)) +- build: fix codecov uploading on Kokoro ([#30](https://github.com/googleapis/nodejs-projectify/pull/30)) +- Update kokoro config ([#28](https://github.com/googleapis/nodejs-projectify/pull/28)) +- Update CI config ([#26](https://github.com/googleapis/nodejs-projectify/pull/26)) +- Don't publish sourcemaps ([#24](https://github.com/googleapis/nodejs-projectify/pull/24)) +- build: prevent system/sample-test from leaking credentials +- Update kokoro config ([#22](https://github.com/googleapis/nodejs-projectify/pull/22)) +- test: remove appveyor config ([#21](https://github.com/googleapis/nodejs-projectify/pull/21)) +- Update CI config 
([#20](https://github.com/googleapis/nodejs-projectify/pull/20)) +- Enable prefer-const in the eslint config ([#19](https://github.com/googleapis/nodejs-projectify/pull/19)) +- Enable no-var in eslint ([#18](https://github.com/googleapis/nodejs-projectify/pull/18)) +- Update CI config ([#17](https://github.com/googleapis/nodejs-projectify/pull/17)) +- Add synth and update CI config ([#15](https://github.com/googleapis/nodejs-projectify/pull/15)) +- chore: ignore package-lock.json ([#12](https://github.com/googleapis/nodejs-projectify/pull/12)) +- chore: update renovate config ([#10](https://github.com/googleapis/nodejs-projectify/pull/10)) +- remove that whitespace ([#9](https://github.com/googleapis/nodejs-projectify/pull/9)) +- chore: assert.deelEqual => assert.deepStrictEqual ([#6](https://github.com/googleapis/nodejs-projectify/pull/6)) +- chore: move mocha options to mocha.opts ([#5](https://github.com/googleapis/nodejs-projectify/pull/5)) diff --git a/node_modules/@google-cloud/projectify/LICENSE b/node_modules/@google-cloud/projectify/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/node_modules/@google-cloud/projectify/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@google-cloud/projectify/README.md b/node_modules/@google-cloud/projectify/README.md new file mode 100644 index 0000000..736e96a --- /dev/null +++ b/node_modules/@google-cloud/projectify/README.md @@ -0,0 +1,138 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Cloud Common Projectify: Node.js Client](https://github.com/googleapis/nodejs-projectify) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/@google-cloud/projectify.svg)](https://www.npmjs.org/package/@google-cloud/projectify) + + + + +A simple utility for replacing the projectid token in objects. 
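Editor's note: in practice that means walking strings, arrays, and plain objects and swapping every `{{projectId}}` placeholder for a concrete project ID. A brief sketch follows (the request shape and names below are invented for illustration; the two imported symbols are the package's exported API):

```javascript
const {replaceProjectIdToken, MissingProjectIdError} = require('@google-cloud/projectify');

// Invented request object; the placeholder is replaced recursively in
// strings, arrays, and plain objects (Buffers and Streams are skipped).
const request = {
  parent: 'projects/{{projectId}}/locations/us-central1',
  labels: {owner: '{{projectId}}'},
};
const populated = replaceProjectIdToken(request, 'my-sample-project');
console.log(populated.parent); // projects/my-sample-project/locations/us-central1

// With no usable project ID the token cannot be resolved and the call throws.
try {
  replaceProjectIdToken({topic: 'projects/{{projectId}}/topics/t'}, undefined);
} catch (e) {
  console.log(e instanceof MissingProjectIdError); // true
}
```

Note that objects are updated in place and returned, which is why the second call above uses a fresh literal rather than the already-populated `request`.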
+ + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/nodejs-projectify/blob/main/CHANGELOG.md). + +* [Google Cloud Common Projectify Node.js Client API Reference][client-docs] + +* [github.com/googleapis/nodejs-projectify](https://github.com/googleapis/nodejs-projectify) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install @google-cloud/projectify +``` + + +### Using the client library + +```javascript +const {replaceProjectIdToken} = require('@google-cloud/projectify'); +const options = { + projectId: '{{projectId}}', +}; +replaceProjectIdToken(options, 'fake-project-id'); + +``` + + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-projectify/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. + +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Quickstart | [source code](https://github.com/googleapis/nodejs-projectify/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-projectify&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | + + + +The [Google Cloud Common Projectify Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://github.com/nodejs/release#release-schedule). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install @google-cloud/projectify@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. 
+ + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-projectify/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). + +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/nodejs-projectify/blob/main/LICENSE) + +[client-docs]: https://cloud.google.com/nodejs/docs/reference/projectify/latest + +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/@google-cloud/projectify/build/src/index.d.ts b/node_modules/@google-cloud/projectify/build/src/index.d.ts new file mode 100644 index 0000000..f96f13c --- /dev/null +++ b/node_modules/@google-cloud/projectify/build/src/index.d.ts @@ -0,0 +1,16 @@ +/** + * Populate the `{{projectId}}` placeholder. + * + * @throws {Error} If a projectId is required, but one is not provided. + * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. + */ +export declare function replaceProjectIdToken(value: any, projectId: string): any; +/** + * Custom error type for missing project ID errors. + */ +export declare class MissingProjectIdError extends Error { + message: string; +} diff --git a/node_modules/@google-cloud/projectify/build/src/index.js b/node_modules/@google-cloud/projectify/build/src/index.js new file mode 100644 index 0000000..085bff8 --- /dev/null +++ b/node_modules/@google-cloud/projectify/build/src/index.js @@ -0,0 +1,66 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; +const stream_1 = require("stream"); +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/** + * Populate the `{{projectId}}` placeholder. + * + * @throws {Error} If a projectId is required, but one is not provided. + * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. 
+ */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function replaceProjectIdToken(value, projectId) { + if (Array.isArray(value)) { + value = value.map(v => replaceProjectIdToken(v, projectId)); + } + if (value !== null && + typeof value === 'object' && + !(value instanceof Buffer) && + !(value instanceof stream_1.Stream) && + typeof value.hasOwnProperty === 'function') { + for (const opt in value) { + // eslint-disable-next-line no-prototype-builtins + if (value.hasOwnProperty(opt)) { + value[opt] = replaceProjectIdToken(value[opt], projectId); + } + } + } + if (typeof value === 'string' && + value.indexOf('{{projectId}}') > -1) { + if (!projectId || projectId === '{{projectId}}') { + throw new MissingProjectIdError(); + } + value = value.replace(/{{projectId}}/g, projectId); + } + return value; +} +exports.replaceProjectIdToken = replaceProjectIdToken; +/** + * Custom error type for missing project ID errors. + */ +class MissingProjectIdError extends Error { + constructor() { + super(...arguments); + this.message = `Sorry, we cannot connect to Cloud Services without a project + ID. You may specify one with an environment variable named + "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); + } +} +exports.MissingProjectIdError = MissingProjectIdError; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/projectify/package.json b/node_modules/@google-cloud/projectify/package.json new file mode 100644 index 0000000..c191171 --- /dev/null +++ b/node_modules/@google-cloud/projectify/package.json @@ -0,0 +1,46 @@ +{ + "name": "@google-cloud/projectify", + "version": "4.0.0", + "description": "A simple utility for replacing the projectid token in objects.", + "main": "build/src/index.js", + "types": "build/src/index.d.ts", + "repository": "googleapis/nodejs-projectify", + "scripts": { + "test": "c8 mocha build/test", + "lint": "gts check", + "clean": "gts clean", + "compile": "tsc -p .", + "fix": "gts fix", + "prepare": "npm run compile", + "pretest": "npm run compile", + "docs": "compodoc src/", + "presystem-test": "npm run compile", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "system-test": "mocha build/system-test", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "precompile": "gts clean" + }, + "keywords": [], + "files": [ + "build/src", + "!build/src/**/*.map" + ], + "author": "Google Inc.", + "license": "Apache-2.0", + "devDependencies": { + "@compodoc/compodoc": "^1.1.11", + "@types/mocha": "^9.0.0", + "@types/node": "^20.4.9", + "c8": "^8.0.1", + "codecov": "^3.6.5", + "gts": "^5.0.0", + "linkinator": "^4.0.0", + "mocha": "^9.2.2", + "typescript": "^5.1.6" + }, + "engines": { + "node": ">=14.0.0" + } +} diff --git a/node_modules/@google-cloud/promisify/CHANGELOG.md b/node_modules/@google-cloud/promisify/CHANGELOG.md new file mode 100644 index 0000000..0393f3e --- /dev/null +++ b/node_modules/@google-cloud/promisify/CHANGELOG.md @@ -0,0 +1,191 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/nodejs-promisify?activeTab=versions + +## [4.0.0](https://github.com/googleapis/nodejs-promisify/compare/v3.0.1...v4.0.0) (2023-08-08) + + +### ⚠ BREAKING CHANGES + +* upgrade to Node 14 ([#325](https://github.com/googleapis/nodejs-promisify/issues/325)) + +### Miscellaneous Chores + +* Upgrade to Node 14 ([#325](https://github.com/googleapis/nodejs-promisify/issues/325)) 
([57d02c1](https://github.com/googleapis/nodejs-promisify/commit/57d02c1c23c65d63131bb99c07919ff80e5604cd)) + +## [3.0.1](https://github.com/googleapis/nodejs-promisify/compare/v3.0.0...v3.0.1) (2022-08-23) + + +### Bug Fixes + +* remove pip install statements ([#1546](https://github.com/googleapis/nodejs-promisify/issues/1546)) ([#310](https://github.com/googleapis/nodejs-promisify/issues/310)) ([c7c6883](https://github.com/googleapis/nodejs-promisify/commit/c7c688389de72ddc0181b19bceee2d95eacd3d96)) + +## [3.0.0](https://github.com/googleapis/nodejs-promisify/compare/v2.0.4...v3.0.0) (2022-05-03) + + +### ⚠ BREAKING CHANGES + +* drop node 10 from engines list, update typescript to 4.6.3 (#300) + +### Build System + +* drop node 10 from engines list, update typescript to 4.6.3 ([#300](https://github.com/googleapis/nodejs-promisify/issues/300)) ([fed2f14](https://github.com/googleapis/nodejs-promisify/commit/fed2f145a5256c939eb66b85a5c7c48332b8841d)) + +### [2.0.4](https://www.github.com/googleapis/nodejs-promisify/compare/v2.0.3...v2.0.4) (2021-09-09) + + +### Bug Fixes + +* **build:** switch primary branch to main ([#270](https://www.github.com/googleapis/nodejs-promisify/issues/270)) ([11242f7](https://www.github.com/googleapis/nodejs-promisify/commit/11242f7f76e170dae7a429f8d4064bf33be9bb3f)) + +### [2.0.3](https://www.github.com/googleapis/nodejs-promisify/compare/v2.0.2...v2.0.3) (2020-09-04) + + +### Bug Fixes + +* allow excluding accessor methods ([#228](https://www.github.com/googleapis/nodejs-promisify/issues/228)) ([114d8bc](https://www.github.com/googleapis/nodejs-promisify/commit/114d8bcef7093bdfda195a15e0c2f376195fd3fc)) + +### [2.0.2](https://www.github.com/googleapis/nodejs-promisify/compare/v2.0.1...v2.0.2) (2020-07-06) + + +### Bug Fixes + +* update node issue template ([#204](https://www.github.com/googleapis/nodejs-promisify/issues/204)) ([a2ba8d8](https://www.github.com/googleapis/nodejs-promisify/commit/a2ba8d8e45ef03d093d987292a467696745fc9fd)) + +### [2.0.1](https://www.github.com/googleapis/nodejs-promisify/compare/v2.0.0...v2.0.1) (2020-05-08) + + +### Bug Fixes + +* apache license URL ([#468](https://www.github.com/googleapis/nodejs-promisify/issues/468)) ([#191](https://www.github.com/googleapis/nodejs-promisify/issues/191)) ([0edc724](https://www.github.com/googleapis/nodejs-promisify/commit/0edc7246c53d25d9dd220b813561bcee97250783)) + +## [2.0.0](https://www.github.com/googleapis/nodejs-promisify/compare/v1.0.4...v2.0.0) (2020-03-23) + + +### ⚠ BREAKING CHANGES + +* update to latest version of gts/typescript (#183) +* drop Node 8 from engines field (#184) + +### Features + +* drop Node 8 from engines field ([#184](https://www.github.com/googleapis/nodejs-promisify/issues/184)) ([7e6d3c5](https://www.github.com/googleapis/nodejs-promisify/commit/7e6d3c54066d89530ed25c7f9722efd252f43fb8)) + + +### Build System + +* update to latest version of gts/typescript ([#183](https://www.github.com/googleapis/nodejs-promisify/issues/183)) ([9c3ed12](https://www.github.com/googleapis/nodejs-promisify/commit/9c3ed12c12f4bb1e17af7440c6371c4cefddcd59)) + +### [1.0.4](https://www.github.com/googleapis/nodejs-promisify/compare/v1.0.3...v1.0.4) (2019-12-05) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([e48750e](https://www.github.com/googleapis/nodejs-promisify/commit/e48750ef96aa20eb3a2b73fe2f062d04430468a7)) + +### [1.0.3](https://www.github.com/googleapis/nodejs-promisify/compare/v1.0.2...v1.0.3) (2019-11-13) + + +### Bug Fixes + +* **docs:** add 
jsdoc-region-tag plugin ([#146](https://www.github.com/googleapis/nodejs-promisify/issues/146)) ([ff0ee74](https://www.github.com/googleapis/nodejs-promisify/commit/ff0ee7408f50e8f7147b8ccf7e10337aa5920076)) + +### [1.0.2](https://www.github.com/googleapis/nodejs-promisify/compare/v1.0.1...v1.0.2) (2019-06-26) + + +### Bug Fixes + +* **docs:** link to reference docs section on googleapis.dev ([#128](https://www.github.com/googleapis/nodejs-promisify/issues/128)) ([5a8bd90](https://www.github.com/googleapis/nodejs-promisify/commit/5a8bd90)) + +### [1.0.1](https://www.github.com/googleapis/nodejs-promisify/compare/v1.0.0...v1.0.1) (2019-06-14) + + +### Bug Fixes + +* **docs:** move to new client docs URL ([#124](https://www.github.com/googleapis/nodejs-promisify/issues/124)) ([34d18cd](https://www.github.com/googleapis/nodejs-promisify/commit/34d18cd)) + +## [1.0.0](https://www.github.com/googleapis/nodejs-promisify/compare/v0.4.0...v1.0.0) (2019-05-02) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#108](https://www.github.com/googleapis/nodejs-promisify/issues/108)) ([78ab89c](https://www.github.com/googleapis/nodejs-promisify/commit/78ab89c)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#108) + +## v0.4.0 + +02-12-2019 19:44 PST + +### New features +- feat: add callbackify() and callbackifyAll() methods ([#82](https://github.com/googleapis/nodejs-promisify/pull/82)) + +### Documentation +- docs: update contributing path in README ([#86](https://github.com/googleapis/nodejs-promisify/pull/86)) +- chore: move CONTRIBUTING.md to root ([#85](https://github.com/googleapis/nodejs-promisify/pull/85)) +- docs: add lint/fix example to contributing guide ([#83](https://github.com/googleapis/nodejs-promisify/pull/83)) + +### Internal / Testing Changes +- build: create docs test npm scripts ([#88](https://github.com/googleapis/nodejs-promisify/pull/88)) +- build: test using @grpc/grpc-js in CI ([#87](https://github.com/googleapis/nodejs-promisify/pull/87)) +- build: ignore googleapis.com in doc link check ([#81](https://github.com/googleapis/nodejs-promisify/pull/81)) +- build: check broken links in generated docs ([#79](https://github.com/googleapis/nodejs-promisify/pull/79)) +- chore(deps): update dependency @types/sinon to v7 ([#78](https://github.com/googleapis/nodejs-promisify/pull/78)) +- chore(build): inject yoshi automation key ([#77](https://github.com/googleapis/nodejs-promisify/pull/77)) +- chore: update nyc and eslint configs ([#76](https://github.com/googleapis/nodejs-promisify/pull/76)) +- chore: fix publish.sh permission +x ([#74](https://github.com/googleapis/nodejs-promisify/pull/74)) +- fix(build): fix Kokoro release script ([#73](https://github.com/googleapis/nodejs-promisify/pull/73)) +- build: add Kokoro configs for autorelease ([#72](https://github.com/googleapis/nodejs-promisify/pull/72)) +- chore: always nyc report before calling codecov ([#69](https://github.com/googleapis/nodejs-promisify/pull/69)) +- chore: nyc ignore build/test by default ([#68](https://github.com/googleapis/nodejs-promisify/pull/68)) +- chore(build): update prettier config ([#66](https://github.com/googleapis/nodejs-promisify/pull/66)) +- fix: get the build passing ([#65](https://github.com/googleapis/nodejs-promisify/pull/65)) +- chore: update license file ([#64](https://github.com/googleapis/nodejs-promisify/pull/64)) +- fix(build): fix system key decryption ([#60](https://github.com/googleapis/nodejs-promisify/pull/60)) +- chore(deps): update dependency @types/sinon 
to v5.0.7 ([#58](https://github.com/googleapis/nodejs-promisify/pull/58)) +- fix: Pin @types/sinon to last compatible version ([#57](https://github.com/googleapis/nodejs-promisify/pull/57)) +- chore: add synth.metadata +- chore(deps): update dependency gts to ^0.9.0 ([#54](https://github.com/googleapis/nodejs-promisify/pull/54)) +- chore: update eslintignore config ([#53](https://github.com/googleapis/nodejs-promisify/pull/53)) +- chore: use latest npm on Windows ([#52](https://github.com/googleapis/nodejs-promisify/pull/52)) +- chore: update CircleCI config ([#51](https://github.com/googleapis/nodejs-promisify/pull/51)) +- chore: include build in eslintignore ([#48](https://github.com/googleapis/nodejs-promisify/pull/48)) +- chore: update issue templates ([#44](https://github.com/googleapis/nodejs-promisify/pull/44)) +- chore: remove old issue template ([#42](https://github.com/googleapis/nodejs-promisify/pull/42)) +- build: run tests on node11 ([#41](https://github.com/googleapis/nodejs-promisify/pull/41)) +- chores(build): do not collect sponge.xml from windows builds ([#40](https://github.com/googleapis/nodejs-promisify/pull/40)) +- chores(build): run codecov on continuous builds ([#39](https://github.com/googleapis/nodejs-promisify/pull/39)) +- chore: update new issue template ([#38](https://github.com/googleapis/nodejs-promisify/pull/38)) +- chore(deps): update dependency sinon to v7 ([#33](https://github.com/googleapis/nodejs-promisify/pull/33)) +- build: fix codecov uploading on Kokoro ([#34](https://github.com/googleapis/nodejs-promisify/pull/34)) +- Update kokoro config ([#30](https://github.com/googleapis/nodejs-promisify/pull/30)) +- Update CI config ([#28](https://github.com/googleapis/nodejs-promisify/pull/28)) +- Don't publish sourcemaps ([#26](https://github.com/googleapis/nodejs-promisify/pull/26)) +- Update kokoro config ([#24](https://github.com/googleapis/nodejs-promisify/pull/24)) +- test: remove appveyor config ([#23](https://github.com/googleapis/nodejs-promisify/pull/23)) +- Update CI config ([#22](https://github.com/googleapis/nodejs-promisify/pull/22)) +- Enable prefer-const in the eslint config ([#21](https://github.com/googleapis/nodejs-promisify/pull/21)) +- Enable no-var in eslint ([#19](https://github.com/googleapis/nodejs-promisify/pull/19)) +- Update CI config ([#18](https://github.com/googleapis/nodejs-promisify/pull/18)) + +## v0.3.1 + +### Internal / Testing Changes +- Add synth script and update CI (#14) +- chore(deps): update dependency nyc to v13 (#12) +- chore: ignore package-lock.json (#11) +- chore(deps): lock file maintenance (#10) +- chore: update renovate config (#9) +- remove that whitespace (#8) +- chore(deps): lock file maintenance (#7) +- chore(deps): update dependency typescript to v3 (#6) +- chore: assert.deelEqual => assert.deepStrictEqual (#5) +- chore: move mocha options to mocha.opts (#4) +- chore(deps): update dependency gts to ^0.8.0 (#1) +- chore(deps): lock file maintenance (#3) +- chore(deps): lock file maintenance (#2) diff --git a/node_modules/@google-cloud/promisify/LICENSE b/node_modules/@google-cloud/promisify/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/node_modules/@google-cloud/promisify/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/@google-cloud/promisify/README.md b/node_modules/@google-cloud/promisify/README.md new file mode 100644 index 0000000..5702438 --- /dev/null +++ b/node_modules/@google-cloud/promisify/README.md @@ -0,0 +1,156 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Cloud Common Promisify: Node.js Client](https://github.com/googleapis/nodejs-promisify) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/@google-cloud/promisify.svg)](https://www.npmjs.org/package/@google-cloud/promisify) + + + + +A simple utility for promisifying functions and classes. + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/nodejs-promisify/blob/main/CHANGELOG.md). + +* [Google Cloud Common Promisify Node.js Client API Reference][client-docs] + +* [github.com/googleapis/nodejs-promisify](https://github.com/googleapis/nodejs-promisify) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install @google-cloud/promisify +``` + + +### Using the client library + +```javascript +const {promisify} = require('@google-cloud/promisify'); + +/** + * This is a very basic example function that accepts a callback. + */ +function someCallbackFunction(name, callback) { + if (!name) { + callback(new Error('Name is required!')); + } else { + callback(null, `Well hello there, ${name}!`); + } +} + +// let's promisify it! +const somePromiseFunction = promisify(someCallbackFunction); + +async function quickstart() { + // now we can just `await` the function to use it like a promisified method + const [result] = await somePromiseFunction('nodestronaut'); + console.log(result); +} +quickstart(); + +``` +It's unlikely you will need to install this package directly, as it will be +installed as a dependency when you install other `@google-cloud` packages. + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-promisify/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. + +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Quickstart | [source code](https://github.com/googleapis/nodejs-promisify/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-promisify&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | + + + +The [Google Cloud Common Promisify Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://github.com/nodejs/release#release-schedule). 
+Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install @google-cloud/promisify@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-promisify/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). + +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/nodejs-promisify/blob/main/LICENSE) + +[client-docs]: https://googleapis.dev/nodejs/promisify/latest + +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/@google-cloud/promisify/build/src/index.d.ts b/node_modules/@google-cloud/promisify/build/src/index.d.ts new file mode 100644 index 0000000..9d75a39 --- /dev/null +++ b/node_modules/@google-cloud/promisify/build/src/index.d.ts @@ -0,0 +1,61 @@ +export interface PromisifyAllOptions extends PromisifyOptions { + /** + * Array of methods to ignore when promisifying. + */ + exclude?: string[]; +} +export interface PromisifyOptions { + /** + * Resolve the promise with single arg instead of an array. + */ + singular?: boolean; +} +export interface PromiseMethod extends Function { + promisified_?: boolean; +} +export interface WithPromise { + Promise?: PromiseConstructor; +} +export interface CallbackifyAllOptions { + /** + * Array of methods to ignore when callbackifying. + */ + exclude?: string[]; +} +export interface CallbackMethod extends Function { + callbackified_?: boolean; +} +/** + * Wraps a callback style function to conditionally return a promise. + * + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. 
+ * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. + * @return {function} wrapped + */ +export declare function promisify(originalMethod: PromiseMethod, options?: PromisifyOptions): any; +/** + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +export declare function promisifyAll(Class: Function, options?: PromisifyAllOptions): void; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. + * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. + * @return {function} wrapped + */ +export declare function callbackify(originalMethod: CallbackMethod): CallbackMethod; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +export declare function callbackifyAll(Class: Function, options?: CallbackifyAllOptions): void; diff --git a/node_modules/@google-cloud/promisify/build/src/index.js b/node_modules/@google-cloud/promisify/build/src/index.js new file mode 100644 index 0000000..3199b96 --- /dev/null +++ b/node_modules/@google-cloud/promisify/build/src/index.js @@ -0,0 +1,148 @@ +"use strict"; +/* eslint-disable prefer-rest-params */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; +/** + * Wraps a callback style function to conditionally return a promise. + * + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. + * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. + * @return {function} wrapped + */ +function promisify(originalMethod, options) { + if (originalMethod.promisified_) { + return originalMethod; + } + options = options || {}; + const slice = Array.prototype.slice; + // tslint:disable-next-line:no-any + const wrapper = function () { + let last; + for (last = arguments.length - 1; last >= 0; last--) { + const arg = arguments[last]; + if (typeof arg === 'undefined') { + continue; // skip trailing undefined. + } + if (typeof arg !== 'function') { + break; // non-callback last argument found. + } + return originalMethod.apply(this, arguments); + } + // peel trailing undefined. + const args = slice.call(arguments, 0, last + 1); + // tslint:disable-next-line:variable-name + let PromiseCtor = Promise; + // Because dedupe will likely create a single install of + // @google-cloud/common to be shared amongst all modules, we need to + // localize it at the Service level. 
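+ // If the calling instance carries its own Promise constructor (this.Promise),
+ // prefer it over the global Promise when building the returned promise.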
+ if (this && this.Promise) { + PromiseCtor = this.Promise; + } + return new PromiseCtor((resolve, reject) => { + // tslint:disable-next-line:no-any + args.push((...args) => { + const callbackArgs = slice.call(args); + const err = callbackArgs.shift(); + if (err) { + return reject(err); + } + if (options.singular && callbackArgs.length === 1) { + resolve(callbackArgs[0]); + } + else { + resolve(callbackArgs); + } + }); + originalMethod.apply(this, args); + }); + }; + wrapper.promisified_ = true; + return wrapper; +} +exports.promisify = promisify; +/** + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +// tslint:disable-next-line:variable-name +function promisifyAll(Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? + ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.promisified_) { + Class.prototype[methodName] = exports.promisify(originalMethod, options); + } + }); +} +exports.promisifyAll = promisifyAll; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. + * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. + * @return {function} wrapped + */ +function callbackify(originalMethod) { + if (originalMethod.callbackified_) { + return originalMethod; + } + // tslint:disable-next-line:no-any + const wrapper = function () { + if (typeof arguments[arguments.length - 1] !== 'function') { + return originalMethod.apply(this, arguments); + } + const cb = Array.prototype.pop.call(arguments); + originalMethod.apply(this, arguments).then( + // tslint:disable-next-line:no-any + (res) => { + res = Array.isArray(res) ? res : [res]; + cb(null, ...res); + }, (err) => cb(err)); + }; + wrapper.callbackified_ = true; + return wrapper; +} +exports.callbackify = callbackify; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +function callbackifyAll( +// tslint:disable-next-line:variable-name +Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? 
+ ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.callbackified_) { + Class.prototype[methodName] = exports.callbackify(originalMethod); + } + }); +} +exports.callbackifyAll = callbackifyAll; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@google-cloud/promisify/package.json b/node_modules/@google-cloud/promisify/package.json new file mode 100644 index 0000000..509150b --- /dev/null +++ b/node_modules/@google-cloud/promisify/package.json @@ -0,0 +1,50 @@ +{ + "name": "@google-cloud/promisify", + "version": "4.0.0", + "description": "A simple utility for promisifying functions and classes.", + "main": "build/src/index.js", + "types": "build/src/index.d.ts", + "repository": "googleapis/nodejs-promisify", + "scripts": { + "test": "c8 mocha build/test", + "lint": "gts check", + "compile": "tsc -p .", + "fix": "gts fix", + "prepare": "npm run compile", + "pretest": "npm run compile", + "docs": "compodoc src/", + "presystem-test": "npm run compile", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "system-test": "mocha build/system-test", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "clean": "gts clean", + "precompile": "gts clean" + }, + "keywords": [], + "files": [ + "build/src", + "!build/src/**/*.map" + ], + "author": "Google Inc.", + "license": "Apache-2.0", + "devDependencies": { + "@compodoc/compodoc": "^1.1.9", + "@types/mocha": "^9.0.0", + "@types/node": "^20.4.8", + "@types/sinon": "^10.0.0", + "c8": "^8.0.1", + "chai": "^4.2.0", + "codecov": "^3.0.4", + "gts": "^5.0.0", + "hard-rejection": "^2.1.0", + "linkinator": "^5.0.1", + "mocha": "^8.0.0", + "sinon": "^15.0.0", + "typescript": "^5.1.6" + }, + "engines": { + "node": ">=14" + } +} diff --git a/node_modules/@google-cloud/storage/CHANGELOG.md b/node_modules/@google-cloud/storage/CHANGELOG.md new file mode 100644 index 0000000..b0453e0 --- /dev/null +++ b/node_modules/@google-cloud/storage/CHANGELOG.md @@ -0,0 +1,1643 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/@google-cloud/storage?activeTab=versions + +## [7.9.0](https://github.com/googleapis/nodejs-storage/compare/v7.8.0...v7.9.0) (2024-03-18) + + +### Features + +* Add ability to configure and utilize soft-delete and restore ([#2425](https://github.com/googleapis/nodejs-storage/issues/2425)) ([7da5a7d](https://github.com/googleapis/nodejs-storage/commit/7da5a7da86ad649a8132e3183f4b3e3f9bb2eace)) + +## [7.8.0](https://github.com/googleapis/nodejs-storage/compare/v7.7.0...v7.8.0) (2024-03-07) + + +### Features + +* Add includeFoldersAsPrefixes for managed folders ([#2413](https://github.com/googleapis/nodejs-storage/issues/2413)) ([3044d3c](https://github.com/googleapis/nodejs-storage/commit/3044d3cfb1b4a24f07fd6ec29e3d20d5818c4ca3)) +* Base TPC Support ([#2397](https://github.com/googleapis/nodejs-storage/issues/2397)) ([a3f4891](https://github.com/googleapis/nodejs-storage/commit/a3f4891ee60e57cc19929489cae6110b07955216)) + + +### Bug Fixes + +* Cannot read properties of null (reading length) in stream-shift ([#2422](https://github.com/googleapis/nodejs-storage/issues/2422)) ([11ebe2b](https://github.com/googleapis/nodejs-storage/commit/11ebe2bf905f8c15101446ecfe5a2d7c6005d0c3)) +* Do not automatically set overrideUnlockedRetention ([#2421](https://github.com/googleapis/nodejs-storage/issues/2421)) 
([c781bdc](https://github.com/googleapis/nodejs-storage/commit/c781bdcd89f63b22af1c491a6e517e110331d4ca)) + +## [7.7.0](https://github.com/googleapis/nodejs-storage/compare/v7.6.0...v7.7.0) (2023-11-29) + + +### Features + +* Implement object retention lock for bucket / files ([#2365](https://github.com/googleapis/nodejs-storage/issues/2365)) ([c140868](https://github.com/googleapis/nodejs-storage/commit/c140868bb5c7d1f0edd586fb2ca55bc613caf5d4)) + + +### Bug Fixes + +* TransferManager#downloadFileInChunks issues ([#2373](https://github.com/googleapis/nodejs-storage/issues/2373)) ([65950f3](https://github.com/googleapis/nodejs-storage/commit/65950f3d5c2ed73c56afdf579d7a949b4505e649)) + +## [7.6.0](https://github.com/googleapis/nodejs-storage/compare/v7.5.0...v7.6.0) (2023-11-08) + + +### Features + +* Resume Resumable Uploads ([#2333](https://github.com/googleapis/nodejs-storage/issues/2333)) ([2ba4009](https://github.com/googleapis/nodejs-storage/commit/2ba4009e599c5690c51849f307199be3452d3b07)) + +## [7.5.0](https://github.com/googleapis/nodejs-storage/compare/v7.4.0...v7.5.0) (2023-10-30) + + +### Features + +* Support autoclass v2.1 ([#2325](https://github.com/googleapis/nodejs-storage/issues/2325)) ([6572ce9](https://github.com/googleapis/nodejs-storage/commit/6572ce9dc146a0d1e13418a43204058d5909510e)) + +## [7.4.0](https://github.com/googleapis/nodejs-storage/compare/v7.3.2...v7.4.0) (2023-10-24) + + +### Features + +* Support building in CJS and ESM formats ([#2296](https://github.com/googleapis/nodejs-storage/issues/2296)) ([c848076](https://github.com/googleapis/nodejs-storage/commit/c84807662839b3671230a50b2a0c6f7a6efef528)) + +## [7.3.2](https://github.com/googleapis/nodejs-storage/compare/v7.3.1...v7.3.2) (2023-10-24) + + +### Bug Fixes + +* Close Open Handle for Empty Objects ([#2338](https://github.com/googleapis/nodejs-storage/issues/2338)) ([c51cd94](https://github.com/googleapis/nodejs-storage/commit/c51cd946171e8749453eef080d2853d31a6e72c8)) + +## [7.3.1](https://github.com/googleapis/nodejs-storage/compare/v7.3.0...v7.3.1) (2023-10-19) + + +### Bug Fixes + +* Add user-agent header to transfer manager and resumable upload ([#2334](https://github.com/googleapis/nodejs-storage/issues/2334)) ([0520867](https://github.com/googleapis/nodejs-storage/commit/0520867e51a2758ddf2773c0d910c937d55e21b1)) +* **deps:** Update dependency retry-request to v7 ([#2327](https://github.com/googleapis/nodejs-storage/issues/2327)) ([f20ef3c](https://github.com/googleapis/nodejs-storage/commit/f20ef3cb7bf8cbdda988e792edc7abd9a1b516f1)) +* Destroy sockets to stop memory leaking when stream errors ([#2336](https://github.com/googleapis/nodejs-storage/issues/2336)) ([04939ee](https://github.com/googleapis/nodejs-storage/commit/04939ee4976581cc4168943f0c578bc49458cff7)) + +## [7.3.0](https://github.com/googleapis/nodejs-storage/compare/v7.2.0...v7.3.0) (2023-10-11) + + +### Features + +* Add transfer manager MPU sample, adjust defaults, prepare for GA ([#2318](https://github.com/googleapis/nodejs-storage/issues/2318)) ([a7d09c1](https://github.com/googleapis/nodejs-storage/commit/a7d09c16aa7f732db089c55d2692ce5fd88c52f3)) + + +### Bug Fixes + +* Simplify the code for downloadInChunks ([#2323](https://github.com/googleapis/nodejs-storage/issues/2323)) ([6519929](https://github.com/googleapis/nodejs-storage/commit/6519929153e0006b61d205860de93c9e64b99d81)) + +## [7.2.0](https://github.com/googleapis/nodejs-storage/compare/v7.1.0...v7.2.0) (2023-10-05) + + +### Features + +* Add headers option to MPU 
([#2303](https://github.com/googleapis/nodejs-storage/issues/2303)) ([7f58f30](https://github.com/googleapis/nodejs-storage/commit/7f58f30588735d129fda0503e1daec5f605f8447)) +* Transfer Manager Metrics ([#2305](https://github.com/googleapis/nodejs-storage/issues/2305)) ([9be3b6a](https://github.com/googleapis/nodejs-storage/commit/9be3b6a97d9c4685d3c01a6d44e618087be54ea1)) + + +### Bug Fixes + +* Make sure destination uses posix separator instead of win ([#2304](https://github.com/googleapis/nodejs-storage/issues/2304)) ([1d4ea74](https://github.com/googleapis/nodejs-storage/commit/1d4ea74a3cc441dfccc47893f0318234f213921b)) + +## [7.1.0](https://github.com/googleapis/nodejs-storage/compare/v7.0.1...v7.1.0) (2023-09-07) + + +### Features + +* Export `ApiError` ([#2291](https://github.com/googleapis/nodejs-storage/issues/2291)) ([c1d1b35](https://github.com/googleapis/nodejs-storage/commit/c1d1b3505b1c6b0306f632a48cbd7d774b2b94d0)) +* Support iterables in file@save ([4356dd0](https://github.com/googleapis/nodejs-storage/commit/4356dd0e6bea5241c4cacd1a58697a332ccf4784)) +* Support iterables in file@save ([49327ff](https://github.com/googleapis/nodejs-storage/commit/49327ff576b2367d9efdff2f82a515b0538ee471)) +* Support iterables in file@save ([c0d9d58](https://github.com/googleapis/nodejs-storage/commit/c0d9d58b56a9a3485b6c0e5eb92411bb094f7bcb)) + + +### Bug Fixes + +* `File#save` iterable fixes ([#2293](https://github.com/googleapis/nodejs-storage/issues/2293)) ([87c3f41](https://github.com/googleapis/nodejs-storage/commit/87c3f419e2a5a3a30ea581aaa6127dfac261be17)) +* **deps:** Update dependency @google-cloud/paginator to v5 ([#2263](https://github.com/googleapis/nodejs-storage/issues/2263)) ([0c9b342](https://github.com/googleapis/nodejs-storage/commit/0c9b3425b47c3031ec4bac6d45d8cdca48b2f1a6)) +* **deps:** Update dependency @google-cloud/projectify to v4 ([#2264](https://github.com/googleapis/nodejs-storage/issues/2264)) ([c881bae](https://github.com/googleapis/nodejs-storage/commit/c881bae96b40f609d2b7a8d7388c6a76d34faab1)) +* **deps:** Update dependency @google-cloud/promisify to v4 ([#2262](https://github.com/googleapis/nodejs-storage/issues/2262)) ([9d46ff3](https://github.com/googleapis/nodejs-storage/commit/9d46ff3c02315c5a3516aa5f2755ee0471ba036b)) +* **deps:** Update dependency @google-cloud/pubsub to v4 ([#2256](https://github.com/googleapis/nodejs-storage/issues/2256)) ([18282bb](https://github.com/googleapis/nodejs-storage/commit/18282bbefe1201e51867c676a86301f8086aaf1e)) + + +### Miscellaneous Chores + +* Release 7.1.0 ([#2274](https://github.com/googleapis/nodejs-storage/issues/2274)) ([e0f45c2](https://github.com/googleapis/nodejs-storage/commit/e0f45c24b44d11c72ff40cc534be11cb2a65192f)) + +## [7.0.1](https://github.com/googleapis/nodejs-storage/compare/v7.0.0...v7.0.1) (2023-08-07) + + +### Bug Fixes + +* Export new types from index.ts ([#2258](https://github.com/googleapis/nodejs-storage/issues/2258)) ([93d2a0f](https://github.com/googleapis/nodejs-storage/commit/93d2a0f9f6d6a3de652aaeb3dd914bbc0bb593e0)) + +## [7.0.0](https://github.com/googleapis/nodejs-storage/compare/v6.12.0...v7.0.0) (2023-08-03) + + +### ⚠ BREAKING CHANGES + +* Make node 14 the minimum supported version ([#2244](https://github.com/googleapis/nodejs-storage/issues/2244)) +* Add stronger typings to metadata ([#2234](https://github.com/googleapis/nodejs-storage/issues/2234)) +* Remove extend and Treat Provided Options as Mutable ([#2204](https://github.com/googleapis/nodejs-storage/issues/2204)) +* 
Stronger typing for lifecycle rules ([#2215](https://github.com/googleapis/nodejs-storage/issues/2215)) +* Do not return responsebody in delete, only raw response ([#2236](https://github.com/googleapis/nodejs-storage/issues/2236)) +* Remove extraneous validation in favor of server errors ([#2237](https://github.com/googleapis/nodejs-storage/issues/2237)) +* Mark bucket.setLabels and associated interfaces as deprecated ([#2214](https://github.com/googleapis/nodejs-storage/issues/2214)) +* Disable source maps for smaller bundle size ([#2240](https://github.com/googleapis/nodejs-storage/issues/2240)) + + + +### Features + +* Make node 14 the minimum supported version ([#2244](https://github.com/googleapis/nodejs-storage/issues/2244)) ([f48dcd2](https://github.com/googleapis/nodejs-storage/commit/f48dcd2d00081aea8990f35b68a91248f3862abe)) + +## [6.12.0](https://github.com/googleapis/nodejs-storage/compare/v6.11.0...v6.12.0) (2023-07-13) + + +### Features + +* Add header for deno runtime for metrics tracking ([#2220](https://github.com/googleapis/nodejs-storage/issues/2220)) ([5083920](https://github.com/googleapis/nodejs-storage/commit/50839209063e75996b2a57bd7664760e0e5331ca)) +* MPU for transfer manager ([#2192](https://github.com/googleapis/nodejs-storage/issues/2192)) ([ae83421](https://github.com/googleapis/nodejs-storage/commit/ae83421e617a5761c75a7c8a15eaa1ea7c7fb1de)) + +## [6.11.0](https://github.com/googleapis/nodejs-storage/compare/v6.10.1...v6.11.0) (2023-06-02) + + +### Features + +* Add support for matchGlob list option ([#2206](https://github.com/googleapis/nodejs-storage/issues/2206)) ([79dd839](https://github.com/googleapis/nodejs-storage/commit/79dd8394fbbc0c97aa3acb86ad2248fd58b243b4)) + +## [6.10.1](https://github.com/googleapis/nodejs-storage/compare/v6.10.0...v6.10.1) (2023-05-10) + + +### Performance Improvements + +* Improve Multiple Chunk Upload Performance ([#2185](https://github.com/googleapis/nodejs-storage/issues/2185)) ([3b2b877](https://github.com/googleapis/nodejs-storage/commit/3b2b87707072e5dc9221a5ba3c727c70db13a593)) + +## [6.10.0](https://github.com/googleapis/nodejs-storage/compare/v6.9.5...v6.10.0) (2023-05-02) + + +### Features + +* Retry Socket Connection Timeouts on Node 20+ ([#2187](https://github.com/googleapis/nodejs-storage/issues/2187)) ([733b560](https://github.com/googleapis/nodejs-storage/commit/733b560c2f634884dd31c916c208ee6395d4fbe1)) + +## [6.9.5](https://github.com/googleapis/nodejs-storage/compare/v6.9.4...v6.9.5) (2023-03-30) + + +### Bug Fixes + +* Check that err.message is a string before attempting indexOf ([#2173](https://github.com/googleapis/nodejs-storage/issues/2173)) ([130818d](https://github.com/googleapis/nodejs-storage/commit/130818d291a0004e5d36e5ee72a8c0687b9db181)) +* V4 Signing Errors with exactly 7 day expiry ([#2170](https://github.com/googleapis/nodejs-storage/issues/2170)) ([f930998](https://github.com/googleapis/nodejs-storage/commit/f9309985d130a574dd23ecf7b6fb5b58b01d42a0)) + +## [6.9.4](https://github.com/googleapis/nodejs-storage/compare/v6.9.3...v6.9.4) (2023-03-02) + + +### Bug Fixes + +* Refactor createReadStream to remove unnecessary stream ([#2153](https://github.com/googleapis/nodejs-storage/issues/2153)) ([2c97310](https://github.com/googleapis/nodejs-storage/commit/2c97310da9edd1afbf61711631979433f10249a5)) + +## [6.9.3](https://github.com/googleapis/nodejs-storage/compare/v6.9.2...v6.9.3) (2023-02-15) + + +### Bug Fixes + +* Reduce memory footprint of deleteFiles by utilizing getFilesStream and using 
smaller queue of promises ([#2147](https://github.com/googleapis/nodejs-storage/issues/2147)) ([f792f25](https://github.com/googleapis/nodejs-storage/commit/f792f25e3fd38003056f649eaa638a782290cbac)) + +## [6.9.2](https://github.com/googleapis/nodejs-storage/compare/v6.9.1...v6.9.2) (2023-02-06) + + +### Bug Fixes + +* Correctly handle if a user has no permissions when calling iam.testPermissions ([#2140](https://github.com/googleapis/nodejs-storage/issues/2140)) ([cce902d](https://github.com/googleapis/nodejs-storage/commit/cce902d27cf3c4a23730550b72d10dc76425c974)) + +## [6.9.1](https://github.com/googleapis/nodejs-storage/compare/v6.9.0...v6.9.1) (2023-01-24) + + +### Bug Fixes + +* Setting file metadata is conditionally idempotent on ifmetagenerationmatch not ifgenerationmatch ([#2131](https://github.com/googleapis/nodejs-storage/issues/2131)) ([f20c28c](https://github.com/googleapis/nodejs-storage/commit/f20c28c5875c9a7095b028912550512459fbf844)) + +## [6.9.0](https://github.com/googleapis/nodejs-storage/compare/v6.8.0...v6.9.0) (2023-01-04) + + +### Features + +* Add ability to upload entire directory, add samples for upload … ([#2118](https://github.com/googleapis/nodejs-storage/issues/2118)) ([b0f32ce](https://github.com/googleapis/nodejs-storage/commit/b0f32ced04eae1b8ad88a0939fa763cb16b08df9)) + +## [6.8.0](https://github.com/googleapis/nodejs-storage/compare/v6.7.0...v6.8.0) (2022-12-07) + + +### Features + +* Implement parallel operations ([#2067](https://github.com/googleapis/nodejs-storage/issues/2067)) ([#2109](https://github.com/googleapis/nodejs-storage/issues/2109)) ([ce15b5e](https://github.com/googleapis/nodejs-storage/commit/ce15b5ef68353efbc005cb3a1a780e064ea04deb)) + +## [6.7.0](https://github.com/googleapis/nodejs-storage/compare/v6.6.0...v6.7.0) (2022-11-03) + + +### Features + +* Support autoclass ([#2078](https://github.com/googleapis/nodejs-storage/issues/2078)) ([7e83580](https://github.com/googleapis/nodejs-storage/commit/7e8358008467dd2d77702734e05f54bc06c9ca5b)) + +## [6.6.0](https://github.com/googleapis/nodejs-storage/compare/v6.5.4...v6.6.0) (2022-10-25) + + +### Features + +* **crc32c:** Convenient Method For Reading Files ([#2095](https://github.com/googleapis/nodejs-storage/issues/2095)) ([2145c81](https://github.com/googleapis/nodejs-storage/commit/2145c8177f3659fb5f193045866fbcd1220aaeaf)) + + +### Bug Fixes + +* Remove async from final function which causes double invocation … ([#2094](https://github.com/googleapis/nodejs-storage/issues/2094)) ([1a3df98](https://github.com/googleapis/nodejs-storage/commit/1a3df985e9096229bc2909921b4974680e12be2a)) + +## [6.5.4](https://github.com/googleapis/nodejs-storage/compare/v6.5.3...v6.5.4) (2022-10-20) + + +### Bug Fixes + +* Revert STORAGE_EMULATOR_HOST handling ([#2089](https://github.com/googleapis/nodejs-storage/issues/2089)) ([48dce65](https://github.com/googleapis/nodejs-storage/commit/48dce654064470f7496d160d87b696ab5cfd65d4)) + +## [6.5.3](https://github.com/googleapis/nodejs-storage/compare/v6.5.2...v6.5.3) (2022-10-18) + + +### Bug Fixes + +* Correct STORAGE_EMULATOR_HOST handling ([#2069](https://github.com/googleapis/nodejs-storage/issues/2069), [#1314](https://github.com/googleapis/nodejs-storage/issues/1314)) ([#2070](https://github.com/googleapis/nodejs-storage/issues/2070)) ([c75b8b8](https://github.com/googleapis/nodejs-storage/commit/c75b8b82262dddb794304a71f648bd6e03cafb30)) + +## [6.5.2](https://github.com/googleapis/nodejs-storage/compare/v6.5.1...v6.5.2) (2022-09-23) + + +### Bug Fixes + 
+* Determine `Content-Length` Before Attempting Multi-chunk Upload ([#2074](https://github.com/googleapis/nodejs-storage/issues/2074)) ([666402a](https://github.com/googleapis/nodejs-storage/commit/666402a6a65c2ee8e91bc4fe072e91c0b893864e)) + +## [6.5.1](https://github.com/googleapis/nodejs-storage/compare/v6.5.0...v6.5.1) (2022-09-20) + + +### Bug Fixes + +* Revert skip validation ([#2023](https://github.com/googleapis/nodejs-storage/issues/2023)) ([70ab224](https://github.com/googleapis/nodejs-storage/commit/70ab22459b51b9781e40b0cc86663c1658e43520)) + +## [6.5.0](https://github.com/googleapis/nodejs-storage/compare/v6.4.2...v6.5.0) (2022-09-15) + + +### Features + +* Add multiple lifecycle rules ([#2062](https://github.com/googleapis/nodejs-storage/issues/2062)) ([fbe2deb](https://github.com/googleapis/nodejs-storage/commit/fbe2deb72bd98db54a03cf228a360a871ba1915b)) + +## [6.4.2](https://github.com/googleapis/nodejs-storage/compare/v6.4.1...v6.4.2) (2022-09-01) + + +### Bug Fixes + +* remove pip install statements ([#1546](https://github.com/googleapis/nodejs-storage/issues/1546)) ([#2049](https://github.com/googleapis/nodejs-storage/issues/2049)) ([c90ba0f](https://github.com/googleapis/nodejs-storage/commit/c90ba0feecca9d39de3c52f12cc9423b7a2d4d47)) +* truncated `createReadStream` through early `end` event ([#2056](https://github.com/googleapis/nodejs-storage/issues/2056)) ([a4716a4](https://github.com/googleapis/nodejs-storage/commit/a4716a4ed1053560f2692647c8a90131763c1e72)) + +## [6.4.1](https://github.com/googleapis/nodejs-storage/compare/v6.4.0...v6.4.1) (2022-08-12) + + +### Bug Fixes + +* Remove `pumpify` ([#2029](https://github.com/googleapis/nodejs-storage/issues/2029)) ([edc1d64](https://github.com/googleapis/nodejs-storage/commit/edc1d64069a6038c301c3b775f116fbf69b10b28)) +* Retry `EPIPE` Connection Errors + Attempt Retries in Resumable Upload Connection Errors ([#2040](https://github.com/googleapis/nodejs-storage/issues/2040)) ([ba35321](https://github.com/googleapis/nodejs-storage/commit/ba35321c3fef9a1874ff8fbea43885e2e7746b4f)) + +## [6.4.0](https://github.com/googleapis/nodejs-storage/compare/v6.3.0...v6.4.0) (2022-08-10) + + +### Features + +* add functionality for passing preconditions at the function level ([#1993](https://github.com/googleapis/nodejs-storage/issues/1993)) ([21f173e](https://github.com/googleapis/nodejs-storage/commit/21f173eb17d4216e2b42ffdd1ed0104aeda7cf13)) + +## [6.3.0](https://github.com/googleapis/nodejs-storage/compare/v6.2.3...v6.3.0) (2022-08-01) + + +### Features + +* custom dual regions refactor implementation ([#2012](https://github.com/googleapis/nodejs-storage/issues/2012)) ([9a614fb](https://github.com/googleapis/nodejs-storage/commit/9a614fb2b624f8234fa9d40f352dae177b2b0374)) + +## [6.2.3](https://github.com/googleapis/nodejs-storage/compare/v6.2.2...v6.2.3) (2022-07-13) + + +### Bug Fixes + +* force setMetadata calls to use promise version, invoke callbacks manually ([#2000](https://github.com/googleapis/nodejs-storage/issues/2000)) ([f488647](https://github.com/googleapis/nodejs-storage/commit/f488647d5ac8ccfdb479d7ea24a377d10573b8c4)) + +## [6.2.2](https://github.com/googleapis/nodejs-storage/compare/v6.2.1...v6.2.2) (2022-06-29) + + +### Bug Fixes + +* correctly handle an empty file in download function ([#1995](https://github.com/googleapis/nodejs-storage/issues/1995)) ([f1a5a0b](https://github.com/googleapis/nodejs-storage/commit/f1a5a0bd121b84c3d56a9eddbd31593089634574)) + +## 
[6.2.1](https://github.com/googleapis/nodejs-storage/compare/v6.2.0...v6.2.1) (2022-06-27) + + +### Bug Fixes + +* explicitly import URL so that .d.ts file is correctly generated ([#1992](https://github.com/googleapis/nodejs-storage/issues/1992)) ([a968f50](https://github.com/googleapis/nodejs-storage/commit/a968f508e7c98e160b658ffc3ab568997548172f)) + +## [6.2.0](https://github.com/googleapis/nodejs-storage/compare/v6.1.0...v6.2.0) (2022-06-22) + + +### Features + +* Convenient `gs://` URI retrieval ([#1987](https://github.com/googleapis/nodejs-storage/issues/1987)) ([58fad6d](https://github.com/googleapis/nodejs-storage/commit/58fad6d9a3bd92966306e98fd7dedd3992995cb7)) + + +### Bug Fixes + +* **deps:** update dependency @google-cloud/projectify to v3 ([#1986](https://github.com/googleapis/nodejs-storage/issues/1986)) ([71a61ec](https://github.com/googleapis/nodejs-storage/commit/71a61ec4ef2436bc091f390af14a02b6920b2b87)) +* **deps:** update dependency @google-cloud/pubsub to v3 ([#1972](https://github.com/googleapis/nodejs-storage/issues/1972)) ([004b97e](https://github.com/googleapis/nodejs-storage/commit/004b97ec0ae866997c84ee2327db87e59f510f00)) +* Requests Respect `config.projectIdRequired` === `false` ([#1988](https://github.com/googleapis/nodejs-storage/issues/1988)) ([8813369](https://github.com/googleapis/nodejs-storage/commit/881336944be37e15c45b12973064245adc519860)) + +## [6.1.0](https://github.com/googleapis/nodejs-storage/compare/v6.0.1...v6.1.0) (2022-06-08) + + +### Features + +* support OLM Prefix/Suffix ([#1847](https://github.com/googleapis/nodejs-storage/issues/1847)) ([c22984c](https://github.com/googleapis/nodejs-storage/commit/c22984caa8e8ae09a61d308876b2b3d97503777b)) + +### [6.0.1](https://github.com/googleapis/nodejs-storage/compare/v6.0.0...v6.0.1) (2022-05-25) + + +### Bug Fixes + +* capture and throw on non-existent files ([#1969](https://github.com/googleapis/nodejs-storage/issues/1969)) ([52d81c0](https://github.com/googleapis/nodejs-storage/commit/52d81c026f30aef0902ea7173dfa6da2e7f97d50)) + +## [6.0.0](https://github.com/googleapis/nodejs-storage/compare/v5.20.5...v6.0.0) (2022-05-24) + + +### ⚠ BREAKING CHANGES + +* update TypeScript +* remove deprecated fields +* remove configstore +* align resumable upload behavior +* utilize internalized CRC32C utilities +* drop node 10 support + +### Build System + +* drop node 10 support ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) + + +### Code Refactoring + +* align resumable upload behavior ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) +* remove configstore ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) +* remove deprecated fields ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) +* utilize internalized CRC32C utilities ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) + + +### deps + +* update TypeScript ([77fa8d9](https://github.com/googleapis/nodejs-storage/commit/77fa8d9f95afbc830b57188ce0d2dfac46476d0b)) + +### [5.20.5](https://github.com/googleapis/nodejs-storage/compare/v5.20.4...v5.20.5) (2022-05-19) + + +### Bug Fixes + +* **chore:** move uuid package to dependencies ([#1952](https://github.com/googleapis/nodejs-storage/issues/1952)) 
([0ff5aa3](https://github.com/googleapis/nodejs-storage/commit/0ff5aa3e9ff8b4dcc536b9494e44b9a2fb217727)) + +### [5.20.4](https://github.com/googleapis/nodejs-storage/compare/v5.20.3...v5.20.4) (2022-05-18) + + +### Bug Fixes + +* revert native typescript mocha tests ([#1947](https://github.com/googleapis/nodejs-storage/issues/1947)) ([1d0ea7d](https://github.com/googleapis/nodejs-storage/commit/1d0ea7d2281a049bc99c6bd810dd24ffc83c6a09)) +* support empty object uploads for resumable upload ([#1949](https://github.com/googleapis/nodejs-storage/issues/1949)) ([da6016e](https://github.com/googleapis/nodejs-storage/commit/da6016e20b681d6a75ed1a5459cfd333b58c70a9)) + +### [5.20.3](https://github.com/googleapis/nodejs-storage/compare/v5.20.2...v5.20.3) (2022-05-17) + + +### Bug Fixes + +* move retrieval of package.json to utility function ([#1941](https://github.com/googleapis/nodejs-storage/issues/1941)) ([ac5cbdf](https://github.com/googleapis/nodejs-storage/commit/ac5cbdf0d3c363e7dab43e9002b01a0dae877642)) + +### [5.20.2](https://github.com/googleapis/nodejs-storage/compare/v5.20.1...v5.20.2) (2022-05-17) + + +### Bug Fixes + +* use path.join and __dirname for require package.json ([#1936](https://github.com/googleapis/nodejs-storage/issues/1936)) ([b868762](https://github.com/googleapis/nodejs-storage/commit/b86876201ec7d4ce58ae5c1d9635dad82a1fdc4b)) + +### [5.20.1](https://github.com/googleapis/nodejs-storage/compare/v5.20.0...v5.20.1) (2022-05-16) + + +### Bug Fixes + +* do not use import on package.json ([#1932](https://github.com/googleapis/nodejs-storage/issues/1932)) ([d0f0494](https://github.com/googleapis/nodejs-storage/commit/d0f04941f1cabf7c153f7e5abb91c358f12ef83e)) + +## [5.20.0](https://github.com/googleapis/nodejs-storage/compare/v5.19.4...v5.20.0) (2022-05-16) + + +### Features + +* add x-goog-api-client headers for retry metrics ([#1920](https://github.com/googleapis/nodejs-storage/issues/1920)) ([0c7e4f6](https://github.com/googleapis/nodejs-storage/commit/0c7e4f6ade4cee1715e14b3cd9abf2aa2a56c0b9)) + +### [5.19.4](https://github.com/googleapis/nodejs-storage/compare/v5.19.3...v5.19.4) (2022-04-28) + + +### Bug Fixes + +* don't modify passed in options ([#1895](https://github.com/googleapis/nodejs-storage/issues/1895)) ([cd80ca3](https://github.com/googleapis/nodejs-storage/commit/cd80ca318a2b10379b8b166a59f3943b97576475)) + +### [5.19.3](https://github.com/googleapis/nodejs-storage/compare/v5.19.2...v5.19.3) (2022-04-20) + + +### Bug Fixes + +* export idempotencystrategy and preconditionoptions from index ([#1880](https://github.com/googleapis/nodejs-storage/issues/1880)) ([8aafe04](https://github.com/googleapis/nodejs-storage/commit/8aafe0453a8e2dc41f848dd5165d3e86d6a160ed)) + +### [5.19.2](https://github.com/googleapis/nodejs-storage/compare/v5.19.1...v5.19.2) (2022-04-14) + + +### Bug Fixes + +* deleting, getting, and getting metadata for notifications ([#1872](https://github.com/googleapis/nodejs-storage/issues/1872)) ([451570e](https://github.com/googleapis/nodejs-storage/commit/451570e6038a1b91b5723db9b941cd916fd76348)) + +### [5.19.1](https://github.com/googleapis/nodejs-storage/compare/v5.19.0...v5.19.1) (2022-04-08) + + +### Bug Fixes + +* prevent retrying 200 response ([#1857](https://github.com/googleapis/nodejs-storage/issues/1857)) ([638a47b](https://github.com/googleapis/nodejs-storage/commit/638a47b4e7ecc6e94b3b11d1ccc7c52afdeaafe1)) + +## [5.19.0](https://github.com/googleapis/nodejs-storage/compare/v5.18.3...v5.19.0) (2022-04-06) + + +### Features + +* Dual 
Region Support ([#1814](https://github.com/googleapis/nodejs-storage/issues/1814)) ([caf7ee5](https://github.com/googleapis/nodejs-storage/commit/caf7ee561fd640b0daea92c7837c47e66070c30c)) + +### [5.18.3](https://github.com/googleapis/nodejs-storage/compare/v5.18.2...v5.18.3) (2022-03-28) + + +### Bug Fixes + +* encode name portion when calling publicUrl function ([#1828](https://github.com/googleapis/nodejs-storage/issues/1828)) ([5522b35](https://github.com/googleapis/nodejs-storage/commit/5522b35e83857421a00e71c4e93ba6ae0ffccb90)) +* fixed typo ([#1803](https://github.com/googleapis/nodejs-storage/issues/1803)) ([be70dae](https://github.com/googleapis/nodejs-storage/commit/be70dae33751ddc3e0ae5a55b5cdbf2002a42932)) + +### [5.18.2](https://github.com/googleapis/nodejs-storage/compare/v5.18.1...v5.18.2) (2022-02-16) + + +### Bug Fixes + +* resumable uploads should respect autoRetry & multipart uploads should correctly use preconditions ([#1779](https://github.com/googleapis/nodejs-storage/issues/1779)) ([1e72586](https://github.com/googleapis/nodejs-storage/commit/1e725867dce8f78070435b96b65f97f2253c0e80)) + +### [5.18.1](https://github.com/googleapis/nodejs-storage/compare/v5.18.0...v5.18.1) (2022-01-26) + + +### Bug Fixes + +* **gcs-resumable-upload:** Stop Duplicate Response Handlers on Retries ([#1764](https://github.com/googleapis/nodejs-storage/issues/1764)) ([fe44871](https://github.com/googleapis/nodejs-storage/commit/fe4487187aa405e7d7f8e0bec485bbddb76ea050)) + +## [5.18.0](https://github.com/googleapis/nodejs-storage/compare/v5.17.0...v5.18.0) (2022-01-18) + + +### Features + +* Expose `chunkSize` param for `CreateResumableUploadOptions` ([#1754](https://github.com/googleapis/nodejs-storage/issues/1754)) ([3acfd5b](https://github.com/googleapis/nodejs-storage/commit/3acfd5b2412d046c471d8d707023e034dc1a167a)) + +## [5.17.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.16.1...v5.17.0) (2022-01-10) + + +### Features + +* add support for rpo (turbo replication) metadata field when cre… ([#1648](https://www.github.com/googleapis/nodejs-storage/issues/1648)) ([291e6ef](https://www.github.com/googleapis/nodejs-storage/commit/291e6ef48efcfca55b4a7dca8868a57c0eeec89b)) + + +### Bug Fixes + +* remove compodoc dev dependency ([#1745](https://www.github.com/googleapis/nodejs-storage/issues/1745)) ([809bf11](https://www.github.com/googleapis/nodejs-storage/commit/809bf11b8a2a2203db82aec38b6a6023a805bd62)) + +### [5.16.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.16.0...v5.16.1) (2021-11-29) + + +### Bug Fixes + +* change properties with function value to methods ([#1715](https://www.github.com/googleapis/nodejs-storage/issues/1715)) ([c365254](https://www.github.com/googleapis/nodejs-storage/commit/c36525402da8e748971473b1cdd2423e8fd953e1)) +* revert skip validation ([#1718](https://www.github.com/googleapis/nodejs-storage/issues/1718)) ([0c75e33](https://www.github.com/googleapis/nodejs-storage/commit/0c75e33eb0291aa7dfc704c86733f4c0dc78d322)) +* stop File.download from truncating output file on failure ([#1720](https://www.github.com/googleapis/nodejs-storage/issues/1720)) ([d77979b](https://www.github.com/googleapis/nodejs-storage/commit/d77979b1003dbb89cd9d4725330de50b1f8d9262)) + +## [5.16.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.5...v5.16.0) (2021-11-09) + + +### Features + +* improved error messages for resumable uploads ([#1708](https://www.github.com/googleapis/nodejs-storage/issues/1708)) 
([50cdbb6](https://www.github.com/googleapis/nodejs-storage/commit/50cdbb6f730ee7f96cb598c4cda412fc4bcc8807)) + + +### Bug Fixes + +* add scenario 3 conformance tests ([#1702](https://www.github.com/googleapis/nodejs-storage/issues/1702)) ([e16a3a5](https://www.github.com/googleapis/nodejs-storage/commit/e16a3a5eb09a388743259d54c31e62d7fc220bf0)) +* retry uri creation dep update & conformance tests ([#1700](https://www.github.com/googleapis/nodejs-storage/issues/1700)) ([d265f8c](https://www.github.com/googleapis/nodejs-storage/commit/d265f8c5e4e6a8c8239e959dfb4d0acbf4cdfe0a)) + +### [5.15.6](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.5...v5.15.6) (2021-11-05) + + +### Bug Fixes + +* add scenario 3 conformance tests ([#1702](https://www.github.com/googleapis/nodejs-storage/issues/1702)) ([e16a3a5](https://www.github.com/googleapis/nodejs-storage/commit/e16a3a5eb09a388743259d54c31e62d7fc220bf0)) +* retry uri creation dep update & conformance tests ([#1700](https://www.github.com/googleapis/nodejs-storage/issues/1700)) ([d265f8c](https://www.github.com/googleapis/nodejs-storage/commit/d265f8c5e4e6a8c8239e959dfb4d0acbf4cdfe0a)) + +### [5.15.5](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.4...v5.15.5) (2021-11-03) + + +### Bug Fixes + +* **deps:** update dependency mime to v3 ([#1696](https://www.github.com/googleapis/nodejs-storage/issues/1696)) ([f337208](https://www.github.com/googleapis/nodejs-storage/commit/f33720883bb6d797d2fb89d5e6ff9584d216be74)) +* explicitly define function type of getFilesStream ([#1697](https://www.github.com/googleapis/nodejs-storage/issues/1697)) ([c950c23](https://www.github.com/googleapis/nodejs-storage/commit/c950c23742bb9291a3e15b95ae0ee4a13466c361)) + +### [5.15.4](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.3...v5.15.4) (2021-11-01) + + +### Bug Fixes + +* check e is not null ([#1692](https://www.github.com/googleapis/nodejs-storage/issues/1692)) ([56ff485](https://www.github.com/googleapis/nodejs-storage/commit/56ff485cbe28ba048c9a689711b034c416853b1f)) + +### [5.15.3](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.2...v5.15.3) (2021-10-14) + + +### Bug Fixes + +* do not use src precondition options in copy. 
([#1666](https://www.github.com/googleapis/nodejs-storage/issues/1666)) ([678ae77](https://www.github.com/googleapis/nodejs-storage/commit/678ae77dfb2c1eb2272734f34315b4d0ec726076)) + +### [5.15.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.1...v5.15.2) (2021-10-13) + + +### Bug Fixes + +* remove bucket preconditions from deleteFiles, it is a file operation not bucket ([#1661](https://www.github.com/googleapis/nodejs-storage/issues/1661)) ([6b7a06d](https://www.github.com/googleapis/nodejs-storage/commit/6b7a06defe1a3cadc6fad9258ff3fb01a2ecce0a)) + +### [5.15.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.15.0...v5.15.1) (2021-10-12) + + +### Bug Fixes + +* check generation on source files not metageneration on bucket ([#1654](https://www.github.com/googleapis/nodejs-storage/issues/1654)) ([760231c](https://www.github.com/googleapis/nodejs-storage/commit/760231ca520f4eedf878c245489cb07f95e153af)) + +## [5.15.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.7...v5.15.0) (2021-10-07) + + +### Features + +* add support for useAuthWithCustomEndpoint option ([#1645](https://www.github.com/googleapis/nodejs-storage/issues/1645)) ([d11d6b4](https://www.github.com/googleapis/nodejs-storage/commit/d11d6b4b2678cb240928e2dfe20f983d2ae896f6)) + + +### Bug Fixes + +* update common dep ([#1644](https://www.github.com/googleapis/nodejs-storage/issues/1644)) ([793467f](https://www.github.com/googleapis/nodejs-storage/commit/793467f25abf46eb7ba5e6cd1b80f735faa035dc)) + +### [5.14.8](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.7...v5.14.8) (2021-10-06) + + +### Bug Fixes + +* update common dep ([#1644](https://www.github.com/googleapis/nodejs-storage/issues/1644)) ([793467f](https://www.github.com/googleapis/nodejs-storage/commit/793467f25abf46eb7ba5e6cd1b80f735faa035dc)) + +### [5.14.7](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.6...v5.14.7) (2021-10-06) + + +### Bug Fixes + +* file preconditions should respect ifGenerationMatch not ifMetagenerationMatch ([#1642](https://www.github.com/googleapis/nodejs-storage/issues/1642)) ([eb6f31f](https://www.github.com/googleapis/nodejs-storage/commit/eb6f31f3f6c439e21bbc0cd46f32a7327e15f65e)) + +### [5.14.6](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.5...v5.14.6) (2021-10-06) + + +### Bug Fixes + +* pass precondition opts to new file ([#1638](https://www.github.com/googleapis/nodejs-storage/issues/1638)) ([1523ca9](https://www.github.com/googleapis/nodejs-storage/commit/1523ca962fda070cf60e5b81d062e3a291461c83)) + +### [5.14.5](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.4...v5.14.5) (2021-10-01) + + +### Bug Fixes + +* fix logic for buckets that do not have a generation ([#1634](https://www.github.com/googleapis/nodejs-storage/issues/1634)) ([9069bdc](https://www.github.com/googleapis/nodejs-storage/commit/9069bdc9a0ab495ab62033f432ea0e180e3b182e)) +* updated connection reset string ([#1632](https://www.github.com/googleapis/nodejs-storage/issues/1632)) ([b841d5b](https://www.github.com/googleapis/nodejs-storage/commit/b841d5b98c3d98b5f1dc7776a887159294eb0b36)) + +### [5.14.4](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.3...v5.14.4) (2021-09-27) + + +### Bug Fixes + +* respect precondition settings from constructors ([#1617](https://www.github.com/googleapis/nodejs-storage/issues/1617)) ([6a48942](https://www.github.com/googleapis/nodejs-storage/commit/6a48942e540e3d96e2a5b396b8e74cbe732178be)) + +### 
[5.14.3](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.2...v5.14.3) (2021-09-22) + + +### Bug Fixes + +* set autoretry back to instance value at end of file functions ([#1604](https://www.github.com/googleapis/nodejs-storage/issues/1604)) ([db3b59d](https://www.github.com/googleapis/nodejs-storage/commit/db3b59d731c9760d55bf112c211bdd644da911c4)) + +### [5.14.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.1...v5.14.2) (2021-09-13) + + +### Bug Fixes + +* **build:** set default branch to main ([#1587](https://www.github.com/googleapis/nodejs-storage/issues/1587)) ([b39ce95](https://www.github.com/googleapis/nodejs-storage/commit/b39ce95a2ec9d8dd2114863898181ea10670d962)) + +### [5.14.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.14.0...v5.14.1) (2021-09-08) + + +### Bug Fixes + +* **types:** remove duplicated definition of BucketOptions and make sure proper version is exported ([#1583](https://www.github.com/googleapis/nodejs-storage/issues/1583)) ([d8f4bc5](https://www.github.com/googleapis/nodejs-storage/commit/d8f4bc59bd3e2cebfe6494842414cd9f7e32018f)) + +## [5.14.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.13.2...v5.14.0) (2021-08-26) + + +### Features + +* retries for conditionally idempotent operations ([#1561](https://www.github.com/googleapis/nodejs-storage/issues/1561)) ([653f4b4](https://www.github.com/googleapis/nodejs-storage/commit/653f4b488e8603e4008e51b45920fb7de7138eab)) + + +### Bug Fixes + +* allow retries of metadatata operations in system tests ([#1568](https://www.github.com/googleapis/nodejs-storage/issues/1568)) ([9398566](https://www.github.com/googleapis/nodejs-storage/commit/939856680d279dcd9587328d0cc58f789b022f5a)) + +### [5.13.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.13.1...v5.13.2) (2021-08-26) + + +### Bug Fixes + +* update getLabels definition to actually allow no arguments when used in typescript ([#1559](https://www.github.com/googleapis/nodejs-storage/issues/1559)) ([176dbb5](https://www.github.com/googleapis/nodejs-storage/commit/176dbb5223f4442d10fd098ffa2cda5cf12144f2)) + +### [5.13.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.13.0...v5.13.1) (2021-08-18) + + +### Bug Fixes + +* **deps:** update dependency date-and-time to v2 ([#1537](https://www.github.com/googleapis/nodejs-storage/issues/1537)) ([9d0d69e](https://www.github.com/googleapis/nodejs-storage/commit/9d0d69eaf908817dec274abe915bd96bb22c663a)) + +## [5.13.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.12.0...v5.13.0) (2021-08-09) + + +### Features + +* Precondition checks ([#1523](https://www.github.com/googleapis/nodejs-storage/issues/1523)) ([7c24417](https://www.github.com/googleapis/nodejs-storage/commit/7c244178649f120cfefe58994b515da7ca6b7ffb)) + +## [5.12.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.11.1...v5.12.0) (2021-08-03) + + +### Features + +* pass precondition parameters to gcs-resumable-upload ([#1516](https://www.github.com/googleapis/nodejs-storage/issues/1516)) ([65211dd](https://www.github.com/googleapis/nodejs-storage/commit/65211ddb8ae19229154b4aca3d5ff97f2aaa9f56)) + +### [5.11.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.11.0...v5.11.1) (2021-08-02) + + +### Bug Fixes + +* don't retry non-idempotent functions ([#1517](https://www.github.com/googleapis/nodejs-storage/issues/1517)) ([c938795](https://www.github.com/googleapis/nodejs-storage/commit/c938795e8a5f14ba7724f2d0e334310dd8e8f207)) + +## 
[5.11.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.10.0...v5.11.0) (2021-07-23) + + +### Features + +* retries for resumable bucket.upload and file.save ([#1511](https://www.github.com/googleapis/nodejs-storage/issues/1511)) ([9bf163c](https://www.github.com/googleapis/nodejs-storage/commit/9bf163c3e3569bad1440940ff1a6bfd42404bb32)) + +## [5.10.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.9.0...v5.10.0) (2021-07-22) + + +### Features + +* retry multipart Bucket.upload ([#1509](https://www.github.com/googleapis/nodejs-storage/issues/1509)) ([730d0a0](https://www.github.com/googleapis/nodejs-storage/commit/730d0a0d4a6aa5192d998c54292d3423d3ddeaaa)) + +## [5.9.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.5...v5.9.0) (2021-07-21) + + +### Features + +* customize retry behavior implementation ([#1474](https://www.github.com/googleapis/nodejs-storage/issues/1474)) ([#1493](https://www.github.com/googleapis/nodejs-storage/issues/1493)) ([49008e3](https://www.github.com/googleapis/nodejs-storage/commit/49008e313b89ce6035543bf2cf1e60e253404520)) + +### [5.8.5](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.4...v5.8.5) (2021-05-04) + + +### Bug Fixes + +* **deps:** updated gcs-resumable-upload dependency ([#1459](https://www.github.com/googleapis/nodejs-storage/issues/1459)) ([afaccc7](https://www.github.com/googleapis/nodejs-storage/commit/afaccc70375a2c778e4306a59bf8a86736c17f6c)) + +### [5.8.4](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.3...v5.8.4) (2021-04-19) + + +### Bug Fixes + +* **deps:** update dependency date-and-time to v1 ([#1434](https://www.github.com/googleapis/nodejs-storage/issues/1434)) ([91ee6ca](https://www.github.com/googleapis/nodejs-storage/commit/91ee6cab38769d36b00d702e17b222518ad4e752)) +* don't fail if ~/.config doesn't exist ([#1428](https://www.github.com/googleapis/nodejs-storage/issues/1428)) ([3cfaba1](https://www.github.com/googleapis/nodejs-storage/commit/3cfaba19d4223c68f4382c06674f135838d32eb8)) + +### [5.8.3](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.2...v5.8.3) (2021-03-29) + + +### Bug Fixes + +* update CopyOptions type to include cacheControl, contentType and contentEncoding ([#1426](https://www.github.com/googleapis/nodejs-storage/issues/1426)) ([efa5bb8](https://www.github.com/googleapis/nodejs-storage/commit/efa5bb8a22ab68c0c3e8549850e5db4d57ff29bb)) + +### [5.8.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.1...v5.8.2) (2021-03-23) + + +### Bug Fixes + +* **perf:** pull hashes without refreshing metadata ([#1419](https://www.github.com/googleapis/nodejs-storage/issues/1419)) ([f3ec627](https://www.github.com/googleapis/nodejs-storage/commit/f3ec6278df3c3df4d4cddf5293be4dda95f0cbf7)) + +### [5.8.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.8.0...v5.8.1) (2021-03-02) + + +### Bug Fixes + +* deprecate `options.promise` and sync options with Service ([#1391](https://www.github.com/googleapis/nodejs-storage/issues/1391)) ([59cfe27](https://www.github.com/googleapis/nodejs-storage/commit/59cfe272de16c41e9c768953a25677294f520cc7)) +* **types:** support metadata override in file.copy() ([#1406](https://www.github.com/googleapis/nodejs-storage/issues/1406)) ([dda6d30](https://www.github.com/googleapis/nodejs-storage/commit/dda6d305638a07cbca188e459544393d8624f4f0)) + +## [5.8.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.7.4...v5.8.0) (2021-02-18) + + +### Features + +* adds support workload identity 
federation ([#1404](https://www.github.com/googleapis/nodejs-storage/issues/1404)) ([7d3a3f1](https://www.github.com/googleapis/nodejs-storage/commit/7d3a3f148361e56cbcd4dc4c8fb178b75f9208bf)) + +### [5.7.4](https://www.github.com/googleapis/nodejs-storage/compare/v5.7.3...v5.7.4) (2021-02-01) + + +### Bug Fixes + +* specified acceptable types for File.save data parameter ([#1388](https://www.github.com/googleapis/nodejs-storage/issues/1388)) ([607f6c1](https://www.github.com/googleapis/nodejs-storage/commit/607f6c1c8b9ae5414513957f54a5de2490c454b1)) + +### [5.7.3](https://www.github.com/googleapis/nodejs-storage/compare/v5.7.2...v5.7.3) (2021-01-22) + + +### Bug Fixes + +* retry multipart uploads on File.save() ([#1385](https://www.github.com/googleapis/nodejs-storage/issues/1385)) ([4dec8c1](https://www.github.com/googleapis/nodejs-storage/commit/4dec8c1a362e3f80cbbf49f8bf7e7eaa2e2ce3bc)) + +### [5.7.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.7.1...v5.7.2) (2021-01-11) + + +### Bug Fixes + +* deprecated directory. prefix should be used instead ([#1370](https://www.github.com/googleapis/nodejs-storage/issues/1370)) ([fae4c06](https://www.github.com/googleapis/nodejs-storage/commit/fae4c0635909a831d65bd3111b27250795d4735b)) + +### [5.7.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.7.0...v5.7.1) (2021-01-07) + + +### Bug Fixes + +* bump dependencies See [#1372](https://www.github.com/googleapis/nodejs-storage/issues/1372) for details ([#1375](https://www.github.com/googleapis/nodejs-storage/issues/1375)) ([7cf0264](https://www.github.com/googleapis/nodejs-storage/commit/7cf0264c0cea2e0d668818924aaa737381c07bfa)) + +## [5.7.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.6.0...v5.7.0) (2020-12-10) + + +### Features + +* support metadata updates from makePrivate() methods ([#1355](https://www.github.com/googleapis/nodejs-storage/issues/1355)) ([3db1e83](https://www.github.com/googleapis/nodejs-storage/commit/3db1e832af1cd3a394305b0a1120953d85f86249)) + + +### Bug Fixes + +* capitalize action in Bucket#addLifecycleRule ([#1358](https://www.github.com/googleapis/nodejs-storage/issues/1358)) ([205f39f](https://www.github.com/googleapis/nodejs-storage/commit/205f39f970639c06258fb1e0cd18a4d963729262)) +* jsdoc for bucket ([#1359](https://www.github.com/googleapis/nodejs-storage/issues/1359)) ([5fa530a](https://www.github.com/googleapis/nodejs-storage/commit/5fa530ab909e837558deecd7e1aa50a7cc4f5830)) + +## [5.6.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.5.1...v5.6.0) (2020-12-02) + + +### Features + +* allow ignoring 404 errors during delete() operations ([#1347](https://www.github.com/googleapis/nodejs-storage/issues/1347)) ([dab0e7d](https://www.github.com/googleapis/nodejs-storage/commit/dab0e7d9345499411cec57186ef7404026f041eb)) + +### [5.5.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.5.0...v5.5.1) (2020-12-01) + + +### Bug Fixes + +* add publicUrl to exclude promisifyAll ([#1339](https://www.github.com/googleapis/nodejs-storage/issues/1339)) ([ea2c2c9](https://www.github.com/googleapis/nodejs-storage/commit/ea2c2c9d670b8a8eefffa3e67dd2599135c0d933)) +* error if both `storageClass` and specific storage class name are provided ([#1323](https://www.github.com/googleapis/nodejs-storage/issues/1323)) ([91a65f8](https://www.github.com/googleapis/nodejs-storage/commit/91a65f86fe5f9608437b91d6e67192c78b0e8d7b)) +* only throw if `storageClass` and specific storage class name provide different values 
([#1346](https://www.github.com/googleapis/nodejs-storage/issues/1346)) ([1765608](https://www.github.com/googleapis/nodejs-storage/commit/1765608430d98c555e4a7431c28cd5878e65c7df)) +* **docs:** explain manual pagination and add usage sample ([#1317](https://www.github.com/googleapis/nodejs-storage/issues/1317)) ([16b779d](https://www.github.com/googleapis/nodejs-storage/commit/16b779de912f6ac082ec5ee3d904b125a5526485)) + +## [5.5.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.4.0...v5.5.0) (2020-11-03) + + +### Features + +* accessibleAt v4 signed urls ([#1328](https://www.github.com/googleapis/nodejs-storage/issues/1328)) ([1e0295e](https://www.github.com/googleapis/nodejs-storage/commit/1e0295eebd1120ce6cbcabee4cf1aaa825455d4b)) + +## [5.4.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.3.0...v5.4.0) (2020-10-29) + + +### Features + +* **userAgent:** allow for optional user agent to be provided ([#1313](https://www.github.com/googleapis/nodejs-storage/issues/1313)) ([13a064f](https://www.github.com/googleapis/nodejs-storage/commit/13a064f3e7342640c5c6e64e060e8558062a31c7)) +* add custom time field in metadata sample ([#1285](https://www.github.com/googleapis/nodejs-storage/issues/1285)) ([9e3474f](https://www.github.com/googleapis/nodejs-storage/commit/9e3474f87dabd300d914c0f44f49a13dae34227d)) +* add File#rename ([#1311](https://www.github.com/googleapis/nodejs-storage/issues/1311)) ([c77eaa3](https://www.github.com/googleapis/nodejs-storage/commit/c77eaa363be18b6b9ddd1f7b505e23be8cd5bf98)) +* public url of a file ([#1324](https://www.github.com/googleapis/nodejs-storage/issues/1324)) ([5ec256e](https://www.github.com/googleapis/nodejs-storage/commit/5ec256e58d04dbcb8dee8cde7460b306da5ff880)) + + +### Bug Fixes + +* set customEndpoint for custom environments ([#1316](https://www.github.com/googleapis/nodejs-storage/issues/1316)) ([60910e1](https://www.github.com/googleapis/nodejs-storage/commit/60910e1903f95a0abca6b9855d11833b32666e2c)) +* **deps:** update dependency gaxios to v4 ([#1322](https://www.github.com/googleapis/nodejs-storage/issues/1322)) ([f9b16d8](https://www.github.com/googleapis/nodejs-storage/commit/f9b16d82e5c6a26d76f721a277cf07b518b63a42)) +* moved publicUrl comment ([#1327](https://www.github.com/googleapis/nodejs-storage/issues/1327)) ([249ed2c](https://www.github.com/googleapis/nodejs-storage/commit/249ed2c4e5fe9cfb029ca8e60c1bafab5c370a48)) +* self-upload files for Unicode system tests ([#1318](https://www.github.com/googleapis/nodejs-storage/issues/1318)) ([e826a52](https://www.github.com/googleapis/nodejs-storage/commit/e826a526c3a5a8515cc7a07c15f86fcfb0fb2f93)) +* **deps:** update dependency duplexify to v4 ([#1302](https://www.github.com/googleapis/nodejs-storage/issues/1302)) ([1ce3d89](https://www.github.com/googleapis/nodejs-storage/commit/1ce3d895b3f5d12776f0634b41b1baef7aa4491c)) +* **deps:** update dependency yargs to v16 ([#1289](https://www.github.com/googleapis/nodejs-storage/issues/1289)) ([12f2133](https://www.github.com/googleapis/nodejs-storage/commit/12f2133b7e3ec9612b3694d93b9ff756cb1bbd66)) +* **types:** add `CreateBucketRequest.storageClass` ([#1299](https://www.github.com/googleapis/nodejs-storage/issues/1299)) ([d854c26](https://www.github.com/googleapis/nodejs-storage/commit/d854c2609eb87886a010e726daa5f1339ee9ea38)), closes [#1248](https://www.github.com/googleapis/nodejs-storage/issues/1248) + +## [5.3.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.2.0...v5.3.0) (2020-08-24) + + +### 
Features + +* support noncurrent time object lifecycle condition ([#1216](https://www.github.com/googleapis/nodejs-storage/issues/1216)) ([d198aff](https://www.github.com/googleapis/nodejs-storage/commit/d198affa98b0dd027d6628eaff9fcd2dca5c7b47)) + +## [5.2.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.1.2...v5.2.0) (2020-08-10) + + +### Features + +* remove through2 dependency ([#1240](https://www.github.com/googleapis/nodejs-storage/issues/1240)) ([97c73dd](https://www.github.com/googleapis/nodejs-storage/commit/97c73ddec4dbb7cbf7a1486c16ed24dbe9be6d83)) + + +### Bug Fixes + +* support request interceptors for file uploads ([#1225](https://www.github.com/googleapis/nodejs-storage/issues/1225)) ([aa4e4ec](https://www.github.com/googleapis/nodejs-storage/commit/aa4e4ecf49c2e67fdeb90d54e06335fe8671c185)) +* **deps:** update dependency date-and-time to ^0.14.0 ([#1263](https://www.github.com/googleapis/nodejs-storage/issues/1263)) ([408aff9](https://www.github.com/googleapis/nodejs-storage/commit/408aff9234b30f4d68bc9027fbc3088ab059578a)) + +### [5.1.2](https://www.github.com/googleapis/nodejs-storage/compare/v5.1.1...v5.1.2) (2020-07-06) + + +### Bug Fixes + +* **deps:** update dependency through2 to v4 ([#1226](https://www.github.com/googleapis/nodejs-storage/issues/1226)) ([2be25e8](https://www.github.com/googleapis/nodejs-storage/commit/2be25e8fbd4bc60f8e702c2120cf99510508cd15)) + +### [5.1.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.1.0...v5.1.1) (2020-06-19) + + +### Bug Fixes + +* **deps:** update dependency p-limit to v3 ([#1210](https://www.github.com/googleapis/nodejs-storage/issues/1210)) ([7da0379](https://www.github.com/googleapis/nodejs-storage/commit/7da03797791ed20ffbbc6456c7cfad1653c59e76)) +* update node issue template ([#1220](https://www.github.com/googleapis/nodejs-storage/issues/1220)) ([eaac92b](https://www.github.com/googleapis/nodejs-storage/commit/eaac92bb789aea32718280fb67c6496022fa56d3)) + +## [5.1.0](https://www.github.com/googleapis/nodejs-storage/compare/v5.0.1...v5.1.0) (2020-06-11) + + +### Features + +* **secrets:** begin migration to secret manager from keystore ([#1211](https://www.github.com/googleapis/nodejs-storage/issues/1211)) ([11b16ae](https://www.github.com/googleapis/nodejs-storage/commit/11b16ae1b06273ab169d7b1f65a54196956ef4ea)) +* allow setting timeouts on uploads ([#1208](https://www.github.com/googleapis/nodejs-storage/issues/1208)) ([01b3691](https://www.github.com/googleapis/nodejs-storage/commit/01b3691bbfc4a93e2229ea07184637479c515183)) + + +### Bug Fixes + +* **deps:** update dependency @google-cloud/pubsub to v2 ([#1201](https://www.github.com/googleapis/nodejs-storage/issues/1201)) ([c684b2a](https://www.github.com/googleapis/nodejs-storage/commit/c684b2abe99169a8801d76fda38a4cbfba91f417)) + +### [5.0.1](https://www.github.com/googleapis/nodejs-storage/compare/v5.0.0...v5.0.1) (2020-05-20) + + +### Bug Fixes + +* StorageOptions.apiEndpoint overrides simple and resumable uploads ([#1161](https://www.github.com/googleapis/nodejs-storage/issues/1161)) ([cf8ea5c](https://www.github.com/googleapis/nodejs-storage/commit/cf8ea5cbbecea50233b1681c7f8aba121ec37a1a)) +* **types:** assert in typescript 3.7 ([#1198](https://www.github.com/googleapis/nodejs-storage/issues/1198)) ([d5aa8b7](https://www.github.com/googleapis/nodejs-storage/commit/d5aa8b7892366c9fbbdc33dd001f644da61bb22e)) + +## [5.0.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.7.0...v5.0.0) (2020-05-13) + + +### ⚠ BREAKING CHANGES 
+ +* automatically detect contentType if not provided (#1190) +* drop keepAcl parameter in file copy (#1166) +* drop support for node.js 8.x + +### Features + +* automatically detect contentType if not provided ([#1190](https://www.github.com/googleapis/nodejs-storage/issues/1190)) ([b31ba4a](https://www.github.com/googleapis/nodejs-storage/commit/b31ba4a11399b57538ddf0d6ca2e10b2aa3fbc3a)) +* enable bytes read tracking ([#1074](https://www.github.com/googleapis/nodejs-storage/issues/1074)) ([0776a04](https://www.github.com/googleapis/nodejs-storage/commit/0776a044f3b2149b485e114369e952688df75645)) + + +### Bug Fixes + +* **bucket:** Only disable resumable uploads for bucket.upload (fixes [#1133](https://www.github.com/googleapis/nodejs-storage/issues/1133)) ([#1135](https://www.github.com/googleapis/nodejs-storage/issues/1135)) ([2c20148](https://www.github.com/googleapis/nodejs-storage/commit/2c201486b7b2d3146846ac96c877a904c4a674b0)), closes [/github.com/googleapis/nodejs-storage/pull/1135#issuecomment-620070038](https://www.github.com/googleapis//github.com/googleapis/nodejs-storage/pull/1135/issues/issuecomment-620070038) +* add whitespace to generateV4SignedPolicy ([#1136](https://www.github.com/googleapis/nodejs-storage/issues/1136)) ([dcee78b](https://www.github.com/googleapis/nodejs-storage/commit/dcee78b98da23b02fe7d2f13a9270546bc07bba8)) +* apache license URL ([#468](https://www.github.com/googleapis/nodejs-storage/issues/468)) ([#1151](https://www.github.com/googleapis/nodejs-storage/issues/1151)) ([e8116d3](https://www.github.com/googleapis/nodejs-storage/commit/e8116d3c6fa7412858692e67745b514eef78850e)) +* Point to team in correct org ([#1185](https://www.github.com/googleapis/nodejs-storage/issues/1185)) ([0bb1909](https://www.github.com/googleapis/nodejs-storage/commit/0bb19098013acf71cc3842f78ff333a8e356331a)) +* **deps:** update dependency @google-cloud/common to v3 ([#1134](https://www.github.com/googleapis/nodejs-storage/issues/1134)) ([774ac5c](https://www.github.com/googleapis/nodejs-storage/commit/774ac5c75f02238418cc8ed7242297ea573ca9cb)) +* **deps:** update dependency @google-cloud/paginator to v3 ([#1131](https://www.github.com/googleapis/nodejs-storage/issues/1131)) ([c1614d9](https://www.github.com/googleapis/nodejs-storage/commit/c1614d98e3047db379e09299b1014e80d73ed52f)) +* **deps:** update dependency @google-cloud/promisify to v2 ([#1127](https://www.github.com/googleapis/nodejs-storage/issues/1127)) ([06624a5](https://www.github.com/googleapis/nodejs-storage/commit/06624a534cd1fdbc38455eee8d89f9f60ba75758)) +* **deps:** update dependency uuid to v8 ([#1170](https://www.github.com/googleapis/nodejs-storage/issues/1170)) ([6a98d64](https://www.github.com/googleapis/nodejs-storage/commit/6a98d64831baf1ca1ec2f03ecc4914745cba1c86)) +* **deps:** update gcs-resumable-upload, remove gitnpm usage ([#1186](https://www.github.com/googleapis/nodejs-storage/issues/1186)) ([c78c9cd](https://www.github.com/googleapis/nodejs-storage/commit/c78c9cde49dccb2fcd4ce10e4e9f8299d65f6838)) +* **v4-policy:** encode special characters ([#1169](https://www.github.com/googleapis/nodejs-storage/issues/1169)) ([6e48539](https://www.github.com/googleapis/nodejs-storage/commit/6e48539d76ca27e6f4c6cf2ac0872970f7391fed)) +* sync to googleapis/conformance-tests@fa559a1 ([#1167](https://www.github.com/googleapis/nodejs-storage/issues/1167)) ([5500446](https://www.github.com/googleapis/nodejs-storage/commit/550044619d2f17a1977c83bce5df915c6dc9578c)), closes 
[#1168](https://www.github.com/googleapis/nodejs-storage/issues/1168) + + +### Miscellaneous Chores + +* drop keepAcl parameter in file copy ([#1166](https://www.github.com/googleapis/nodejs-storage/issues/1166)) ([5a4044a](https://www.github.com/googleapis/nodejs-storage/commit/5a4044a8ba13f248fc4f791248f797eb0f1f3c16)) + + +### Build System + +* drop support for node.js 8.x ([b80c025](https://www.github.com/googleapis/nodejs-storage/commit/b80c025f106052fd25554c64314b3b3520e829b5)) + +## [4.7.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.6.0...v4.7.0) (2020-03-26) + + +### Features + +* add remove conditional binding ([#1107](https://www.github.com/googleapis/nodejs-storage/issues/1107)) ([2143705](https://www.github.com/googleapis/nodejs-storage/commit/21437053e9497aa95ef37a865ffbdbaf4134138f)) +* v4 POST with signed policy ([#1102](https://www.github.com/googleapis/nodejs-storage/issues/1102)) ([a3d5b88](https://www.github.com/googleapis/nodejs-storage/commit/a3d5b88b8d3d25b6e16808eb5c1425aa0a8c5ecc)), closes [#1125](https://www.github.com/googleapis/nodejs-storage/issues/1125) + + +### Bug Fixes + +* **deps:** update dependency date-and-time to ^0.13.0 ([#1106](https://www.github.com/googleapis/nodejs-storage/issues/1106)) ([b759605](https://www.github.com/googleapis/nodejs-storage/commit/b7596058e130ee2d82dc2221f24220b83c04fdae)) +* **deps:** update dependency gaxios to v3 ([#1129](https://www.github.com/googleapis/nodejs-storage/issues/1129)) ([5561452](https://www.github.com/googleapis/nodejs-storage/commit/5561452cb0b6e5a1dcabea6973db57799422abb7)) +* **types:** wrap GetSignedUrlResponse ([#1119](https://www.github.com/googleapis/nodejs-storage/issues/1119)) ([0c7ac16](https://www.github.com/googleapis/nodejs-storage/commit/0c7ac161f808201562f60710b9ec7bce4fbf819f)) + +## [4.6.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.5.0...v4.6.0) (2020-03-13) + + +### Features + +* **storage:** Add versioning as optional metadata param of createBucket ([#1090](https://www.github.com/googleapis/nodejs-storage/issues/1090)) ([39869e3](https://www.github.com/googleapis/nodejs-storage/commit/39869e3c6c62eabe1840f0fd884b361265e2cb76)) + +## [4.5.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.4.0...v4.5.0) (2020-03-06) + + +### Features + +* **v4:** support v4 signed URL for GA ([#1072](https://www.github.com/googleapis/nodejs-storage/issues/1072)) ([79d7b8f](https://www.github.com/googleapis/nodejs-storage/commit/79d7b8f5a187f23e58bf84db3f4b8c4b1a921978)), closes [#1051](https://www.github.com/googleapis/nodejs-storage/issues/1051) [#1056](https://www.github.com/googleapis/nodejs-storage/issues/1056) [#1066](https://www.github.com/googleapis/nodejs-storage/issues/1066) [#1068](https://www.github.com/googleapis/nodejs-storage/issues/1068) [#1069](https://www.github.com/googleapis/nodejs-storage/issues/1069) [#1067](https://www.github.com/googleapis/nodejs-storage/issues/1067) [#1070](https://www.github.com/googleapis/nodejs-storage/issues/1070) + +## [4.4.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.3.2...v4.4.0) (2020-03-03) + + +### Features + +* **bucket:** support single source in combine ([#1076](https://www.github.com/googleapis/nodejs-storage/issues/1076)) ([#1085](https://www.github.com/googleapis/nodejs-storage/issues/1085)) ([251a617](https://www.github.com/googleapis/nodejs-storage/commit/251a617791ca4b0e148b741d1931013150becc02)) + +### [4.3.2](https://www.github.com/googleapis/nodejs-storage/compare/v4.3.1...v4.3.2) 
(2020-03-02) + + +### Bug Fixes + +* **deps:** update dependency uuid to v7 ([#1081](https://www.github.com/googleapis/nodejs-storage/issues/1081)) ([c72d57f](https://www.github.com/googleapis/nodejs-storage/commit/c72d57f6f2982dad512d425ee3f041b43a87c278)) +* **tests:** flaky getSignedPolicy tests ([#1093](https://www.github.com/googleapis/nodejs-storage/issues/1093)) ([531050a](https://www.github.com/googleapis/nodejs-storage/commit/531050a05e5b1eeedb25647417a8ae9df8d76f29)) + +### [4.3.1](https://www.github.com/googleapis/nodejs-storage/compare/v4.3.0...v4.3.1) (2020-02-06) + + +### Bug Fixes + +* **samples:** fix overwritten IAM conditions sample ([#1042](https://www.github.com/googleapis/nodejs-storage/issues/1042)) ([25d839c](https://www.github.com/googleapis/nodejs-storage/commit/25d839ccf421276d8a4c18b2be95004ca832d84d)) +* skip validation for server decompressed objects ([#1063](https://www.github.com/googleapis/nodejs-storage/issues/1063)) ([d6e3738](https://www.github.com/googleapis/nodejs-storage/commit/d6e37382da1ed3b72771770cb9447c62c91f26a5)) +* unhandled promise rejection warning in samples ([#1056](https://www.github.com/googleapis/nodejs-storage/issues/1056)) ([e95ec19](https://www.github.com/googleapis/nodejs-storage/commit/e95ec19756388e6fc4fc8e0d49a40c613ed006c6)) + +## [4.3.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.2.0...v4.3.0) (2020-01-21) + + +### Features + +* add support for CORS as a first class citizen ([#1020](https://www.github.com/googleapis/nodejs-storage/issues/1020)) ([9fee6d9](https://www.github.com/googleapis/nodejs-storage/commit/9fee6d969a1311a3db18bf523d3614adef0526ce)) + + +### Bug Fixes + +* **deps:** update dependency date-and-time to ^0.12.0 ([#1032](https://www.github.com/googleapis/nodejs-storage/issues/1032)) ([a324564](https://www.github.com/googleapis/nodejs-storage/commit/a324564e8f29345412047b7f6296098211e0e831)) +* a couple of typos from the refactorNodeSampleStorage branch ([#1038](https://www.github.com/googleapis/nodejs-storage/issues/1038)) ([8faa6c6](https://www.github.com/googleapis/nodejs-storage/commit/8faa6c6698b3b7ef4857d978482209bc5077e08e)) +* do not modify constructor options ([#974](https://www.github.com/googleapis/nodejs-storage/issues/974)) ([4969148](https://www.github.com/googleapis/nodejs-storage/commit/4969148f5e114d86aa4928109f099ec15d56fda5)) +* refactor getMetadata sample into its own file ([#1008](https://www.github.com/googleapis/nodejs-storage/issues/1008)) ([6ed1af8](https://www.github.com/googleapis/nodejs-storage/commit/6ed1af8aadd6f72cf0957d02e403f7c551166a5c)) +* refactor getRetentionPolicy sample into its own file ([#993](https://www.github.com/googleapis/nodejs-storage/issues/993)) ([47e4ad8](https://www.github.com/googleapis/nodejs-storage/commit/47e4ad8c8a4fd16f4f1d9d6d9bfdc9e30b1ab999)) +* refactor getUniformBucketLevelAccess into its own file ([#981](https://www.github.com/googleapis/nodejs-storage/issues/981)) ([0ba69f1](https://www.github.com/googleapis/nodejs-storage/commit/0ba69f1b3d6093701dac927fa4543d2d911ce8b0)) +* refactor rotateEncryptionKey sample into its own file ([#1030](https://www.github.com/googleapis/nodejs-storage/issues/1030)) ([afdf0fe](https://www.github.com/googleapis/nodejs-storage/commit/afdf0febe8760c0819736961065d134e231a0afa)) +* refactor uploadEncryptedFile sample into its own file ([#1028](https://www.github.com/googleapis/nodejs-storage/issues/1028)) 
([ba4520b](https://www.github.com/googleapis/nodejs-storage/commit/ba4520b5f925968717222ffe5d2b1dbcfbea4334)) +* refactoring disableUniformBucketLevelAccess into its own file ([#980](https://www.github.com/googleapis/nodejs-storage/issues/980)) ([1481e20](https://www.github.com/googleapis/nodejs-storage/commit/1481e20d8332ee2806116166fb16028506487d2d)) + +## [4.2.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.1.3...v4.2.0) (2020-01-02) + + +### Features + +* **iam:** getIamPolicy to support requestedPolicyVersion ([#960](https://www.github.com/googleapis/nodejs-storage/issues/960)) ([0f38f75](https://www.github.com/googleapis/nodejs-storage/commit/0f38f7597f5e671b4c08830f8d457f9ca2c03cd1)) +* add support for Archive storage class ([#908](https://www.github.com/googleapis/nodejs-storage/issues/908)) ([63f63f4](https://www.github.com/googleapis/nodejs-storage/commit/63f63f448d2d220b1e77a7dcd379f7ac7fc22af3)) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([#952](https://www.github.com/googleapis/nodejs-storage/issues/952)) ([1fea60c](https://www.github.com/googleapis/nodejs-storage/commit/1fea60c04fd9762c5506c22df0992cdb8fce4ea0)) +* add createBucket file and updated relevant test ([#940](https://www.github.com/googleapis/nodejs-storage/issues/940)) ([913b43e](https://www.github.com/googleapis/nodejs-storage/commit/913b43e66bb10bd5dbf6b0bca9e65edd48b54234)) +* update PolicyDocument types ([#944](https://www.github.com/googleapis/nodejs-storage/issues/944)) ([b1c05b2](https://www.github.com/googleapis/nodejs-storage/commit/b1c05b27029215c8bc313a8f4f16ff13d5af2391)) + +### [4.1.3](https://www.github.com/googleapis/nodejs-storage/compare/v4.1.2...v4.1.3) (2019-11-18) + + +### Bug Fixes + +* **deps:** update dependency date-and-time to ^0.11.0 ([#934](https://www.github.com/googleapis/nodejs-storage/issues/934)) ([c4bf1c6](https://www.github.com/googleapis/nodejs-storage/commit/c4bf1c616d0a9402237708bddac1341c620f0542)) +* **deps:** update dependency yargs to v15 ([#933](https://www.github.com/googleapis/nodejs-storage/issues/933)) ([f40fe0c](https://www.github.com/googleapis/nodejs-storage/commit/f40fe0c5bd4e9b89ebe007e59986580fae4d7e09)) + +### [4.1.2](https://www.github.com/googleapis/nodejs-storage/compare/v4.1.1...v4.1.2) (2019-11-12) + + +### Bug Fixes + +* do not check access of configPath ([#915](https://www.github.com/googleapis/nodejs-storage/issues/915)) ([a21a644](https://www.github.com/googleapis/nodejs-storage/commit/a21a6443346f91f275233a9a07fb79550035e157)) +* missing snippets with jsdoc-region-tag ([#924](https://www.github.com/googleapis/nodejs-storage/issues/924)) ([310ba90](https://www.github.com/googleapis/nodejs-storage/commit/310ba90a48c6f02a31c1254037dfcdb4da4b7150)) +* **docs:** add jsdoc-region-tag plugin ([#929](https://www.github.com/googleapis/nodejs-storage/issues/929)) ([74526e7](https://www.github.com/googleapis/nodejs-storage/commit/74526e7f42cfa92c18ff332d0b9e10ea3b1324cf)) + +### [4.1.1](https://www.github.com/googleapis/nodejs-storage/compare/v4.1.0...v4.1.1) (2019-11-07) + + +### Bug Fixes + +* update format for upload sample ([#918](https://www.github.com/googleapis/nodejs-storage/issues/918)) ([d77208b](https://www.github.com/googleapis/nodejs-storage/commit/d77208bee82dc2d76c72fbe8d5db1bfeb8e392ff)) + +## [4.1.0](https://www.github.com/googleapis/nodejs-storage/compare/v4.0.1...v4.1.0) (2019-10-31) + + +### Features + +* [Storage] Support UniformBucketLevelAccess 
([#903](https://www.github.com/googleapis/nodejs-storage/issues/903)) ([35b1bd9](https://www.github.com/googleapis/nodejs-storage/commit/35b1bd9f6db351ad1e51b135a3f25cc5e1566600)) + +### [4.0.1](https://www.github.com/googleapis/nodejs-storage/compare/v4.0.0...v4.0.1) (2019-10-31) + + +### Bug Fixes + +* **docs:** missing quote in signed url jsdoc ([#896](https://www.github.com/googleapis/nodejs-storage/issues/896)) ([f2d567f](https://www.github.com/googleapis/nodejs-storage/commit/f2d567f279fee0f48c2d6a607f4066c9da372549)) +* use storage.googleapis.com for api endpoint ([862fb16](https://www.github.com/googleapis/nodejs-storage/commit/862fb16db2990c958c0097252a44c775431a7b3f)) +* **signed-url:** replace encodeURIComponent with custom encoding function ([#905](https://www.github.com/googleapis/nodejs-storage/issues/905)) ([ba41517](https://www.github.com/googleapis/nodejs-storage/commit/ba415179f1d5951742c1b239e0500bbd2a815ddd)) + +## [4.0.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.5.0...v4.0.0) (2019-10-17) + + +### ⚠ BREAKING CHANGES + +* allow leading slashes in file name (#820) + +### Bug Fixes + +* **deps:** update hash-stream-validation ([#884](https://www.github.com/googleapis/nodejs-storage/issues/884)) ([96a7fc2](https://www.github.com/googleapis/nodejs-storage/commit/96a7fc297a563819b09727990eb9ee15a421310b)) +* allow leading slashes in file name ([#820](https://www.github.com/googleapis/nodejs-storage/issues/820)) ([92e115d](https://www.github.com/googleapis/nodejs-storage/commit/92e115dca81604909fc34e983abcf47409d3f417)) + +## [3.5.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.4.0...v3.5.0) (2019-10-14) + + +### Features + +* **bucket:** add enableLogging method ([#876](https://www.github.com/googleapis/nodejs-storage/issues/876)) ([b09ecac](https://www.github.com/googleapis/nodejs-storage/commit/b09ecac79b70bf99e19f0f23ffcecd17e34516bb)) + +## [3.4.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.3.1...v3.4.0) (2019-10-10) + + +### Bug Fixes + +* file#move do not delete origin file if same as destination ([#874](https://www.github.com/googleapis/nodejs-storage/issues/874)) ([dcaba8a](https://www.github.com/googleapis/nodejs-storage/commit/dcaba8a)) +* pass predefined acl as destinationPredefinedAcl to qs ([#872](https://www.github.com/googleapis/nodejs-storage/issues/872)) ([09b8fa4](https://www.github.com/googleapis/nodejs-storage/commit/09b8fa4)) + + +### Features + +* add flag to allow disabling auto decompression by client ([#850](https://www.github.com/googleapis/nodejs-storage/issues/850)) ([9ebface](https://www.github.com/googleapis/nodejs-storage/commit/9ebface)) +* allow setting standard Bucket storage class ([#873](https://www.github.com/googleapis/nodejs-storage/issues/873)) ([12a99e9](https://www.github.com/googleapis/nodejs-storage/commit/12a99e9)) + +### [3.3.1](https://www.github.com/googleapis/nodejs-storage/compare/v3.3.0...v3.3.1) (2019-09-30) + + +### Bug Fixes + +* create correct v4 signed url with cname ([#868](https://www.github.com/googleapis/nodejs-storage/issues/868)) ([ace3b5e](https://www.github.com/googleapis/nodejs-storage/commit/ace3b5e)) + +## [3.3.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.2.1...v3.3.0) (2019-09-19) + + +### Bug Fixes + +* **deps:** update dependency @google-cloud/pubsub to ^0.32.0 ([#849](https://www.github.com/googleapis/nodejs-storage/issues/849)) ([fdf70bb](https://www.github.com/googleapis/nodejs-storage/commit/fdf70bb)) +* add warning for unsupported 
keepAcl param in file#copy ([#841](https://www.github.com/googleapis/nodejs-storage/issues/841)) ([473bda0](https://www.github.com/googleapis/nodejs-storage/commit/473bda0)) +* remove unsupported keepAcl param ([#837](https://www.github.com/googleapis/nodejs-storage/issues/837)) ([5f69a3d](https://www.github.com/googleapis/nodejs-storage/commit/5f69a3d)) +* use storage.googleapis.com for api endpoint ([#854](https://www.github.com/googleapis/nodejs-storage/issues/854)) ([27fa02f](https://www.github.com/googleapis/nodejs-storage/commit/27fa02f)) +* **deps:** update dependency @google-cloud/pubsub to v1 ([#858](https://www.github.com/googleapis/nodejs-storage/issues/858)) ([31466f4](https://www.github.com/googleapis/nodejs-storage/commit/31466f4)) +* **deps:** update dependency date-and-time to ^0.10.0 ([#857](https://www.github.com/googleapis/nodejs-storage/issues/857)) ([e9ec9cf](https://www.github.com/googleapis/nodejs-storage/commit/e9ec9cf)) + + +### Features + +* adds support for asyncIterators (via readable-stream@3 dependency) ([dd5ae7f](https://www.github.com/googleapis/nodejs-storage/commit/dd5ae7f)) +* allow removal of resumable upload cache ([#773](https://www.github.com/googleapis/nodejs-storage/issues/773)) ([da943db](https://www.github.com/googleapis/nodejs-storage/commit/da943db)), closes [#217](https://www.github.com/googleapis/nodejs-storage/issues/217) + +### [3.2.1](https://www.github.com/googleapis/nodejs-storage/compare/v3.2.0...v3.2.1) (2019-08-28) + + +### Bug Fixes + +* **docs:** stop redirecting reference docs to anchor, add new sample to README ([bbb5537](https://www.github.com/googleapis/nodejs-storage/commit/bbb5537)) +* **samples:** fix failing sample view IAM member-role groups ([1c4f21f](https://www.github.com/googleapis/nodejs-storage/commit/1c4f21f)) + +## [3.2.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.1.0...v3.2.0) (2019-08-22) + + +### Bug Fixes + +* **deps:** update @google-cloud/common with fixes for http ([#809](https://www.github.com/googleapis/nodejs-storage/issues/809)) ([8598631](https://www.github.com/googleapis/nodejs-storage/commit/8598631)) +* **deps:** update dependency @google-cloud/pubsub to ^0.31.0 ([#814](https://www.github.com/googleapis/nodejs-storage/issues/814)) ([604e564](https://www.github.com/googleapis/nodejs-storage/commit/604e564)) +* **deps:** update dependency date-and-time to ^0.9.0 ([#805](https://www.github.com/googleapis/nodejs-storage/issues/805)) ([8739a7d](https://www.github.com/googleapis/nodejs-storage/commit/8739a7d)) +* **ts:** fix nock @~11.0.0 ([#819](https://www.github.com/googleapis/nodejs-storage/issues/819)) ([48f9b44](https://www.github.com/googleapis/nodejs-storage/commit/48f9b44)) + + +### Features + +* hmac service account ([#751](https://www.github.com/googleapis/nodejs-storage/issues/751)) ([ed1ec7b](https://www.github.com/googleapis/nodejs-storage/commit/ed1ec7b)) + +## [3.1.0](https://www.github.com/googleapis/nodejs-storage/compare/v3.0.4...v3.1.0) (2019-08-09) + + +### Bug Fixes + +* **deps:** update dependency @google-cloud/paginator to v2 ([#781](https://www.github.com/googleapis/nodejs-storage/issues/781)) ([23244e9](https://www.github.com/googleapis/nodejs-storage/commit/23244e9)) +* **deps:** update dependency @google-cloud/pubsub to ^0.30.0 ([#778](https://www.github.com/googleapis/nodejs-storage/issues/778)) ([7256650](https://www.github.com/googleapis/nodejs-storage/commit/7256650)) +* allow calls with no request, add JSON proto 
([30fff15](https://www.github.com/googleapis/nodejs-storage/commit/30fff15)) +* **deps:** update dependency date-and-time to ^0.8.0 ([#779](https://www.github.com/googleapis/nodejs-storage/issues/779)) ([ab2734d](https://www.github.com/googleapis/nodejs-storage/commit/ab2734d)) +* **deps:** upgrade @google-cloud/common version to show original… ([#795](https://www.github.com/googleapis/nodejs-storage/issues/795)) ([ea63cbe](https://www.github.com/googleapis/nodejs-storage/commit/ea63cbe)) +* **deps:** use the latest extend ([#800](https://www.github.com/googleapis/nodejs-storage/issues/800)) ([a7f0172](https://www.github.com/googleapis/nodejs-storage/commit/a7f0172)) + + +### Features + +* **file:** allow setting configPath of resumable upload ([#642](https://www.github.com/googleapis/nodejs-storage/issues/642)) ([a8ceb78](https://www.github.com/googleapis/nodejs-storage/commit/a8ceb78)) + +### [3.0.4](https://www.github.com/googleapis/nodejs-storage/compare/v3.0.3...v3.0.4) (2019-07-25) + + +### Bug Fixes + +* **deps:** update dependency pumpify to v2 ([#740](https://www.github.com/googleapis/nodejs-storage/issues/740)) ([71a4f59](https://www.github.com/googleapis/nodejs-storage/commit/71a4f59)) + +### [3.0.3](https://www.github.com/googleapis/nodejs-storage/compare/v3.0.2...v3.0.3) (2019-07-16) + + +### Bug Fixes + +* **typescript:** make SetLabelOptions optional ([#766](https://www.github.com/googleapis/nodejs-storage/issues/766)) ([4336882](https://www.github.com/googleapis/nodejs-storage/commit/4336882)) + +### [3.0.2](https://www.github.com/googleapis/nodejs-storage/compare/v3.0.1...v3.0.2) (2019-07-01) + + +### Bug Fixes + +* **docs:** fix sample code in docs ([#759](https://www.github.com/googleapis/nodejs-storage/issues/759)) ([f9e5fd8](https://www.github.com/googleapis/nodejs-storage/commit/f9e5fd8)) +* **docs:** link to reference docs section on googleapis.dev ([#753](https://www.github.com/googleapis/nodejs-storage/issues/753)) ([5e3a96b](https://www.github.com/googleapis/nodejs-storage/commit/5e3a96b)) + +### [3.0.1](https://www.github.com/googleapis/nodejs-storage/compare/v3.0.0...v3.0.1) (2019-06-14) + + +### Bug Fixes + +* async should be dependency ([#743](https://www.github.com/googleapis/nodejs-storage/issues/743)) ([e542b8b](https://www.github.com/googleapis/nodejs-storage/commit/e542b8b)) + +## [3.0.0](https://www.github.com/googleapis/nodejs-storage/compare/v2.5.0...v3.0.0) (2019-06-14) + + +### ⚠ BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#688) + +### Bug Fixes + +* **deps:** update dependency @google-cloud/common to v1 ([#705](https://www.github.com/googleapis/nodejs-storage/issues/705)) ([72a9f51](https://www.github.com/googleapis/nodejs-storage/commit/72a9f51)) +* **deps:** update dependency @google-cloud/paginator to v1 ([#695](https://www.github.com/googleapis/nodejs-storage/issues/695)) ([ada995e](https://www.github.com/googleapis/nodejs-storage/commit/ada995e)) +* **deps:** update dependency @google-cloud/promisify to v1 ([#693](https://www.github.com/googleapis/nodejs-storage/issues/693)) ([5df2f83](https://www.github.com/googleapis/nodejs-storage/commit/5df2f83)) +* **deps:** update dependency @google-cloud/pubsub to ^0.29.0 ([#714](https://www.github.com/googleapis/nodejs-storage/issues/714)) ([3ee1a2c](https://www.github.com/googleapis/nodejs-storage/commit/3ee1a2c)) +* **deps:** update dependency arrify to v2 ([#667](https://www.github.com/googleapis/nodejs-storage/issues/667)) 
([ce02c27](https://www.github.com/googleapis/nodejs-storage/commit/ce02c27)) +* validate action of getSignedUrl() function ([#684](https://www.github.com/googleapis/nodejs-storage/issues/684)) ([1b09d24](https://www.github.com/googleapis/nodejs-storage/commit/1b09d24)) +* **deps:** update dependency date-and-time to ^0.7.0 ([#736](https://www.github.com/googleapis/nodejs-storage/issues/736)) ([7071f26](https://www.github.com/googleapis/nodejs-storage/commit/7071f26)) +* **deps:** update dependency xdg-basedir to v4 ([#681](https://www.github.com/googleapis/nodejs-storage/issues/681)) ([8b40e6a](https://www.github.com/googleapis/nodejs-storage/commit/8b40e6a)) +* **docs:** move to new client docs URL ([#738](https://www.github.com/googleapis/nodejs-storage/issues/738)) ([a637f99](https://www.github.com/googleapis/nodejs-storage/commit/a637f99)) +* **ts:** improve return types for response metadata ([#666](https://www.github.com/googleapis/nodejs-storage/issues/666)) ([da42bed](https://www.github.com/googleapis/nodejs-storage/commit/da42bed)) +* **types:** fix signatures of listing methods ([#703](https://www.github.com/googleapis/nodejs-storage/issues/703)) ([42937a8](https://www.github.com/googleapis/nodejs-storage/commit/42937a8)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#688](https://www.github.com/googleapis/nodejs-storage/issues/688)) ([6a1fa0f](https://www.github.com/googleapis/nodejs-storage/commit/6a1fa0f)) + + +### Features + +* add file.isPublic() function ([#708](https://www.github.com/googleapis/nodejs-storage/issues/708)) ([f86cadb](https://www.github.com/googleapis/nodejs-storage/commit/f86cadb)) +* support apiEndpoint override ([#728](https://www.github.com/googleapis/nodejs-storage/issues/728)) ([61eeb64](https://www.github.com/googleapis/nodejs-storage/commit/61eeb64)) + +## v2.5.0 + +04-04-2019 12:27 PDT + +This release brings an option to file#getSignedURL to create a version 4 Signed URL. 
+ +```javascript +file.getSignedUrl({ + version: 'v4', // optional, defaults to v2 (existing version) + action: 'read', + expires: FUTURE_DATE, +}) +``` + +### New Features +- feat: introduce v4 signed url ([#637](https://github.com/googleapis/nodejs-storage/pull/637)) + +### Dependencies +- chore(deps): update dependency @types/node to v11.13.0 ([#662](https://github.com/googleapis/nodejs-storage/pull/662)) +- chore(deps): update dependency @types/tmp to v0.1.0 +- chore(deps): upgrade to newest version of @google-cloud/common ([#657](https://github.com/googleapis/nodejs-storage/pull/657)) +- chore(deps): update dependency typescript to ~3.4.0 +- chore(deps): update dependency tmp to ^0.1.0 ([#641](https://github.com/googleapis/nodejs-storage/pull/641)) + +### Documentation +- docs: regenerate the samples/README.md ([#649](https://github.com/googleapis/nodejs-storage/pull/649)) +- docs: slight difference in how nightly synthtool run generated README ([#650](https://github.com/googleapis/nodejs-storage/pull/650)) +- docs: new synthtool generated README ([#645](https://github.com/googleapis/nodejs-storage/pull/645)) +- docs(samples): refactor the quickstart to match the new rubric ([#647](https://github.com/googleapis/nodejs-storage/pull/647)) +- docs: update README format +- docs: add requires_billing, retire .cloud-repo-tools.json ([#644](https://github.com/googleapis/nodejs-storage/pull/644)) +- docs: add additional api_id field ([#640](https://github.com/googleapis/nodejs-storage/pull/640)) +- docs: document destination option ([#633](https://github.com/googleapis/nodejs-storage/pull/633)) +- docs: clarify in docs, the meaning of ASIA and coldline ([#632](https://github.com/googleapis/nodejs-storage/pull/632)) +- docs: add a .repo-metadata.json ([#639](https://github.com/googleapis/nodejs-storage/pull/639)) + +### Internal / Testing Changes +- test(v2-sign): add multi-valued headers system-test ([#646](https://github.com/googleapis/nodejs-storage/pull/646)) +- refactor: replace once with onetime ([#660](https://github.com/googleapis/nodejs-storage/pull/660)) +- fix: do not download cached files ([#643](https://github.com/googleapis/nodejs-storage/pull/643)) +- chore: publish to npm using wombat ([#634](https://github.com/googleapis/nodejs-storage/pull/634)) +- build: use per-repo npm publish token ([#630](https://github.com/googleapis/nodejs-storage/pull/630)) + +## v2.4.3 + +03-13-2019 17:10 PDT + +### Bug Fixes / Implementation Changes +- fix: getSigned(Policy|Url) throws if expiration is invalid Date ([#614](https://github.com/googleapis/nodejs-storage/pull/614)) +- fix: handle errors from file#createReadStream ([#615](https://github.com/googleapis/nodejs-storage/pull/615)) + +### Dependencies +- fix(deps): update dependency @google-cloud/paginator to ^0.2.0 +- fix(deps): update dependency gcs-resumable-upload to v1 ([#619](https://github.com/googleapis/nodejs-storage/pull/619)) +- fix(deps): update dependency @google-cloud/pubsub to ^0.27.0 ([#620](https://github.com/googleapis/nodejs-storage/pull/620)) +- fix(deps): update dependency @google-cloud/pubsub to ^0.26.0 ([#618](https://github.com/googleapis/nodejs-storage/pull/618)) +- fix(deps): update dependency @google-cloud/pubsub to ^0.25.0 ([#616](https://github.com/googleapis/nodejs-storage/pull/616)) +- chore(deps): update dependency mocha to v6 ([#611](https://github.com/googleapis/nodejs-storage/pull/611)) +- fix(deps): update dependency @google-cloud/promisify to ^0.4.0 
([#609](https://github.com/googleapis/nodejs-storage/pull/609)) +- chore(deps): update dependency @types/tmp to v0.0.34 ([#608](https://github.com/googleapis/nodejs-storage/pull/608)) +- fix(deps): update dependency yargs to v13 ([#606](https://github.com/googleapis/nodejs-storage/pull/606)) + +### Documentation +- docs: update links in contrib guide ([#610](https://github.com/googleapis/nodejs-storage/pull/610)) +- docs: update contributing path in README ([#603](https://github.com/googleapis/nodejs-storage/pull/603)) +- chore: move CONTRIBUTING.md to root ([#601](https://github.com/googleapis/nodejs-storage/pull/601)) + +### Internal / Testing Changes +- build: Add docuploader credentials to node publish jobs ([#624](https://github.com/googleapis/nodejs-storage/pull/624)) +- build: use node10 to run samples-test, system-test etc ([#623](https://github.com/googleapis/nodejs-storage/pull/623)) +- build: update release configuration +- build: use linkinator for docs test ([#607](https://github.com/googleapis/nodejs-storage/pull/607)) +- build: create docs test npm scripts ([#605](https://github.com/googleapis/nodejs-storage/pull/605)) +- build: test using @grpc/grpc-js in CI ([#604](https://github.com/googleapis/nodejs-storage/pull/604)) +- chore: remove console.log in system test ([#599](https://github.com/googleapis/nodejs-storage/pull/599)) + +## v2.4.2 + +02-05-2019 16:55 PST + +### Dependencies + +- deps: update @google-cloud/common ([#596](https://github.com/googleapis/nodejs-storage/pull/596)) +- chore(deps): update dependency typescript to ~3.3.0 ([#591](https://github.com/googleapis/nodejs-storage/pull/591)) + +### Documentation + +- docs: add lint/fix example to contributing guide ([#594](https://github.com/googleapis/nodejs-storage/pull/594)) + +### Internal / Testing Changes + +- test: skip public bucket system tests running under VPCSC ([#595](https://github.com/googleapis/nodejs-storage/pull/595)) + +## v2.4.1 + +01-29-2019 13:05 PST + +### Implementation Changes +- fix(ts): fix Storage.createBucket overloaded signature ([#589](https://github.com/googleapis/nodejs-storage/pull/589)) + +### Dependencies +- fix(deps): update dependency @google-cloud/pubsub to ^0.24.0 ([#588](https://github.com/googleapis/nodejs-storage/pull/588)) + +## v2.4.0 + +01-28-2019 12:13 PST + +### New Features +- fix: `expires` can be a Date, string, or number ([#548](https://github.com/googleapis/nodejs-storage/pull/548)) + +### Dependencies +- deps: upgrade nodejs-common ([#582](https://github.com/googleapis/nodejs-storage/pull/582)) +- chore(deps): update dependency eslint-config-prettier to v4 ([#586](https://github.com/googleapis/nodejs-storage/pull/586)) +- fix(deps): update dependency @google-cloud/pubsub to ^0.23.0 ([#583](https://github.com/googleapis/nodejs-storage/pull/583)) +- fix(deps): update dependency concat-stream to v2 ([#563](https://github.com/googleapis/nodejs-storage/pull/563)) + +### Documentation +- docs(samples): Bucket Policy Only Samples ([#557](https://github.com/googleapis/nodejs-storage/pull/557)) +- fix(docs): move jsdoc away from interface ([#565](https://github.com/googleapis/nodejs-storage/pull/565)) + +### Internal / Testing Changes +- test: Bucket Policy Only related system test ([#579](https://github.com/googleapis/nodejs-storage/pull/579)) +- build: check broken links in generated docs ([#567](https://github.com/googleapis/nodejs-storage/pull/567)) +- build: include only build/src in compiled source ([#572](https://github.com/googleapis/nodejs-storage/pull/572)) + 
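For illustration, the more flexible `expires` accepted by `getSignedUrl` after this release can be exercised roughly as follows. This is a minimal sketch, not part of the official samples; the bucket and object names are placeholders.

```javascript
// Minimal sketch: the bucket and object names are placeholders.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function signedUrlExpiresForms() {
  const file = storage.bucket('my-bucket').file('my-object.txt');
  const inOneHour = Date.now() + 60 * 60 * 1000;

  // `expires` may be a Date, a parseable date string, or a timestamp in milliseconds.
  for (const expires of [new Date(inOneHour), '03-17-2030', inOneHour]) {
    const [url] = await file.getSignedUrl({action: 'read', expires});
    console.log(url);
  }
}

signedUrlExpiresForms().catch(console.error);
```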
+## v2.3.4 + +12-19-2018 14:21 PST + +### Implementation Changes +- fix(types): file.getMetadata should resolves to Metadata, not File ([#560](https://github.com/googleapis/nodejs-storage/pull/560)) + +### Internal / Testing Changes +- refactor: modernize the sample tests ([#558](https://github.com/googleapis/nodejs-storage/pull/558)) +- chore(build): inject yoshi automation key ([#555](https://github.com/googleapis/nodejs-storage/pull/555)) +- chore: update nyc and eslint configs ([#554](https://github.com/googleapis/nodejs-storage/pull/554)) +- chore: fix publish.sh permission +x ([#552](https://github.com/googleapis/nodejs-storage/pull/552)) +- fix(build): fix Kokoro release script ([#551](https://github.com/googleapis/nodejs-storage/pull/551)) +- build: add Kokoro configs for autorelease ([#550](https://github.com/googleapis/nodejs-storage/pull/550)) + +## v2.3.3 + +12-06-2018 17:09 PST + +### Dependencies +- chore(deps): update dependency @types/configstore to v4 ([#537](https://github.com/googleapis/nodejs-storage/pull/537)) +- chore(deps): update dependency @google-cloud/pubsub to ^0.22.0 ([#535](https://github.com/googleapis/nodejs-storage/pull/535)) + +### Documentation +- fix(docs): place doc comment above the last overload ([#544](https://github.com/googleapis/nodejs-storage/pull/544)) +- docs: update readme badges ([#536](https://github.com/googleapis/nodejs-storage/pull/536)) + +### Internal / Testing Changes +- chore: always nyc report before calling codecov ([#543](https://github.com/googleapis/nodejs-storage/pull/543)) +- chore: nyc ignore build/test by default ([#542](https://github.com/googleapis/nodejs-storage/pull/542)) +- chore: update license file ([#539](https://github.com/googleapis/nodejs-storage/pull/539)) + +## v2.3.2 + +This patch release fixed an issue affecting reading from a file on GCS ([#528](https://github.com/googleapis/nodejs-storage/issues/528)). 
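The affected path is ordinary object reads via `createReadStream`. A minimal sketch of that path is below; the bucket, object, and destination names are placeholders.

```javascript
// Minimal sketch: bucket, object, and destination paths are placeholders.
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();

storage
  .bucket('my-bucket')
  .file('my-object.txt')
  .createReadStream() // readable stream of the object's contents
  .on('error', console.error)
  .pipe(fs.createWriteStream('./my-object.txt'))
  .on('finish', () => console.log('Download complete.'));
```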
+ +### Dependencies +- fix(dep): upgrade teeny-request to v3.11.3 ([#529](https://github.com/googleapis/nodejs-storage/pull/529)) +- fix(deps): update dependency @google-cloud/common to ^0.27.0 ([#525](https://github.com/googleapis/nodejs-storage/pull/525)) + +### Internal / Testing Changes +- refactor: convert sample tests from ava to mocha ([#523](https://github.com/googleapis/nodejs-storage/pull/523)) +- docs(samples): updated samples code to use async await ([#521](https://github.com/googleapis/nodejs-storage/pull/521)) +- chore: add synth.metadata +- fix(ts): Update bucket options types ([#518](https://github.com/googleapis/nodejs-storage/pull/518)) + +## v2.3.1 + +11-14-2018 22:15 PST + +### Bug fixes +- fix: fix TypeScript and system tests ([#515](https://github.com/googleapis/nodejs-storage/pull/515)) +- fix(deps): update dependency through2 to v3 ([#507](https://github.com/googleapis/nodejs-storage/pull/507)) +- docs: File#setMetadata in parallel results in unpredictable state ([#504](https://github.com/googleapis/nodejs-storage/pull/504)) + +### Internal / Testing Changes +- chore(deps): update dependency gts to ^0.9.0 ([#514](https://github.com/googleapis/nodejs-storage/pull/514)) +- chore: update eslintignore config ([#513](https://github.com/googleapis/nodejs-storage/pull/513)) +- chore(deps): update dependency @google-cloud/nodejs-repo-tools to v3 ([#512](https://github.com/googleapis/nodejs-storage/pull/512)) +- refactor: use object.assign where possible ([#510](https://github.com/googleapis/nodejs-storage/pull/510)) +- chore: drop contributors from multiple places ([#511](https://github.com/googleapis/nodejs-storage/pull/511)) +- chore: use latest npm on Windows ([#509](https://github.com/googleapis/nodejs-storage/pull/509)) + +## v2.3.0 + +### Implementation Changes +- fix(types): Fixes getSignedUrl Return Type ([#496](https://github.com/googleapis/nodejs-storage/pull/496)) +- +### New Features +- Introduce Object Lifecycle Management ([#471](https://github.com/googleapis/nodejs-storage/pull/471)) + +### Dependencies +- chore(deps): update dependency eslint-plugin-node to v8 ([#490](https://github.com/googleapis/nodejs-storage/pull/490)) + +### Internal / Testing Changes +- chore: update issue templates ([#488](https://github.com/googleapis/nodejs-storage/pull/488)) + +## v2.2.0 + +### Bug Fixes +- fix: re-enable typescript types ([#484](https://github.com/googleapis/nodejs-storage/pull/484)) + +### Dependencies +- fix(deps): update dependency @google-cloud/common to ^0.26.0 (edited) ([#480](https://github.com/googleapis/nodejs-storage/pull/480)) +- chore: Remove 'is' dependency ([#462](https://github.com/googleapis/nodejs-storage/pull/462)) +- chore: upgrade teeny-request to 3.11.0 with type definitions ([#457](https://github.com/googleapis/nodejs-storage/pull/457)) +- feat: use small HTTP dependency ([#416](https://github.com/googleapis/nodejs-storage/pull/416)) + +### Documentation +- docs: Minor docs correction ([#465](https://github.com/googleapis/nodejs-storage/pull/465)) + +### Internal / Testing Changes +- chore: remove old issue template ([#485](https://github.com/googleapis/nodejs-storage/pull/485)) +- chore(typescript): enable noImplicitAny ([#483](https://github.com/googleapis/nodejs-storage/pull/483)) +- chore(typescript): improve typescript types and update tests ([#482](https://github.com/googleapis/nodejs-storage/pull/482)) +- build: run tests on node11 ([#481](https://github.com/googleapis/nodejs-storage/pull/481)) +- chores(build): do not collect 
sponge.xml from windows builds ([#478](https://github.com/googleapis/nodejs-storage/pull/478)) +- chores(build): run codecov on continuous builds ([#476](https://github.com/googleapis/nodejs-storage/pull/476)) +- chore: update new issue template ([#475](https://github.com/googleapis/nodejs-storage/pull/475)) +- fix: enable noImplicitAny for src/bucket.ts ([#472](https://github.com/googleapis/nodejs-storage/pull/472)) +- fix(tests): use unique prefix for system tests to avoid collision with another run ([#468](https://github.com/googleapis/nodejs-storage/pull/468)) +- fix: improve the types ([#467](https://github.com/googleapis/nodejs-storage/pull/467)) +- chore: move class Storage to storage.ts, create index.ts that contains all exports ([#464](https://github.com/googleapis/nodejs-storage/pull/464)) +- chore: add types to many unit tests ([#463](https://github.com/googleapis/nodejs-storage/pull/463)) +- fix: Annotate Iam types ([#461](https://github.com/googleapis/nodejs-storage/pull/461)) +- fix: complete bucket.ts noImplicitAny ([#460](https://github.com/googleapis/nodejs-storage/pull/460)) +- fix: improve the types on acl.ts ([#459](https://github.com/googleapis/nodejs-storage/pull/459)) +- fix: improve types (7) ([#458](https://github.com/googleapis/nodejs-storage/pull/458)) +- fix: improve the types ([#453](https://github.com/googleapis/nodejs-storage/pull/453)) +- chore: update build config ([#455](https://github.com/googleapis/nodejs-storage/pull/455)) +- fix: improve typescript types in src/file.ts ([#450](https://github.com/googleapis/nodejs-storage/pull/450)) +- build: fix codecov uploading on Kokoro ([#451](https://github.com/googleapis/nodejs-storage/pull/451)) +- test: Attempt to re-enable iam#testPermissions ([#429](https://github.com/googleapis/nodejs-storage/pull/429)) +- chore(deps): update dependency sinon to v7 ([#449](https://github.com/googleapis/nodejs-storage/pull/449)) +- Re-generate library using /synth.py ([#448](https://github.com/googleapis/nodejs-storage/pull/448)) +- Correct parameter name. ([#446](https://github.com/googleapis/nodejs-storage/pull/446)) + +## v2.1.0 + +This release brings support for Bucket/Object lock operations, as well as disabling TypeScript while we continue to annotate the project with types.
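A rough sketch of the kind of operations this adds is shown below. It is illustrative only: the bucket name, object name, and retention period are placeholders, and the retention-policy and hold helpers on `Bucket` and `File` are assumed from the library's documented samples.

```javascript
// Sketch only: bucket name, object name, and retention period are placeholders.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function lockOperations() {
  const bucket = storage.bucket('my-bucket');

  // Bucket-level retention policy: objects cannot be deleted or replaced
  // until they have been stored for at least this many seconds.
  await bucket.setRetentionPeriod(30 * 24 * 60 * 60); // 30 days

  // Object-level temporary hold: blocks deletion of one object until released.
  await bucket.file('my-object.txt').setMetadata({temporaryHold: true});
}

lockOperations().catch(console.error);
```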
+ +### New Features +- feat: Support Bucket/Object lock operations ([#374](https://github.com/googleapis/nodejs-storage/pull/374)) + +### Implementation Changes +- disable types for now ([#392](https://github.com/googleapis/nodejs-storage/pull/392)) +- Don't publish sourcemaps ([#412](https://github.com/googleapis/nodejs-storage/pull/412)) +#### TypeScript support (in progress) +- fix: add better types for file.ts ([#436](https://github.com/googleapis/nodejs-storage/pull/436)) +- fix: use ~ for typescript (and fix compile errors) ([#426](https://github.com/googleapis/nodejs-storage/pull/426)) +- fix: Add typing for File#download() ([#409](https://github.com/googleapis/nodejs-storage/pull/409)) +- chore: convert system tests to typescript ([#424](https://github.com/googleapis/nodejs-storage/pull/424)) +- Improve TypeScript types (part 4) ([#402](https://github.com/googleapis/nodejs-storage/pull/402)) +- ts: convert jsdoc types to typescript interfaces (1) ([#383](https://github.com/googleapis/nodejs-storage/pull/383)) +- fix: TS definition ([#387](https://github.com/googleapis/nodejs-storage/pull/387)) +- Annotate types [#3](https://github.com/googleapis/nodejs-storage/pull/3) ([#391](https://github.com/googleapis/nodejs-storage/pull/391)) +- Annotate types (2) ([#388](https://github.com/googleapis/nodejs-storage/pull/388)) + +### Dependencies +- chore(deps): update dependency eslint-plugin-prettier to v3 ([#419](https://github.com/googleapis/nodejs-storage/pull/419)) + +### Documentation +- docs: Modify source location for templates ([#410](https://github.com/googleapis/nodejs-storage/pull/410)) +- docs: Explain `Bucket#upload()` still exists ([#421](https://github.com/googleapis/nodejs-storage/pull/421)) + +### Internal / Testing Changes +- fix(tests): fix system tests on CircleCI ([#431](https://github.com/googleapis/nodejs-storage/pull/431)) +- fix(tests): system-test compiles to ./build, fix relative path ([#428](https://github.com/googleapis/nodejs-storage/pull/428)) +- Update kokoro config ([#425](https://github.com/googleapis/nodejs-storage/pull/425)) +- chore(samples): convert samples to async/await ([#422](https://github.com/googleapis/nodejs-storage/pull/422)) +- build: samples test by adding approprate test variables ([#423](https://github.com/googleapis/nodejs-storage/pull/423)) +- build: bring in latest kokoro cfgs to run System tests on PRs ([#413](https://github.com/googleapis/nodejs-storage/pull/413)) +- test: remove appveyor config ([#411](https://github.com/googleapis/nodejs-storage/pull/411)) +- Enable prefer-const in the eslint config ([#404](https://github.com/googleapis/nodejs-storage/pull/404)) +- fix(test): instantiate PubSub using new ([#403](https://github.com/googleapis/nodejs-storage/pull/403)) +- fix: optionsOrCallback could be undefined if not given, check before assign ([#401](https://github.com/googleapis/nodejs-storage/pull/401)) +- Fix the requesterPays methods ([#400](https://github.com/googleapis/nodejs-storage/pull/400)) +- Enable no-var in eslint ([#398](https://github.com/googleapis/nodejs-storage/pull/398)) +- samples: don't use USA formatted dates for expiry ([#396](https://github.com/googleapis/nodejs-storage/pull/396)) +- fix: copy(): Use correct destination file name in URI ([#389](https://github.com/googleapis/nodejs-storage/pull/389)) + +## v2.0.3 + +### Implementation Changes +- Improve TypeScript types ([#381](https://github.com/googleapis/nodejs-storage/pull/381)) +- Make some parameters optional 
([#380](https://github.com/googleapis/nodejs-storage/pull/380)) + +## v2.0.2 + +### Implementation Changes +- Improve the types (#377) + +## v2.0.1 + +**This fixes types declaration issues with projects using TypeScript.** + +### Implementation Changes +- Enable noImplicitThis in the tsconfig ([#370](https://github.com/googleapis/nodejs-storage/pull/370)) +- Fix the path to the d.ts ([#364](https://github.com/googleapis/nodejs-storage/pull/364)) +- fix: make dependency on request explicit ([#361](https://github.com/googleapis/nodejs-storage/pull/361)) +- fix: remove trailing slashes from bucket name. ([#266](https://github.com/googleapis/nodejs-storage/pull/266)) + +### Dependencies +- fix(deps): update dependency @google-cloud/common to ^0.24.0 ([#367](https://github.com/googleapis/nodejs-storage/pull/367)) +- fix(deps): update dependency gcs-resumable-upload to ^0.13.0 ([#368](https://github.com/googleapis/nodejs-storage/pull/368)) +- Remove unused dependencies ([#363](https://github.com/googleapis/nodejs-storage/pull/363)) +- Remove safe-buffer ([#359](https://github.com/googleapis/nodejs-storage/pull/359)) +- samples: update dependency @google-cloud/storage to v2 ([#350](https://github.com/googleapis/nodejs-storage/pull/350)) + +### Internal / Testing Changes +- Update CI config ([#371](https://github.com/googleapis/nodejs-storage/pull/371)) +- build(kokoro): run docker as user node ([#358](https://github.com/googleapis/nodejs-storage/pull/358)) +- build: fix multiline in circle.yml ([#357](https://github.com/googleapis/nodejs-storage/pull/357)) +- fix executable modes on .sh's; add pre-system-test.sh hook ([#356](https://github.com/googleapis/nodejs-storage/pull/356)) +- decrypt both service account keys ([#353](https://github.com/googleapis/nodejs-storage/pull/353)) +- Retry npm install in CI ([#352](https://github.com/googleapis/nodejs-storage/pull/352)) +- Add synth script and run it ([#351](https://github.com/googleapis/nodejs-storage/pull/351)) + +## v2.0.0 + +**This release has breaking changes**. This release has a few notable breaking changes. Please take care when upgrading! + +### require syntax changes +The import style of this library has been changed to support [es module](https://nodejs.org/api/esm.html) syntax. This provides both forward compatibility with es modules, and better supports the TypeScript and Babel ecosystems. As a result, the import syntax has changed: + +#### Old Code +```js +const storage = require('@google-cloud/storage')(); +// or... +const Storage = require('@google-cloud/storage'); +const storage = new Storage({ + // config... +}); +``` + +#### New Code +```js +const {Storage} = require('@google-cloud/storage'); +const storage = new Storage({ + // config... +}); +``` + +### `bucket.upload` no longer accepts URLs +To better support a variety of HTTP clients, the remote fetching functionality of `bucket.upload` has been removed. It can be replaced with your favorite HTTP client. + +#### Old Code +```js +bucket.upload('https://example.com/images/image.png', function(err, file, res) { + // handle upload... +}); +``` + +#### New Code + +```js +const request = require('request'); +const file = bucket.file(name); +const writeStream = file.createWriteStream(); +request(url).pipe(writeStream); +``` + +### Breaking changes +- semver: do not support upload() from url (#337) +- fix: drop support for node.js 4.x and 9.x (#282) + +### Features +- refactor(ts): merge initial TypeScript conversion (#334) +- feat: Add Storage#getServiceAccount(). 
(#331) +- Kms sample (#209) + +### Bug fixes +- fix: gzip and Cache-Control headers in upload sample (#225) +- fix: move this.[ROLE]s initialization from Acl to AclAccessorRoleMethods (#252) +- fix: signedURL cname (#210) (#234) + +### Internal / Testing Changes +- chore(deps): update dependency nyc to v13 (#341) +- fix(deps): update dependency @google-cloud/common to ^0.23.0 (#340) +- test: throw on deprecation (#319) +- chore(deps): update dependency eslint-config-prettier to v3 (#336) +- fix(deps): update dependency gcs-resumable-upload to ^0.12.0 (#317) +- Fix system tests for string comparisons (#328) +- chore: ignore package-lock.json (#326) +- chore: update renovate config (#322) +- chore: regen lock files (#318) +- chore(deps): lock file maintenance (#313) +- chore: move mocha options to mocha.opts (#311) +- chore(deps): lock file maintenance (#309) +- test: use strictEqual in tests (#306) +- chore(deps): update dependency eslint-plugin-node to v7 (#305) +- chore(deps): lock file maintenance (#303) +- chore(deps): lock file maintenance (#285) +- fix: test meant to assert err msg exists (#280) +- fix(deps): update dependency yargs to v12 (#270) +- fix(deps): update dependency uuid to v3.3.2 (#269) +- chore: update gcs-resumable-upload to 0.11.1 (#265) +- fix(deps): update dependency uuid to v3.3.0 (#262) +- chore(deps): update dependency sinon to v6 (#263) +- Configure Renovate (#250) +- refactor: drop repo-tool as an exec wrapper (#258) +- chore: update sample lockfiles (#256) +- fix: update linking for samples (#254) +- chore(package): update eslint to version 5.0.0 (#253) +- refactor(es6): Refactor constructor pattern as ES6 class (#246) +- Update @google-cloud/common to the latest version 🚀 (#226) +- system-tests: fix channel test. (#243) +- refactor: Update to the latest version of nodejs-common and gcs-resumable-upload (#202) +- Fix permission of bash script for Kokoro (#223) +- chore(package): update nyc to version 12.0.2 (#216) +- chore: fix prettier incompatibility (#211) diff --git a/node_modules/@google-cloud/storage/LICENSE b/node_modules/@google-cloud/storage/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/node_modules/@google-cloud/storage/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@google-cloud/storage/README.md b/node_modules/@google-cloud/storage/README.md new file mode 100644 index 0000000..73aa251 --- /dev/null +++ b/node_modules/@google-cloud/storage/README.md @@ -0,0 +1,288 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Cloud Storage: Node.js Client](https://github.com/googleapis/nodejs-storage) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/@google-cloud/storage.svg)](https://www.npmjs.org/package/@google-cloud/storage) + + + + +> Node.js idiomatic client for [Cloud Storage][product-docs]. + +[Cloud Storage](https://cloud.google.com/storage/docs) allows world-wide +storage and retrieval of any amount of data at any time. 
You can use Google +Cloud Storage for a range of scenarios including serving website content, +storing data for archival and disaster recovery, or distributing large data +objects to users via direct download. + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/nodejs-storage/blob/main/CHANGELOG.md). + +* [Google Cloud Storage Node.js Client API Reference][client-docs] +* [Google Cloud Storage Documentation][product-docs] +* [github.com/googleapis/nodejs-storage](https://github.com/googleapis/nodejs-storage) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + * [Before you begin](#before-you-begin) + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Before you begin + +1. [Select or create a Cloud Platform project][projects]. +1. [Enable billing for your project][billing]. +1. [Enable the Google Cloud Storage API][enable_api]. +1. [Set up authentication with a service account][auth] so you can access the + API from your local workstation. + +### Installing the client library + +```bash +npm install @google-cloud/storage +``` + + +### Using the client library + +```javascript +// Imports the Google Cloud client library +const {Storage} = require('@google-cloud/storage'); + +// For more information on ways to initialize Storage, please see +// https://googleapis.dev/nodejs/storage/latest/Storage.html + +// Creates a client using Application Default Credentials +const storage = new Storage(); + +// Creates a client from a Google service account key +// const storage = new Storage({keyFilename: 'key.json'}); + +/** + * TODO(developer): Uncomment these variables before running the sample. + */ +// The ID of your GCS bucket +// const bucketName = 'your-unique-bucket-name'; + +async function createBucket() { + // Creates the new bucket + await storage.createBucket(bucketName); + console.log(`Bucket ${bucketName} created.`); +} + +createBucket().catch(console.error); + +``` + + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/nodejs-storage/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. 
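For example, downloading an object to a local file can look like the following minimal sketch (the bucket, object, and destination names are placeholders; see the Download File sample below for the full version):

```javascript
// Minimal sketch: bucket, object, and destination names are placeholders.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function downloadFile() {
  await storage
    .bucket('your-unique-bucket-name')
    .file('file.txt')
    .download({destination: './file.txt'});

  console.log('gs://your-unique-bucket-name/file.txt downloaded to ./file.txt.');
}

downloadFile().catch(console.error);
```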
+ +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Add Bucket Conditional Binding | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketConditionalBinding.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketConditionalBinding.js,samples/README.md) | +| Add Bucket Default Owner Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketDefaultOwnerAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketDefaultOwnerAcl.js,samples/README.md) | +| Add Bucket Iam Member | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketIamMember.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketIamMember.js,samples/README.md) | +| Storage Add Bucket Label. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketLabel.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketLabel.js,samples/README.md) | +| Add Bucket Owner Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketOwnerAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketOwnerAcl.js,samples/README.md) | +| Bucket Website Configuration. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addBucketWebsiteConfiguration.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addBucketWebsiteConfiguration.js,samples/README.md) | +| Add File Owner Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/addFileOwnerAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/addFileOwnerAcl.js,samples/README.md) | +| Storage Get Bucket Metadata. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/bucketMetadata.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/bucketMetadata.js,samples/README.md) | +| Change Bucket's Default Storage Class. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/changeDefaultStorageClass.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/changeDefaultStorageClass.js,samples/README.md) | +| Storage File Convert CSEK to CMEK. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/changeFileCSEKToCMEK.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/changeFileCSEKToCMEK.js,samples/README.md) | +| Storage Combine files. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/composeFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/composeFile.js,samples/README.md) | +| Storage Configure Bucket Cors. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/configureBucketCors.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/configureBucketCors.js,samples/README.md) | +| Configure Retries | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/configureRetries.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/configureRetries.js,samples/README.md) | +| Copy File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/copyFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/copyFile.js,samples/README.md) | +| Copy Old Version Of File. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/copyOldVersionOfFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/copyOldVersionOfFile.js,samples/README.md) | +| Create a Dual-Region Bucket | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithDualRegion.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithDualRegion.js,samples/README.md) | +| Create Bucket With Storage Class and Location. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithStorageClassAndLocation.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithStorageClassAndLocation.js,samples/README.md) | +| Create Bucket With Turbo Replication | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createBucketWithTurboReplication.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createBucketWithTurboReplication.js,samples/README.md) | +| Create New Bucket | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createNewBucket.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createNewBucket.js,samples/README.md) | +| Create Notification | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/createNotification.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/createNotification.js,samples/README.md) | +| Delete Bucket | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteBucket.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteBucket.js,samples/README.md) | +| Delete File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteFile.js,samples/README.md) | +| Delete Notification | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteNotification.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteNotification.js,samples/README.md) | +| Delete Old Version Of File. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/deleteOldVersionOfFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/deleteOldVersionOfFile.js,samples/README.md) | +| Disable Bucket Lifecycle Management | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableBucketLifecycleManagement.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableBucketLifecycleManagement.js,samples/README.md) | +| Storage Disable Bucket Versioning. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableBucketVersioning.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableBucketVersioning.js,samples/README.md) | +| Disable Default Event Based Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableDefaultEventBasedHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableDefaultEventBasedHold.js,samples/README.md) | +| Disable Requester Pays | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableRequesterPays.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableRequesterPays.js,samples/README.md) | +| Disable Uniform Bucket Level Access | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/disableUniformBucketLevelAccess.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/disableUniformBucketLevelAccess.js,samples/README.md) | +| Download Byte Range | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadByteRange.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadByteRange.js,samples/README.md) | +| Download Encrypted File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadEncryptedFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadEncryptedFile.js,samples/README.md) | +| Download File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFile.js,samples/README.md) | +| Download a File in Chunks With Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFileInChunksWithTransferManager.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFileInChunksWithTransferManager.js,samples/README.md) | +| Download File Using Requester Pays | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFileUsingRequesterPays.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFileUsingRequesterPays.js,samples/README.md) | +| Download Folder With Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadFolderWithTransferManager.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadFolderWithTransferManager.js,samples/README.md) | +| Download Into Memory | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadIntoMemory.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadIntoMemory.js,samples/README.md) | +| Download Many Files With Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadManyFilesWithTransferManager.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadManyFilesWithTransferManager.js,samples/README.md) | +| Storage Download Public File. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/downloadPublicFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/downloadPublicFile.js,samples/README.md) | +| Enable Bucket Lifecycle Management | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableBucketLifecycleManagement.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableBucketLifecycleManagement.js,samples/README.md) | +| Storage Enable Bucket Versioning. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableBucketVersioning.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableBucketVersioning.js,samples/README.md) | +| Enable Default Event Based Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableDefaultEventBasedHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableDefaultEventBasedHold.js,samples/README.md) | +| Enable Default KMS Key | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableDefaultKMSKey.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableDefaultKMSKey.js,samples/README.md) | +| Enable Requester Pays | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableRequesterPays.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableRequesterPays.js,samples/README.md) | +| Enable Uniform Bucket Level Access | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/enableUniformBucketLevelAccess.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/enableUniformBucketLevelAccess.js,samples/README.md) | +| Change File's Storage Class. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/fileChangeStorageClass.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/fileChangeStorageClass.js,samples/README.md) | +| Storage Set File Metadata. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/fileSetMetadata.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/fileSetMetadata.js,samples/README.md) | +| Generate Encryption Key | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateEncryptionKey.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateEncryptionKey.js,samples/README.md) | +| Generate Signed Url | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateSignedUrl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateSignedUrl.js,samples/README.md) | +| Generate V4 Read Signed Url | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateV4ReadSignedUrl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateV4ReadSignedUrl.js,samples/README.md) | +| Generate V4 Signed Policy | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateV4SignedPolicy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateV4SignedPolicy.js,samples/README.md) | +| Generate V4 Upload Signed Url | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/generateV4UploadSignedUrl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/generateV4UploadSignedUrl.js,samples/README.md) | +| Get Autoclass | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getAutoclass.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getAutoclass.js,samples/README.md) | +| Get Default Event Based Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getDefaultEventBasedHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getDefaultEventBasedHold.js,samples/README.md) | +| Get Metadata | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getMetadata.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getMetadata.js,samples/README.md) | +| Get Metadata Notifications | [source 
code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getMetadataNotifications.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getMetadataNotifications.js,samples/README.md) | +| Get Public Access Prevention | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getPublicAccessPrevention.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getPublicAccessPrevention.js,samples/README.md) | +| Get RPO | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getRPO.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getRPO.js,samples/README.md) | +| Get Requester Pays Status | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getRequesterPaysStatus.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getRequesterPaysStatus.js,samples/README.md) | +| Get Retention Policy | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getRetentionPolicy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getRetentionPolicy.js,samples/README.md) | +| Storage Get Service Account. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getServiceAccount.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getServiceAccount.js,samples/README.md) | +| Get Uniform Bucket Level Access | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/getUniformBucketLevelAccess.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/getUniformBucketLevelAccess.js,samples/README.md) | +| Activate HMAC SA Key. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyActivate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyActivate.js,samples/README.md) | +| Create HMAC SA Key. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyCreate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyCreate.js,samples/README.md) | +| Deactivate HMAC SA Key. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyDeactivate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyDeactivate.js,samples/README.md) | +| Delete HMAC SA Key. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyDelete.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyDelete.js,samples/README.md) | +| Get HMAC SA Key Metadata. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeyGet.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeyGet.js,samples/README.md) | +| List HMAC SA Keys Metadata. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/hmacKeysList.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/hmacKeysList.js,samples/README.md) | +| List Buckets | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listBuckets.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listBuckets.js,samples/README.md) | +| List Files | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFiles.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFiles.js,samples/README.md) | +| List Files By Prefix | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFilesByPrefix.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFilesByPrefix.js,samples/README.md) | +| List Files Paginate | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFilesPaginate.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFilesPaginate.js,samples/README.md) | +| List Files with Old Versions. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listFilesWithOldVersions.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listFilesWithOldVersions.js,samples/README.md) | +| List Notifications | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/listNotifications.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/listNotifications.js,samples/README.md) | +| Lock Retention Policy | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/lockRetentionPolicy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/lockRetentionPolicy.js,samples/README.md) | +| Storage Make Bucket Public. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/makeBucketPublic.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/makeBucketPublic.js,samples/README.md) | +| Make Public | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/makePublic.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/makePublic.js,samples/README.md) | +| Move File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/moveFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/moveFile.js,samples/README.md) | +| Print Bucket Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printBucketAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printBucketAcl.js,samples/README.md) | +| Print Bucket Acl For User | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printBucketAclForUser.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printBucketAclForUser.js,samples/README.md) | +| Print File Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printFileAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printFileAcl.js,samples/README.md) | +| Print File Acl For User | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/printFileAclForUser.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/printFileAclForUser.js,samples/README.md) | +| Quickstart | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | +| Release Event Based Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/releaseEventBasedHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/releaseEventBasedHold.js,samples/README.md) | +| Release Temporary Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/releaseTemporaryHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/releaseTemporaryHold.js,samples/README.md) | +| Remove Bucket Conditional Binding | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketConditionalBinding.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketConditionalBinding.js,samples/README.md) | +| Storage Remove Bucket Cors Configuration. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketCors.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketCors.js,samples/README.md) | +| Remove Bucket Default Owner | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketDefaultOwner.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketDefaultOwner.js,samples/README.md) | +| Remove Bucket Iam Member | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketIamMember.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketIamMember.js,samples/README.md) | +| Storage Remove Bucket Label. | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketLabel.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketLabel.js,samples/README.md) | +| Remove Bucket Owner Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeBucketOwnerAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeBucketOwnerAcl.js,samples/README.md) | +| Remove Default KMS Key. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeDefaultKMSKey.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeDefaultKMSKey.js,samples/README.md) | +| Remove File Owner Acl | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeFileOwnerAcl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeFileOwnerAcl.js,samples/README.md) | +| Remove Retention Policy | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/removeRetentionPolicy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/removeRetentionPolicy.js,samples/README.md) | +| Rename File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/renameFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/renameFile.js,samples/README.md) | +| Rotate Encryption Key | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/rotateEncryptionKey.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/rotateEncryptionKey.js,samples/README.md) | +| Set Autoclass | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setAutoclass.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setAutoclass.js,samples/README.md) | +| Set Client Endpoint | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setClientEndpoint.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setClientEndpoint.js,samples/README.md) | +| Set Event Based Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setEventBasedHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setEventBasedHold.js,samples/README.md) | +| Set Public Access Prevention Enforced | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setPublicAccessPreventionEnforced.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setPublicAccessPreventionEnforced.js,samples/README.md) | +| Set Public Access Prevention Inherited | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setPublicAccessPreventionInherited.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setPublicAccessPreventionInherited.js,samples/README.md) | +| Set RPO Async Turbo | [source 
code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setRPOAsyncTurbo.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setRPOAsyncTurbo.js,samples/README.md) | +| Set RPO Default | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setRPODefault.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setRPODefault.js,samples/README.md) | +| Set Retention Policy | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setRetentionPolicy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setRetentionPolicy.js,samples/README.md) | +| Set Temporary Hold | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/setTemporaryHold.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/setTemporaryHold.js,samples/README.md) | +| Stream File Download | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/streamFileDownload.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/streamFileDownload.js,samples/README.md) | +| Stream File Upload | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/streamFileUpload.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/streamFileUpload.js,samples/README.md) | +| Upload a directory to a bucket. 
| [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadDirectory.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadDirectory.js,samples/README.md) | +| Upload Directory With Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadDirectoryWithTransferManager.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadDirectoryWithTransferManager.js,samples/README.md) | +| Upload Encrypted File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadEncryptedFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadEncryptedFile.js,samples/README.md) | +| Upload File | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFile.js,samples/README.md) | +| Upload a File in Chunks With Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFileInChunksWithTransferManager.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFileInChunksWithTransferManager.js,samples/README.md) | +| Upload File With Kms Key | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFileWithKmsKey.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFileWithKmsKey.js,samples/README.md) | +| Upload From Memory | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadFromMemory.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadFromMemory.js,samples/README.md) | +| Upload Many Files With Transfer Manager | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadManyFilesWithTransferManager.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadManyFilesWithTransferManager.js,samples/README.md) | +| Upload Without Authentication | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadWithoutAuthentication.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadWithoutAuthentication.js,samples/README.md) | +| Upload Without Authentication Signed Url | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/uploadWithoutAuthenticationSignedUrl.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/uploadWithoutAuthenticationSignedUrl.js,samples/README.md) | +| View Bucket Iam Members | [source code](https://github.com/googleapis/nodejs-storage/blob/main/samples/viewBucketIamMembers.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/viewBucketIamMembers.js,samples/README.md) | + + + +The [Google Cloud Storage Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://github.com/nodejs/release#release-schedule). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install @google-cloud/storage@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-storage/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). 
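+
+For orientation, the samples listed earlier in this README generally share one
+basic shape: construct a single `Storage` client, address a bucket, and either
+pass a callback or omit it to receive a promise. A minimal sketch of that
+pattern (the bucket name and prefix are placeholders, and Application Default
+Credentials are assumed):
+
+```javascript
+const {Storage} = require('@google-cloud/storage');
+
+async function listSampleFiles() {
+  const storage = new Storage();
+  // The promise form resolves with the same values a callback would receive,
+  // so the first element is the array of matching File objects.
+  const [files] = await storage
+    .bucket('my-bucket')
+    .getFiles({prefix: 'samples/'});
+  files.forEach(file => console.log(file.name));
+}
+
+listSampleFiles().catch(console.error);
+```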
+ +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/nodejs-storage/blob/main/LICENSE) + +[client-docs]: https://googleapis.dev/nodejs/storage/latest +[product-docs]: https://cloud.google.com/storage +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing +[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=storage-api.googleapis.com +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/@google-cloud/storage/build/cjs/package.json b/node_modules/@google-cloud/storage/build/cjs/package.json new file mode 100644 index 0000000..6a0d2ef --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/build/cjs/src/acl.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/acl.d.ts new file mode 100644 index 0000000..d8f31a2 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/acl.d.ts @@ -0,0 +1,165 @@ +import { BodyResponseCallback, DecorateRequestOptions, BaseMetadata } from './nodejs-common/index.js'; +export interface AclOptions { + pathPrefix: string; + request: (reqOpts: DecorateRequestOptions, callback: BodyResponseCallback) => void; +} +export type GetAclResponse = [ + AccessControlObject | AccessControlObject[], + AclMetadata +]; +export interface GetAclCallback { + (err: Error | null, acl?: AccessControlObject | AccessControlObject[] | null, apiResponse?: AclMetadata): void; +} +export interface GetAclOptions { + entity: string; + generation?: number; + userProject?: string; +} +export interface UpdateAclOptions { + entity: string; + role: string; + generation?: number; + userProject?: string; +} +export type UpdateAclResponse = [AccessControlObject, AclMetadata]; +export interface UpdateAclCallback { + (err: Error | null, acl?: AccessControlObject | null, apiResponse?: AclMetadata): void; +} +export interface AddAclOptions { + entity: string; + role: string; + generation?: number; + userProject?: string; +} +export type AddAclResponse = [AccessControlObject, AclMetadata]; +export interface AddAclCallback { + (err: Error | null, acl?: AccessControlObject | null, apiResponse?: AclMetadata): void; +} +export type RemoveAclResponse = [AclMetadata]; +export interface RemoveAclCallback { + (err: Error | null, apiResponse?: AclMetadata): void; +} +export interface RemoveAclOptions { + entity: string; + generation?: number; + userProject?: string; +} +export interface AccessControlObject { + entity: string; + role: string; + projectTeam: string; +} +export interface AclMetadata extends BaseMetadata { + bucket?: string; + domain?: string; + entity?: string; + entityId?: string; + generation?: string; + object?: string; + projectTeam?: { + projectNumber?: string; + team?: 'editors' | 'owners' | 'viewers'; + }; + role?: 'OWNER' | 'READER' | 'WRITER' | 'FULL_CONTROL'; + [key: string]: unknown; +} +/** + * Attach functionality to a {@link Storage.acl} instance. This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. 
+ * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +declare class AclRoleAccessorMethods { + private static accessMethods; + private static entities; + private static roles; + owners: {}; + readers: {}; + writers: {}; + constructor(); + _assignAccessMethods(role: string): void; +} +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +declare class Acl extends AclRoleAccessorMethods { + default: Acl; + pathPrefix: string; + request_: (reqOpts: DecorateRequestOptions, callback: BodyResponseCallback) => void; + constructor(options: AclOptions); + add(options: AddAclOptions): Promise; + add(options: AddAclOptions, callback: AddAclCallback): void; + delete(options: RemoveAclOptions): Promise; + delete(options: RemoveAclOptions, callback: RemoveAclCallback): void; + get(options?: GetAclOptions): Promise; + get(options: GetAclOptions, callback: GetAclCallback): void; + get(callback: GetAclCallback): void; + update(options: UpdateAclOptions): Promise; + update(options: UpdateAclOptions, callback: UpdateAclCallback): void; + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject: AccessControlObject): AccessControlObject; + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. 
+ */ + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; +} +export { Acl, AclRoleAccessorMethods }; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/acl.js b/node_modules/@google-cloud/storage/build/cjs/src/acl.js new file mode 100644 index 0000000..194fb3b --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/acl.js @@ -0,0 +1,718 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AclRoleAccessorMethods = exports.Acl = void 0; +const promisify_1 = require("@google-cloud/promisify"); +/** + * Attach functionality to a {@link Storage.acl} instance. This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +class AclRoleAccessorMethods { + constructor() { + this.owners = {}; + this.readers = {}; + this.writers = {}; + /** + * An object of convenience methods to add or delete owner ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.owners.addAllAuthenticatedUsers` + * - `myFile.acl.owners.deleteAllAuthenticatedUsers` + * - `myFile.acl.owners.addAllUsers` + * - `myFile.acl.owners.deleteAllUsers` + * - `myFile.acl.owners.addDomain` + * - `myFile.acl.owners.deleteDomain` + * - `myFile.acl.owners.addGroup` + * - `myFile.acl.owners.deleteGroup` + * - `myFile.acl.owners.addProject` + * - `myFile.acl.owners.deleteProject` + * - `myFile.acl.owners.addUser` + * - `myFile.acl.owners.deleteUser` + * + * @name Acl#owners + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as an owner of a file. + * //- + * const myBucket = gcs.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.owners.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.owners = {}; + /** + * An object of convenience methods to add or delete reader ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.readers.addAllAuthenticatedUsers` + * - `myFile.acl.readers.deleteAllAuthenticatedUsers` + * - `myFile.acl.readers.addAllUsers` + * - `myFile.acl.readers.deleteAllUsers` + * - `myFile.acl.readers.addDomain` + * - `myFile.acl.readers.deleteDomain` + * - `myFile.acl.readers.addGroup` + * - `myFile.acl.readers.deleteGroup` + * - `myFile.acl.readers.addProject` + * - `myFile.acl.readers.deleteProject` + * - `myFile.acl.readers.addUser` + * - `myFile.acl.readers.deleteUser` + * + * @name Acl#readers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a reader of a file. + * //- + * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.READER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.readers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.readers = {}; + /** + * An object of convenience methods to add or delete writer ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.writers.addAllAuthenticatedUsers` + * - `myFile.acl.writers.deleteAllAuthenticatedUsers` + * - `myFile.acl.writers.addAllUsers` + * - `myFile.acl.writers.deleteAllUsers` + * - `myFile.acl.writers.addDomain` + * - `myFile.acl.writers.deleteDomain` + * - `myFile.acl.writers.addGroup` + * - `myFile.acl.writers.deleteGroup` + * - `myFile.acl.writers.addProject` + * - `myFile.acl.writers.deleteProject` + * - `myFile.acl.writers.addUser` + * - `myFile.acl.writers.deleteUser` + * + * @name Acl#writers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a writer of a file. + * //- + * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.WRITER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.writers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.writers = {}; + AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); + } + _assignAccessMethods(role) { + const accessMethods = AclRoleAccessorMethods.accessMethods; + const entities = AclRoleAccessorMethods.entities; + const roleGroup = role.toLowerCase() + 's'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[roleGroup] = entities.reduce((acc, entity) => { + const isPrefix = entity.charAt(entity.length - 1) === '-'; + accessMethods.forEach(accessMethod => { + let method = accessMethod + entity[0].toUpperCase() + entity.substring(1); + if (isPrefix) { + method = method.replace('-', ''); + } + // Wrap the parent accessor method (e.g. 
`add` or `delete`) to avoid the + // more complex API of specifying an `entity` and `role`. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + acc[method] = (entityId, options, callback) => { + let apiEntity; + if (typeof options === 'function') { + callback = options; + options = {}; + } + if (isPrefix) { + apiEntity = entity + entityId; + } + else { + // If the entity is not a prefix, it is a special entity group + // that does not require further details. The accessor methods + // only accept a callback. + apiEntity = entity; + callback = entityId; + } + options = Object.assign({ + entity: apiEntity, + role, + }, options); + const args = [options]; + if (typeof callback === 'function') { + args.push(callback); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return this[accessMethod].apply(this, args); + }; + }); + return acc; + }, {}); + } +} +exports.AclRoleAccessorMethods = AclRoleAccessorMethods; +AclRoleAccessorMethods.accessMethods = ['add', 'delete']; +AclRoleAccessorMethods.entities = [ + // Special entity groups that do not require further specification. + 'allAuthenticatedUsers', + 'allUsers', + // Entity groups that require specification, e.g. `user-email@example.com`. + 'domain-', + 'group-', + 'project-', + 'user-', +]; +AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +class Acl extends AclRoleAccessorMethods { + constructor(options) { + super(); + this.pathPrefix = options.pathPrefix; + this.request_ = options.request; + } + /** + * @typedef {array} AddAclResponse + * @property {object} 0 The Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback AddAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Add access controls on a {@link Bucket} or {@link File}. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be added. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link https://cloud.google.com/storage/docs/access-control Access + * Control}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {AddAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * // Here is how you would grant ownership permissions to a user on a + * specific + * // revision of a file. + * //- + * myFile.acl.add({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_add_file_owner + * Example of adding an owner to a file: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + */ + add(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'POST', + uri: '', + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + json: { + entity: options.entity, + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * @typedef {array} RemoveAclResponse + * @property {object} 0 The full API response. + */ + /** + * @callback RemoveAclCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} + * + * @param {object} options Configuration object. + * @param {string} options.entity Whose permissions will be revoked. 
+ * @param {int} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {RemoveAclCallback} callback The callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.delete({ + * entity: 'user-useremail@example.com' + * }, function(err, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.delete({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_file_owner + * Example of removing an owner from a bucket: + */ + delete(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'DELETE', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + }, (err, resp) => { + callback(err, resp); + }); + } + /** + * @typedef {array} GetAclResponse + * @property {object|object[]} 0 Single or array of Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback GetAclCallback + * @param {?Error} err Request error, if any. + * @param {object|object[]} acl Single or array of Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Get access controls on a {@link Bucket} or {@link File}. If + * an entity is omitted, you will receive an array of all applicable access + * controls. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} + * + * @param {object|function} [options] Configuration options. If you want to + * receive a list of all access controls, pass the callback function as + * the only argument. + * @param {string} options.entity Whose permissions will be fetched. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.get({ + * entity: 'user-useremail@example.com' + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // Get all access controls. 
+ * //- + * myBucket.acl.get(function(err, aclObjects, apiResponse) { + * // aclObjects = [ + * // { + * // entity: 'user-useremail@example.com', + * // role: 'owner' + * // } + * // ] + * }); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.get({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.get().then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_file_acl + * Example of printing a file's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_file_acl_for_user + * Example of printing a file's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + */ + get(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + let path = ''; + const query = {}; + if (options) { + path = '/' + encodeURIComponent(options.entity); + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + } + this.request({ + uri: path, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + let results; + if (resp.items) { + results = resp.items.map(this.makeAclObject_); + } + else { + results = this.makeAclObject_(resp); + } + callback(null, results, resp); + }); + } + /** + * @typedef {array} UpdateAclResponse + * @property {object} 0 The updated Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback UpdateAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The updated Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Update access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be updated. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link Storage.acl}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {UpdateAclCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE + * }; + * + * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.update({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.update(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + update(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'PUT', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + json: { + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject) { + const obj = { + entity: accessControlObject.entity, + role: accessControlObject.role, + }; + if (accessControlObject.projectTeam) { + obj.projectTeam = accessControlObject.projectTeam; + } + return obj; + } + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts, callback) { + reqOpts.uri = this.pathPrefix + reqOpts.uri; + this.request_(reqOpts, callback); + } +} +exports.Acl = Acl; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. 
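+ *
+ * For illustration (a minimal sketch; `myBucket` stands for any Bucket
+ * instance), the promise form resolves with the same tuple the callback
+ * would receive:
+ *
+ *   const [aclObject, apiResponse] = await myBucket.acl.get({
+ *     entity: 'user-liz@example.com',
+ *   });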
+ */ +(0, promisify_1.promisifyAll)(Acl, { + exclude: ['request'], +}); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/bucket.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/bucket.d.ts new file mode 100644 index 0000000..1595d39 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/bucket.d.ts @@ -0,0 +1,801 @@ +/// +/// +/// +/// +import { ApiError, BodyResponseCallback, DecorateRequestOptions, DeleteCallback, ExistsCallback, GetConfig, MetadataCallback, ServiceObject, SetMetadataResponse } from './nodejs-common/index.js'; +import { RequestResponse } from './nodejs-common/service-object.js'; +import * as http from 'http'; +import { Acl, AclMetadata } from './acl.js'; +import { Channel } from './channel.js'; +import { File, FileOptions, CreateResumableUploadOptions, CreateWriteStreamOptions } from './file.js'; +import { Iam } from './iam.js'; +import { Notification } from './notification.js'; +import { Storage, Cors, PreconditionOptions, BucketOptions } from './storage.js'; +import { GetSignedUrlResponse, GetSignedUrlCallback, SignerGetSignedUrlConfig, URLSigner, Query } from './signer.js'; +import { Readable } from 'stream'; +import { CRC32CValidatorGenerator } from './crc32c.js'; +import { URL } from 'url'; +import { BaseMetadata, SetMetadataOptions } from './nodejs-common/service-object.js'; +export type GetFilesResponse = [File[], {}, unknown]; +export interface GetFilesCallback { + (err: Error | null, files?: File[], nextQuery?: {}, apiResponse?: unknown): void; +} +interface WatchAllOptions { + delimiter?: string; + maxResults?: number; + pageToken?: string; + prefix?: string; + projection?: string; + userProject?: string; + versions?: boolean; +} +export interface AddLifecycleRuleOptions extends PreconditionOptions { + append?: boolean; +} +export interface LifecycleAction { + type: 'Delete' | 'SetStorageClass' | 'AbortIncompleteMultipartUpload'; + storageClass?: string; +} +export interface LifecycleCondition { + age?: number; + createdBefore?: Date | string; + customTimeBefore?: Date | string; + daysSinceCustomTime?: number; + daysSinceNoncurrentTime?: number; + isLive?: boolean; + matchesPrefix?: string[]; + matchesSuffix?: string[]; + matchesStorageClass?: string[]; + noncurrentTimeBefore?: Date | string; + numNewerVersions?: number; +} +export interface LifecycleRule { + action: LifecycleAction; + condition: LifecycleCondition; +} +export interface LifecycleCondition { + age?: number; + createdBefore?: Date | string; + customTimeBefore?: Date | string; + daysSinceCustomTime?: number; + daysSinceNoncurrentTime?: number; + isLive?: boolean; + matchesPrefix?: string[]; + matchesSuffix?: string[]; + matchesStorageClass?: string[]; + noncurrentTimeBefore?: Date | string; + numNewerVersions?: number; +} +export interface LifecycleRule { + action: LifecycleAction; + condition: LifecycleCondition; +} +export interface EnableLoggingOptions extends PreconditionOptions { + bucket?: string | Bucket; + prefix: string; +} +export interface GetFilesOptions { + autoPaginate?: boolean; + delimiter?: string; + endOffset?: string; + includeFoldersAsPrefixes?: boolean; + includeTrailingDelimiter?: boolean; + prefix?: string; + matchGlob?: string; + maxApiCalls?: number; + maxResults?: number; + pageToken?: string; + softDeleted?: boolean; + startOffset?: string; + userProject?: string; + versions?: boolean; +} +export interface CombineOptions extends PreconditionOptions { + kmsKeyName?: string; + userProject?: string; +} +export interface CombineCallback { + 
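+    // Callback counterpart of the `CombineResponse` tuple declared below:
+    // receives the newly combined File (null on error) and the raw API
+    // response.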
(err: Error | null, newFile: File | null, apiResponse: unknown): void; +} +export type CombineResponse = [File, unknown]; +export interface CreateChannelConfig extends WatchAllOptions { + address: string; +} +export interface CreateChannelOptions { + userProject?: string; +} +export type CreateChannelResponse = [Channel, unknown]; +export interface CreateChannelCallback { + (err: Error | null, channel: Channel | null, apiResponse: unknown): void; +} +export interface CreateNotificationOptions { + customAttributes?: { + [key: string]: string; + }; + eventTypes?: string[]; + objectNamePrefix?: string; + payloadFormat?: string; + userProject?: string; +} +export interface CreateNotificationCallback { + (err: Error | null, notification: Notification | null, apiResponse: unknown): void; +} +export type CreateNotificationResponse = [Notification, unknown]; +export interface DeleteBucketOptions { + ignoreNotFound?: boolean; + userProject?: string; +} +export type DeleteBucketResponse = [unknown]; +export interface DeleteBucketCallback extends DeleteCallback { + (err: Error | null, apiResponse: unknown): void; +} +export interface DeleteFilesOptions extends GetFilesOptions, PreconditionOptions { + force?: boolean; +} +export interface DeleteFilesCallback { + (err: Error | Error[] | null, apiResponse?: object): void; +} +export type DeleteLabelsResponse = [unknown]; +export type DeleteLabelsCallback = SetLabelsCallback; +export type DeleteLabelsOptions = PreconditionOptions; +export type DisableRequesterPaysOptions = PreconditionOptions; +export type DisableRequesterPaysResponse = [unknown]; +export interface DisableRequesterPaysCallback { + (err?: Error | null, apiResponse?: object): void; +} +export type EnableRequesterPaysResponse = [unknown]; +export interface EnableRequesterPaysCallback { + (err?: Error | null, apiResponse?: unknown): void; +} +export type EnableRequesterPaysOptions = PreconditionOptions; +export interface BucketExistsOptions extends GetConfig { + userProject?: string; +} +export type BucketExistsResponse = [boolean]; +export type BucketExistsCallback = ExistsCallback; +export interface GetBucketOptions extends GetConfig { + userProject?: string; +} +export type GetBucketResponse = [Bucket, unknown]; +export interface GetBucketCallback { + (err: ApiError | null, bucket: Bucket | null, apiResponse: unknown): void; +} +export interface GetLabelsOptions { + userProject?: string; +} +export type GetLabelsResponse = [unknown]; +export interface GetLabelsCallback { + (err: Error | null, labels: object | null): void; +} +export interface BucketMetadata extends BaseMetadata { + acl?: AclMetadata[] | null; + autoclass?: { + enabled?: boolean; + toggleTime?: string; + terminalStorageClass?: string; + terminalStorageClassUpdateTime?: string; + }; + billing?: { + requesterPays?: boolean; + }; + cors?: Cors[]; + customPlacementConfig?: { + dataLocations?: string[]; + }; + defaultEventBasedHold?: boolean; + defaultObjectAcl?: AclMetadata[]; + encryption?: { + defaultKmsKeyName?: string; + } | null; + iamConfiguration?: { + publicAccessPrevention?: string; + uniformBucketLevelAccess?: { + enabled?: boolean; + lockedTime?: string; + }; + }; + labels?: { + [key: string]: string | null; + }; + lifecycle?: { + rule?: LifecycleRule[]; + } | null; + location?: string; + locationType?: string; + logging?: { + logBucket?: string; + logObjectPrefix?: string; + }; + metageneration?: string; + name?: string; + objectRetention?: { + mode?: string; + }; + owner?: { + entity?: string; + entityId?: string; 
+ }; + projectNumber?: string | number; + retentionPolicy?: { + effectiveTime?: string; + isLocked?: boolean; + retentionPeriod?: string | number; + } | null; + rpo?: string; + softDeletePolicy?: { + retentionDurationSeconds?: string | number; + readonly effectiveTime?: string; + }; + storageClass?: string; + timeCreated?: string; + updated?: string; + versioning?: { + enabled?: boolean; + }; + website?: { + mainPageSuffix?: string; + notFoundPage?: string; + }; +} +export type GetBucketMetadataResponse = [BucketMetadata, unknown]; +export interface GetBucketMetadataCallback { + (err: ApiError | null, metadata: BucketMetadata | null, apiResponse: unknown): void; +} +export interface GetBucketMetadataOptions { + userProject?: string; +} +export interface GetBucketSignedUrlConfig extends Pick { + action: 'list'; + version?: 'v2' | 'v4'; + cname?: string; + virtualHostedStyle?: boolean; + expires: string | number | Date; + extensionHeaders?: http.OutgoingHttpHeaders; + queryParams?: Query; +} +export declare enum BucketActionToHTTPMethod { + list = "GET" +} +export declare enum AvailableServiceObjectMethods { + setMetadata = 0, + delete = 1 +} +export interface GetNotificationsOptions { + userProject?: string; +} +export interface GetNotificationsCallback { + (err: Error | null, notifications: Notification[] | null, apiResponse: unknown): void; +} +export type GetNotificationsResponse = [Notification[], unknown]; +export interface MakeBucketPrivateOptions { + includeFiles?: boolean; + force?: boolean; + metadata?: BucketMetadata; + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type MakeBucketPrivateResponse = [File[]]; +export interface MakeBucketPrivateCallback { + (err?: Error | null, files?: File[]): void; +} +export interface MakeBucketPublicOptions { + includeFiles?: boolean; + force?: boolean; +} +export interface MakeBucketPublicCallback { + (err?: Error | null, files?: File[]): void; +} +export type MakeBucketPublicResponse = [File[]]; +export interface SetBucketMetadataOptions extends PreconditionOptions { + userProject?: string; +} +export type SetBucketMetadataResponse = [BucketMetadata]; +export interface SetBucketMetadataCallback { + (err?: Error | null, metadata?: BucketMetadata): void; +} +export interface BucketLockCallback { + (err?: Error | null, apiResponse?: unknown): void; +} +export type BucketLockResponse = [unknown]; +export interface Labels { + [key: string]: string; +} +export interface SetLabelsOptions extends PreconditionOptions { + userProject?: string; +} +export type SetLabelsResponse = [unknown]; +export interface SetLabelsCallback { + (err?: Error | null, metadata?: unknown): void; +} +export interface SetBucketStorageClassOptions extends PreconditionOptions { + userProject?: string; +} +export interface SetBucketStorageClassCallback { + (err?: Error | null): void; +} +export type UploadResponse = [File, unknown]; +export interface UploadCallback { + (err: Error | null, file?: File | null, apiResponse?: unknown): void; +} +export interface UploadOptions extends CreateResumableUploadOptions, CreateWriteStreamOptions { + destination?: string | File; + encryptionKey?: string | Buffer; + kmsKeyName?: string; + onUploadProgress?: (progressEvent: any) => void; +} +export interface MakeAllFilesPublicPrivateOptions { + force?: boolean; + private?: boolean; + public?: boolean; + userProject?: string; +} +interface MakeAllFilesPublicPrivateCallback { + (err?: Error | Error[] | null, files?: File[]): void; +} +type 
MakeAllFilesPublicPrivateResponse = [File[]]; +export declare enum BucketExceptionMessages { + PROVIDE_SOURCE_FILE = "You must provide at least one source file.", + DESTINATION_FILE_NOT_SPECIFIED = "A destination file must be specified.", + CHANNEL_ID_REQUIRED = "An ID is required to create a channel.", + TOPIC_NAME_REQUIRED = "A valid topic name is required.", + CONFIGURATION_OBJECT_PREFIX_REQUIRED = "A configuration object with a prefix is required.", + SPECIFY_FILE_NAME = "A file name must be specified.", + METAGENERATION_NOT_PROVIDED = "A metageneration must be provided.", + SUPPLY_NOTIFICATION_ID = "You must supply a notification ID." +} +/** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. + * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +declare class Bucket extends ServiceObject { + name: string; + /** + * A reference to the {@link Storage} associated with this {@link Bucket} + * instance. + * @name Bucket#storage + * @type {Storage} + */ + storage: Storage; + /** + * A user project to apply to each request from this bucket. 
+     * @name Bucket#userProject
+     * @type {string}
+     */
+    userProject?: string;
+    acl: Acl;
+    iam: Iam;
+    crc32cGenerator: CRC32CValidatorGenerator;
+    getFilesStream(query?: GetFilesOptions): Readable;
+    signer?: URLSigner;
+    private instanceRetryValue?;
+    instancePreconditionOpts?: PreconditionOptions;
+    constructor(storage: Storage, name: string, options?: BucketOptions);
+    /**
+     * The bucket's Cloud Storage URI (`gs://`)
+     *
+     * @example
+     * ```ts
+     * const {Storage} = require('@google-cloud/storage');
+     * const storage = new Storage();
+     * const bucket = storage.bucket('my-bucket');
+     *
+     * // `gs://my-bucket`
+     * const href = bucket.cloudStorageURI.href;
+     * ```
+     */
+    get cloudStorageURI(): URL;
+    addLifecycleRule(rule: LifecycleRule | LifecycleRule[], options?: AddLifecycleRuleOptions): Promise<SetBucketMetadataResponse>;
+    addLifecycleRule(rule: LifecycleRule | LifecycleRule[], options: AddLifecycleRuleOptions, callback: SetBucketMetadataCallback): void;
+    addLifecycleRule(rule: LifecycleRule | LifecycleRule[], callback: SetBucketMetadataCallback): void;
+    combine(sources: string[] | File[], destination: string | File, options?: CombineOptions): Promise<CombineResponse>;
+    combine(sources: string[] | File[], destination: string | File, options: CombineOptions, callback: CombineCallback): void;
+    combine(sources: string[] | File[], destination: string | File, callback: CombineCallback): void;
+    createChannel(id: string, config: CreateChannelConfig, options?: CreateChannelOptions): Promise<CreateChannelResponse>;
+    createChannel(id: string, config: CreateChannelConfig, callback: CreateChannelCallback): void;
+    createChannel(id: string, config: CreateChannelConfig, options: CreateChannelOptions, callback: CreateChannelCallback): void;
+    createNotification(topic: string, options?: CreateNotificationOptions): Promise<CreateNotificationResponse>;
+    createNotification(topic: string, options: CreateNotificationOptions, callback: CreateNotificationCallback): void;
+    createNotification(topic: string, callback: CreateNotificationCallback): void;
+    deleteFiles(query?: DeleteFilesOptions): Promise<void>;
+    deleteFiles(callback: DeleteFilesCallback): void;
+    deleteFiles(query: DeleteFilesOptions, callback: DeleteFilesCallback): void;
+    deleteLabels(labels?: string | string[]): Promise<DeleteLabelsResponse>;
+    deleteLabels(options: DeleteLabelsOptions): Promise<DeleteLabelsResponse>;
+    deleteLabels(callback: DeleteLabelsCallback): void;
+    deleteLabels(labels: string | string[], options: DeleteLabelsOptions): Promise<DeleteLabelsResponse>;
+    deleteLabels(labels: string | string[], callback: DeleteLabelsCallback): void;
+    deleteLabels(labels: string | string[], options: DeleteLabelsOptions, callback: DeleteLabelsCallback): void;
+    disableRequesterPays(options?: DisableRequesterPaysOptions): Promise<DisableRequesterPaysResponse>;
+    disableRequesterPays(callback: DisableRequesterPaysCallback): void;
+    disableRequesterPays(options: DisableRequesterPaysOptions, callback: DisableRequesterPaysCallback): void;
+    enableLogging(config: EnableLoggingOptions): Promise<SetBucketMetadataResponse>;
+    enableLogging(config: EnableLoggingOptions, callback: SetBucketMetadataCallback): void;
+    enableRequesterPays(options?: EnableRequesterPaysOptions): Promise<EnableRequesterPaysResponse>;
+    enableRequesterPays(callback: EnableRequesterPaysCallback): void;
+    enableRequesterPays(options: EnableRequesterPaysOptions, callback: EnableRequesterPaysCallback): void;
+    /**
+     * Create a {@link File} object. See {@link File} to see how to handle
+     * the different use cases you may have.
+     *
+     * @param {string} name The name of the file in this bucket.
+     * @param {FileOptions} [options] Configuration options.
+     * @param {string|number} [options.generation] Only use a specific revision of
+     * this file.
+     * @param {string} [options.encryptionKey] A custom encryption key. See
+     * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}.
+     * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will
+     * be used to encrypt the object. Must be in the format:
+     * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`.
+     * KMS key ring must use the same location as the bucket.
+     * @param {string} [options.userProject] The ID of the project which will be
+     * billed for all requests made from File object.
+     * @returns {File}
+     *
+     * @example
+     * ```
+     * const {Storage} = require('@google-cloud/storage');
+     * const storage = new Storage();
+     * const bucket = storage.bucket('albums');
+     * const file = bucket.file('my-existing-file.png');
+     * ```
+     */
+    file(name: string, options?: FileOptions): File;
+    getFiles(query?: GetFilesOptions): Promise<GetFilesResponse>;
+    getFiles(query: GetFilesOptions, callback: GetFilesCallback): void;
+    getFiles(callback: GetFilesCallback): void;
+    getLabels(options?: GetLabelsOptions): Promise<GetLabelsResponse>;
+    getLabels(callback: GetLabelsCallback): void;
+    getLabels(options: GetLabelsOptions, callback: GetLabelsCallback): void;
+    getNotifications(options?: GetNotificationsOptions): Promise<GetNotificationsResponse>;
+    getNotifications(callback: GetNotificationsCallback): void;
+    getNotifications(options: GetNotificationsOptions, callback: GetNotificationsCallback): void;
+    getSignedUrl(cfg: GetBucketSignedUrlConfig): Promise<GetSignedUrlResponse>;
+    getSignedUrl(cfg: GetBucketSignedUrlConfig, callback: GetSignedUrlCallback): void;
+    lock(metageneration: number | string): Promise<BucketLockResponse>;
+    lock(metageneration: number | string, callback: BucketLockCallback): void;
+    makePrivate(options?: MakeBucketPrivateOptions): Promise<MakeBucketPrivateResponse>;
+    makePrivate(callback: MakeBucketPrivateCallback): void;
+    makePrivate(options: MakeBucketPrivateOptions, callback: MakeBucketPrivateCallback): void;
+    makePublic(options?: MakeBucketPublicOptions): Promise<MakeBucketPublicResponse>;
+    makePublic(callback: MakeBucketPublicCallback): void;
+    makePublic(options: MakeBucketPublicOptions, callback: MakeBucketPublicCallback): void;
+    /**
+     * Get a reference to a Cloud Pub/Sub Notification.
+     *
+     * @param {string} id ID of notification.
+     * @returns {Notification}
+     * @see Notification
+     *
+     * @example
+     * ```
+     * const {Storage} = require('@google-cloud/storage');
+     * const storage = new Storage();
+     * const bucket = storage.bucket('my-bucket');
+     * const notification = bucket.notification('1');
+     * ```
+     */
+    notification(id: string): Notification;
+    removeRetentionPeriod(options?: SetBucketMetadataOptions): Promise<SetBucketMetadataResponse>;
+    removeRetentionPeriod(callback: SetBucketMetadataCallback): void;
+    removeRetentionPeriod(options: SetBucketMetadataOptions, callback: SetBucketMetadataCallback): void;
+    request(reqOpts: DecorateRequestOptions): Promise<RequestResponse>;
+    request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void;
+    setLabels(labels: Labels, options?: SetLabelsOptions): Promise<SetLabelsResponse>;
+    setLabels(labels: Labels, callback: SetLabelsCallback): void;
+    setLabels(labels: Labels, options: SetLabelsOptions, callback: SetLabelsCallback): void;
+    setMetadata(metadata: BucketMetadata, options?: SetMetadataOptions): Promise<SetMetadataResponse<BucketMetadata>>;
+    setMetadata(metadata: BucketMetadata, callback: MetadataCallback<BucketMetadata>): void;
+    setMetadata(metadata: BucketMetadata, options: SetMetadataOptions, callback: MetadataCallback<BucketMetadata>): void;
+    setRetentionPeriod(duration: number, options?: SetBucketMetadataOptions): Promise<SetBucketMetadataResponse>;
+    setRetentionPeriod(duration: number, callback: SetBucketMetadataCallback): void;
+    setRetentionPeriod(duration: number, options: SetBucketMetadataOptions, callback: SetBucketMetadataCallback): void;
+    setCorsConfiguration(corsConfiguration: Cors[], options?: SetBucketMetadataOptions): Promise<SetBucketMetadataResponse>;
+    setCorsConfiguration(corsConfiguration: Cors[], callback: SetBucketMetadataCallback): void;
+    setCorsConfiguration(corsConfiguration: Cors[], options: SetBucketMetadataOptions, callback: SetBucketMetadataCallback): void;
+    setStorageClass(storageClass: string, options?: SetBucketStorageClassOptions): Promise<SetBucketMetadataResponse>;
+    setStorageClass(storageClass: string, callback: SetBucketStorageClassCallback): void;
+    setStorageClass(storageClass: string, options: SetBucketStorageClassOptions, callback: SetBucketStorageClassCallback): void;
+    /**
+     * Set a user project to be billed for all requests made from this Bucket
+     * object and any files referenced from this Bucket object.
+     *
+     * @param {string} userProject The user project.
+     *
+     * @example
+     * ```
+     * const {Storage} = require('@google-cloud/storage');
+     * const storage = new Storage();
+     * const bucket = storage.bucket('albums');
+     *
+     * bucket.setUserProject('grape-spaceship-123');
+     * ```
+     */
+    setUserProject(userProject: string): void;
+    upload(pathString: string, options?: UploadOptions): Promise<UploadResponse>;
+    upload(pathString: string, options: UploadOptions, callback: UploadCallback): void;
+    upload(pathString: string, callback: UploadCallback): void;
+    makeAllFilesPublicPrivate_(options?: MakeAllFilesPublicPrivateOptions): Promise<MakeAllFilesPublicPrivateResponse>;
+    makeAllFilesPublicPrivate_(callback: MakeAllFilesPublicPrivateCallback): void;
+    makeAllFilesPublicPrivate_(options: MakeAllFilesPublicPrivateOptions, callback: MakeAllFilesPublicPrivateCallback): void;
+    getId(): string;
+    disableAutoRetryConditionallyIdempotent_(coreOpts: any, methodType: AvailableServiceObjectMethods, localPreconditionOptions?: PreconditionOptions): void;
+}
+/**
+ * Reference to the {@link Bucket} class.
+ * @name module:@google-cloud/storage.Bucket + * @see Bucket + */ +export { Bucket }; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/bucket.js b/node_modules/@google-cloud/storage/build/cjs/src/bucket.js new file mode 100644 index 0000000..903bb70 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/bucket.js @@ -0,0 +1,3523 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; +const index_js_1 = require("./nodejs-common/index.js"); +const paginator_1 = require("@google-cloud/paginator"); +const promisify_1 = require("@google-cloud/promisify"); +const fs = __importStar(require("fs")); +const mime = __importStar(require("mime-types")); +const path = __importStar(require("path")); +const p_limit_1 = __importDefault(require("p-limit")); +const util_1 = require("util"); +const async_retry_1 = __importDefault(require("async-retry")); +const util_js_1 = require("./util.js"); +const acl_js_1 = require("./acl.js"); +const file_js_1 = require("./file.js"); +const iam_js_1 = require("./iam.js"); +const notification_js_1 = require("./notification.js"); +const storage_js_1 = require("./storage.js"); +const signer_js_1 = require("./signer.js"); +const stream_1 = require("stream"); +const url_1 = require("url"); +var BucketActionToHTTPMethod; +(function (BucketActionToHTTPMethod) { + BucketActionToHTTPMethod["list"] = "GET"; +})(BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = BucketActionToHTTPMethod = {})); +var AvailableServiceObjectMethods; +(function (AvailableServiceObjectMethods) { + AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; + AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; +})(AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = AvailableServiceObjectMethods = {})); +var BucketExceptionMessages; +(function (BucketExceptionMessages) { + BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; + BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination file must be specified."; + BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; + BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; + BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; + BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; + BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; + BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; +})(BucketExceptionMessages || (exports.BucketExceptionMessages = BucketExceptionMessages = {})); +/** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. 
+ * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. 
You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. + * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. 
+ * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +class Bucket extends index_js_1.ServiceObject { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getFilesStream(query) { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + constructor(storage, name, options) { + var _a, _b, _c, _d; + options = options || {}; + // Allow for "gs://"-style input, and strip any trailing slashes. + name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); + const requestQueryObject = {}; + if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + requestQueryObject.ifGenerationMatch = + options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + requestQueryObject.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + requestQueryObject.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + requestQueryObject.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + const userProject = options.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * Create a bucket. + * + * @method Bucket#create + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.create(function(err, bucket, apiResponse) { + * if (!err) { + * // The bucket was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.create().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * IamDeleteBucketOptions Configuration options. + * @property {boolean} [ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} DeleteBucketResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteBucketCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the bucket. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} + * + * @method Bucket#delete + * @param {DeleteBucketOptions} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_delete_bucket + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} BucketExistsResponse + * @property {boolean} 0 Whether the {@link Bucket} exists. + */ + /** + * @callback BucketExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link Bucket} exists. + */ + /** + * Check if the bucket exists. + * + * @method Bucket#exists + * @param {BucketExistsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {BucketExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() + * @property {boolean} [autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetBucketResponse + * @property {Bucket} 0 The {@link Bucket}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a bucket if it exists. + * + * You may optionally use this to "get or create" an object by providing + * an object with `autoCreate` set to `true`. Any extra configuration that + * is normally required for the `create` method must be contained within + * this object as well. + * + * @method Bucket#get + * @param {GetBucketOptions} [options] Configuration options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {GetBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.get(function(err, bucket, apiResponse) { + * // `bucket.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.get().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetBucketMetadataResponse + * @property {object} 0 The bucket metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Get the bucket's metadata. + * + * To set metadata, see {@link Bucket#setMetadata}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} + * + * @method Bucket#getMetadata + * @param {GetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_get_requester_pays_status + * Example of retrieving the requester pays status of a bucket: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} SetBucketMetadataResponse + * @property {object} apiResponse The full API response. + */ + /** + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * Set the bucket's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @method Bucket#setMetadata + * @param {object} metadata The metadata you wish to set. + * @param {SetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetBucketMetadataCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Set website metadata field on the bucket. + * //- + * const metadata = { + * website: { + * mainPageSuffix: 'http://example.com', + * notFoundPage: 'http://example.com/404.html' + * } + * }; + * + * bucket.setMetadata(metadata, function(err, apiResponse) {}); + * + * //- + * // Enable versioning for your bucket. + * //- + * bucket.setMetadata({ + * versioning: { + * enabled: true + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Enable KMS encryption for objects within this bucket. + * //- + * bucket.setMetadata({ + * encryption: { + * defaultKmsKeyName: 'projects/grape-spaceship-123/...' + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Set the default event-based hold value for new objects in this + * // bucket. + * //- + * bucket.setMetadata({ + * defaultEventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Remove object lifecycle rules. + * //- + * bucket.setMetadata({ + * lifecycle: null + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: storage, + baseUrl: '/b', + id: name, + createMethod: storage.createBucket.bind(storage), + methods, + }); + this.name = name; + this.storage = storage; + this.userProject = options.userProject; + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.acl.default = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/defaultObjectAcl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.storage.crc32cGenerator; + this.iam = new iam_js_1.Iam(this); + this.getFilesStream = paginator_1.paginator.streamify('getFiles'); + this.instanceRetryValue = storage.retryOptions.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = new url_1.URL('gs://'); + uri.host = this.name; + return uri; + } + /** + * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). + * @property {boolean} [append=true] The new rules will be appended to any + * pre-existing rules. + */ + /** + * + * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects + * in this bucket. + * @property {string|object} action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @property {object} condition Condition a bucket must meet before the + * action occurs on the bucket. 
Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @property {string} [storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. Please see + * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. + */ + /** + * Add an object lifecycle management rule to the bucket. + * + * By default, an Object Lifecycle Management rule provided to this method + * will be included to the existing policy. To replace all existing rules, + * supply the `options` argument, setting `append` to `false`. + * + * To add multiple rules, pass a list to the `rule` parameter. Calling this + * function multiple times asynchronously does not guarantee that all rules + * are added correctly. + * + * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects + * in this bucket. + * @param {string|object} rule.action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @param {object} rule.condition Condition a bucket must meet before the + * action occurson the bucket. Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @param {string} [rule.storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. + * @param {AddLifecycleRuleOptions} [options] Configuration object. + * @param {boolean} [options.append=true] Append the new rule to the existing + * policy. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Automatically have an object deleted from this bucket once it is 3 years + * // of age. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * const lifecycleRules = bucket.metadata.lifecycle.rule; + * + * // Iterate over the Object Lifecycle Management rules on this bucket. + * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // By default, the rule you provide will be added to the existing policy. + * // Optionally, you can disable this behavior to replace all of the + * // pre-existing rules. + * //- + * const options = { + * append: false + * }; + * + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, options, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // All rules have been replaced with the new "delete" rule. + * + * // Iterate over the Object Lifecycle Management rules on this bucket. 
+ * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // For objects created before 2018, "downgrade" the storage class. + * //- + * bucket.addLifecycleRule({ + * action: 'setStorageClass', + * storageClass: 'COLDLINE', + * condition: { + * createdBefore: new Date('2018') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete objects created before 2016 which have the Coldline storage + * // class. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * matchesStorageClass: [ + * 'COLDLINE' + * ], + * createdBefore: new Date('2016') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceNoncurrentTime: 100 + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * noncurrentTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a customTime that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceCustomTime: 100 + * } + * }, function(err, apiResponse) ()); + * + * //- + * // Delete object that has a customTime before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * customTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * ``` + */ + addLifecycleRule(rule, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + options = options || {}; + const rules = Array.isArray(rule) ? rule : [rule]; + for (const curRule of rules) { + if (curRule.condition.createdBefore instanceof Date) { + curRule.condition.createdBefore = curRule.condition.createdBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.customTimeBefore instanceof Date) { + curRule.condition.customTimeBefore = curRule.condition.customTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.noncurrentTimeBefore instanceof Date) { + curRule.condition.noncurrentTimeBefore = + curRule.condition.noncurrentTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + } + if (options.append === false) { + this.setMetadata({ lifecycle: { rule: rules } }, options, callback); + return; + } + // The default behavior appends the previously-defined lifecycle rules with + // the new ones just passed in by the user. + this.getMetadata((err, metadata) => { + var _a, _b; + if (err) { + callback(err); + return; + } + const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule) + ? (_b = metadata.lifecycle) === null || _b === void 0 ? void 0 : _b.rule + : []; + this.setMetadata({ + lifecycle: { rule: currentLifecycleRules.concat(rules) }, + }, options, callback); + }); + } + /** + * @typedef {object} CombineOptions + * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CombineCallback + * @param {?Error} err Request error, if any. + * @param {File} newFile The new {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CombineResponse + * @property {File} 0 The new {@link File}. + * @property {object} 1 The full API response. + */ + /** + * Combine multiple files into one new file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} + * + * @throws {Error} if a non-array is provided as sources argument. + * @throws {Error} if no sources are provided. + * @throws {Error} if no destination is provided. + * + * @param {string[]|File[]} sources The source files that will be + * combined. + * @param {string|File} destination The file you would like the + * source files combined into. + * @param {CombineOptions} [options] Configuration options. + * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {CombineCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const logBucket = storage.bucket('log-bucket'); + * + * const sources = [ + * logBucket.file('2013-logs.txt'), + * logBucket.file('2014-logs.txt') + * ]; + * + * const allLogs = logBucket.file('all-logs.txt'); + * + * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { + * // newFile === allLogs + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * logBucket.combine(sources, allLogs).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + combine(sources, destination, optionsOrCallback, callback) { + var _a; + if (!Array.isArray(sources) || sources.length === 0) { + throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); + } + if (!destination) { + throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required + AvailableServiceObjectMethods.setMetadata, // Same as above + options); + const convertToFile = (file) => { + if (file instanceof file_js_1.File) { + return file; + } + return this.file(file); + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sources = sources.map(convertToFile); + const destinationFile = convertToFile(destination); + callback = callback || index_js_1.util.noop; + if (!destinationFile.metadata.contentType) { + const destinationContentType = mime.contentType(destinationFile.name); + if (destinationContentType) { + destinationFile.metadata.contentType = destinationContentType; + } + } + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifGenerationMatch) === + undefined && + options.ifGenerationMatch === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + if (options.ifGenerationMatch === undefined) { + Object.assign(options, destinationFile.instancePreconditionOpts, options); + } + // Make the request from the destination File object. + destinationFile.request({ + method: 'POST', + uri: '/compose', + maxRetries, + json: { + destination: { + contentType: destinationFile.metadata.contentType, + }, + sourceObjects: sources.map(source => { + const sourceObject = { + name: source.name, + }; + if (source.metadata && source.metadata.generation) { + sourceObject.generation = parseInt(source.metadata.generation.toString()); + } + return sourceObject; + }), + }, + qs: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, destinationFile, resp); + }); + } + /** + * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. + * + * @typedef {object} CreateChannelConfig + * @property {string} address The address where notifications are + * delivered for this channel. + * @property {string} [delimiter] Returns results in a directory-like mode. + * @property {number} [maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [projection=noAcl] Set of properties to return. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions=false] If `true`, lists all versions of an object + * as distinct results. + */ + /** + * @typedef {object} CreateChannelOptions + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} CreateChannelResponse + * @property {Channel} 0 The new {@link Channel}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateChannelCallback + * @param {?Error} err Request error, if any. + * @param {Channel} channel The new {@link Channel}. + * @param {object} apiResponse The full API response. + */ + /** + * Create a channel that will be notified when objects in this bucket changes. + * + * @throws {Error} If an ID is not provided. + * @throws {Error} If an address is not provided. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} + * + * @param {string} id The ID of the channel to create. + * @param {CreateChannelConfig} config Configuration for creating channel. + * @param {string} config.address The address where notifications are + * delivered for this channel. + * @param {string} [config.delimiter] Returns results in a directory-like mode. + * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @param {string} [config.pageToken] A previously-returned page token + * representing part of the larger set of results to view. 
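A promise-based sketch of the `combine` (objects compose) call implemented above, assuming placeholder bucket and object names; when the destination file has no `contentType` set, the method infers one from the destination's file extension.

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function composeLogs() {
  // Placeholder bucket and object names.
  const bucket = storage.bucket('my-log-bucket');
  const sources = [bucket.file('2013-logs.txt'), bucket.file('2014-logs.txt')];

  // contentType for 'all-logs.txt' is inferred from the '.txt' extension
  // because the destination metadata does not set one explicitly.
  const [combined] = await bucket.combine(sources, 'all-logs.txt');
  console.log(`Composed into ${combined.name}`);
}

composeLogs().catch(console.error);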
+ * @param {string} [config.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {string} [config.projection=noAcl] Set of properties to return. + * @param {string} [config.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [config.versions=false] If `true`, lists all versions of an object + * as distinct results. + * @param {CreateChannelOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateChannelCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const id = 'new-channel-id'; + * + * const config = { + * address: 'https://...' + * }; + * + * bucket.createChannel(id, config, function(err, channel, apiResponse) { + * if (!err) { + * // Channel created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.createChannel(id, config).then(function(data) { + * const channel = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + createChannel(id, config, optionsOrCallback, callback) { + if (typeof id !== 'string') { + throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + method: 'POST', + uri: '/o/watch', + json: Object.assign({ + id, + type: 'web_hook', + }, config), + qs: options, + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const resourceId = apiResponse.resourceId; + const channel = this.storage.channel(id, resourceId); + channel.metadata = apiResponse; + callback(null, channel, apiResponse); + }); + } + /** + * Metadata to set for the Notification. + * + * @typedef {object} CreateNotificationOptions + * @property {object} [customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @property {string[]} [eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @property {string} [objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @property {string} [payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CreateNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The new {@link Notification}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CreateNotificationResponse + * @property {Notification} 0 The new {@link Notification}. + * @property {object} 1 The full API response. + */ + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. 
If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationOptions} [options] Metadata to set for the + * notification. + * @param {object} [options.customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @param {string[]} [options.eventTypes] If present, only send notifications about + * listed event types. If empty, send notifications for all event types. + * @param {string} [options.objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @param {string} [options.payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateNotificationCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * @see Notification#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const callback = function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }; + * + * myBucket.createNotification('my-topic', callback); + * + * //- + * // Configure the notification by providing Notification metadata. + * //- + * const metadata = { + * objectNamePrefix: 'prefix-' + * }; + * + * myBucket.createNotification('my-topic', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise.
+ * //- + * myBucket.createNotification('my-topic').then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/createNotification.js + * region_tag:storage_create_bucket_notifications + * Another example: + */ + createNotification(topic, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const topicIsObject = topic !== null && typeof topic === 'object'; + if (topicIsObject && index_js_1.util.isCustomType(topic, 'pubsub/topic')) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + topic = topic.name; + } + if (typeof topic !== 'string') { + throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); + } + const body = Object.assign({ topic }, options); + if (body.topic.indexOf('projects') !== 0) { + body.topic = 'projects/{{projectId}}/topics/' + body.topic; + } + body.topic = `//pubsub.${this.storage.universeDomain}/` + body.topic; + if (!body.payloadFormat) { + body.payloadFormat = 'JSON_API_V1'; + } + const query = {}; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + this.request({ + method: 'POST', + uri: '/notificationConfigs', + json: (0, util_js_1.convertObjKeysToSnakeCase)(body), + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const notification = this.notification(apiResponse.id); + notification.metadata = apiResponse; + callback(null, notification, apiResponse); + }); + } + /** + * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} + * for all of the supported properties. + * @property {boolean} [force] Suppress errors until all files have been + * processed. + */ + /** + * @callback DeleteFilesCallback + * @param {?Error|?Error[]} err Request error, if any, or array of errors from + * files that were not able to be deleted. + * @param {object} [apiResponse] The full API response. + */ + /** + * Iterate over the bucket's files, calling `file.delete()` on each. + * + * This is not an atomic request. A delete attempt will be + * made for each file individually. Any one can fail, in which case only a + * portion of the files you intended to be deleted would have. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors until all files have had a chance + * to be processed. + * + * File preconditions cannot be passed to this function. It will not retry unless + * the idempotency strategy is set to retry always. + * + * The `query` object passed as the first argument will also be passed to + * {@link Bucket#getFiles}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} + * @param {boolean} [query.force] Suppress errors until all files have been + * processed. + * @param {DeleteFilesCallback} [callback] Callback function. 
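A sketch of the `createNotification` call implemented above, assuming placeholder bucket, topic, and prefix names; a bare topic name is expanded to `projects/{{projectId}}/topics/...` and `payloadFormat` defaults to `JSON_API_V1`.

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function notifyOnUploads() {
  // 'my-example-bucket', 'my-topic', and 'uploads/' are placeholders.
  const [notification] = await storage
    .bucket('my-example-bucket')
    .createNotification('my-topic', {
      eventTypes: ['OBJECT_FINALIZE'], // only notify on new or overwritten objects
      objectNamePrefix: 'uploads/',
    });
  console.log(`Created notification config ${notification.id}`);
}

notifyOnUploads().catch(console.error);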
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the files in the bucket. + * //- + * bucket.deleteFiles(function(err) {}); + * + * //- + * // By default, if a file cannot be deleted, this method will stop deleting + * // files from your bucket. You can override this setting with `force: + * // true`. + * //- + * bucket.deleteFiles({ + * force: true + * }, function(errors) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * }); + * + * //- + * // The first argument to this method acts as a query to + * // {@link Bucket#getFiles}. As an example, you can delete files + * // which match a prefix. + * //- + * bucket.deleteFiles({ + * prefix: 'images/' + * }, function(err) { + * if (!err) { + * // All files in the `images` directory have been deleted. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteFiles().then(function() {}); + * ``` + */ + deleteFiles(queryOrCallback, callback) { + let query = {}; + if (typeof queryOrCallback === 'function') { + callback = queryOrCallback; + } + else if (queryOrCallback) { + query = queryOrCallback; + } + const MAX_PARALLEL_LIMIT = 10; + const MAX_QUEUE_SIZE = 1000; + const errors = []; + const deleteFile = (file) => { + return file.delete(query).catch(err => { + if (!query.force) { + throw err; + } + errors.push(err); + }); + }; + (async () => { + try { + let promises = []; + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const filesStream = this.getFilesStream(query); + for await (const curFile of filesStream) { + if (promises.length >= MAX_QUEUE_SIZE) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => deleteFile(curFile)).catch(e => { + filesStream.destroy(); + throw e; + })); + } + await Promise.all(promises); + callback(errors.length > 0 ? errors : null); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @deprecated + * @typedef {array} DeleteLabelsResponse + * @property {object} 0 The full API response. + */ + /** + * @deprecated + * @callback DeleteLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata Bucket's metadata. + */ + /** + * @deprecated Use setMetadata directly + * Delete one or more labels from this bucket. + * + * @param {string|string[]} [labels] The labels to delete. If no labels are + * provided, all of the labels are removed. + * @param {DeleteLabelsCallback} [callback] Callback function. + * @param {DeleteLabelsOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the labels from this bucket. + * //- + * bucket.deleteLabels(function(err, apiResponse) {}); + * + * //- + * // Delete a single label. + * //- + * bucket.deleteLabels('labelone', function(err, apiResponse) {}); + * + * //- + * // Delete a specific set of labels. + * //- + * bucket.deleteLabels([ + * 'labelone', + * 'labeltwo' + * ], function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.deleteLabels().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { + let labels = new Array(); + let options = {}; + if (typeof labelsOrCallbackOrOptions === 'function') { + callback = labelsOrCallbackOrOptions; + } + else if (typeof labelsOrCallbackOrOptions === 'string') { + labels = [labelsOrCallbackOrOptions]; + } + else if (Array.isArray(labelsOrCallbackOrOptions)) { + labels = labelsOrCallbackOrOptions; + } + else if (labelsOrCallbackOrOptions) { + options = labelsOrCallbackOrOptions; + } + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const deleteLabels = (labels) => { + const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { + nullLabelMap[labelKey] = null; + return nullLabelMap; + }, {}); + if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { + this.setLabels(nullLabelMap, options, callback); + } + else { + this.setLabels(nullLabelMap, callback); + } + }; + if (labels.length === 0) { + this.getLabels((err, labels) => { + if (err) { + callback(err); + return; + } + deleteLabels(Object.keys(labels)); + }); + } + else { + deleteLabels(labels); + } + } + /** + * @typedef {array} DisableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DisableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only + * This feature is not yet widely-available.
+ * + * Disable `requesterPays` functionality from this bucket. + * + * @param {DisableRequesterPaysCallback} [callback] Callback function. + * @param {DisableRequesterPaysOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.disableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality disabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.disableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_disable_requester_pays + * Example of disabling requester pays: + */ + disableRequesterPays(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: false, + }, + }, options, callback); + } + /** + * Configuration object for enabling logging. + * + * @typedef {object} EnableLoggingOptions + * @property {string|Bucket} [bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @property {string} prefix A unique prefix for log object names. + */ + /** + * Enable logging functionality for this bucket. This will make two API + * requests, first to grant Cloud Storage WRITE permission to the bucket, then + * to set the appropriate configuration on the Bucket's metadata. + * + * @param {EnableLoggingOptions} config Configuration options. + * @param {string|Bucket} [config.bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @param {string} config.prefix A unique prefix for log object names. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const config = { + * prefix: 'log' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) { + * if (!err) { + * // Logging functionality enabled successfully. + * } + * }); + * + * ``` + * @example + * Optionally, provide a destination bucket. + * ``` + * const config = { + * prefix: 'log', + * bucket: 'destination-bucket' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) {}); + * ``` + * + * @example + * If the callback is omitted, we'll return a Promise. + * ``` + * bucket.enableLogging(config).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + enableLogging(config, callback) { + if (!config || + typeof config === 'function' || + typeof config.prefix === 'undefined') { + throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); + } + let logBucket = this.id; + if (config.bucket && config.bucket instanceof Bucket) { + logBucket = config.bucket.id; + } + else if (config.bucket && typeof config.bucket === 'string') { + logBucket = config.bucket; + } + const options = {}; + if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { + options.ifMetagenerationMatch = config.ifMetagenerationMatch; + } + if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { + options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; + } + (async () => { + try { + const [policy] = await this.iam.getPolicy(); + policy.bindings.push({ + members: ['group:cloud-storage-analytics@google.com'], + role: 'roles/storage.objectCreator', + }); + await this.iam.setPolicy(policy); + this.setMetadata({ + logging: { + logBucket, + logObjectPrefix: config.prefix, + }, + }, options, callback); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @typedef {array} EnableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback EnableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only + * This feature is not yet widely-available.
+ * + * Enable `requesterPays` functionality for this bucket. This enables you, the + * bucket owner, to have the requesting user assume the charges for the access + * to your bucket and its contents. + * + * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] + * Callback function or precondition options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.enableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality enabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.enableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_enable_requester_pays + * Example of enabling requester pays: + */ + enableRequesterPays(optionsOrCallback, cb) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: true, + }, + }, options, cb); + } + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. + * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name, options) { + if (!name) { + throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); + } + return new file_js_1.File(this, name, options); + } + /** + * @typedef {array} GetFilesResponse + * @property {File[]} 0 Array of {@link File} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetFilesCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Array of {@link File} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Query object for listing files. + * + * @typedef {object} GetFilesOptions + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {string} [delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. 
Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @property {string} [endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {boolean} [includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. + * See: https://cloud.google.com/storage/docs/managed-folders + * @property {boolean} [includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [matchGlob] A glob pattern used to filter results, + * for example foo*bar + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {boolean} [softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @property {string} [startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions] If true, returns File objects scoped to + * their versions. + */ + /** + * Get {@link File} objects for the files currently in the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} + * + * @param {GetFilesOptions} [query] Query object for listing files. + * @param {boolean} [query.autoPaginate=true] Have pagination handled + * automatically. + * @param {string} [query.delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @param {string} [query.endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {boolean} [query.includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. 
+ * See: https://cloud.google.com/storage/docs/managed-folders + * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @param {string} [query.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {number} [query.maxApiCalls] Maximum number of API calls to make. + * @param {number} [query.maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @param {string} [query.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {boolean} [query.softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @param {string} [query.startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {string} [query.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [query.versions] If true, returns File objects scoped to + * their versions. + * @param {GetFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFiles(function(err, files) { + * if (!err) { + * // files is an array of File objects. + * } + * }); + * + * //- + * // If your bucket has versioning enabled, you can get all of your files + * // scoped to their generation. + * //- + * bucket.getFiles({ + * versions: true + * }, function(err, files) { + * // Each file is scoped to its generation. + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, files, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * bucket.getFiles(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * files[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * files[0].getMetadata(function(err, metadata) {}); + * }; + * + * bucket.getFiles({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getFiles().then(function(data) { + * const files = data[0]; + * }); + * + * ``` + * @example + *
+ * Simulating a File System
+ *
+ * With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.
+ *
+ * Consider the following remote objects:
+ *
+ *   1. "a"
+ *   2. "a/b/c/d"
+ *   3. "b/d/e"
+ *
+ * Using a delimiter of `/` will return a single file, "a".
+ *
+ * `apiResponse.prefixes` will return the "sub-directories" that were found:
+ *
+ *   1. "a/"
+ *   2. "b/"
+ * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, function(err, files, nextQuery, apiResponse) { + * // files = [ + * // {File} // File object for file "a" + * // ] + * + * // apiResponse.prefixes = [ + * // 'a/', + * // 'b/' + * // ] + * }); + * ``` + * + * @example + * Using prefixes, it's now possible to simulate a file system with follow-up requests. + * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/', + * prefix: 'a/' + * }, function(err, files, nextQuery, apiResponse) { + * // No files found within "directory" a. + * // files = [] + * + * // However, a "sub-directory" was found. + * // This prefix can be used to continue traversing the "file system". + * // apiResponse.prefixes = [ + * // 'a/b/' + * // ] + * }); + * ``` + * + * @example include:samples/files.js + * region_tag:storage_list_files + * Another example: + * + * @example include:samples/files.js + * region_tag:storage_list_files_with_prefix + * Example of listing files, filtered by a prefix: + */ + getFiles(queryOrCallback, callback) { + let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; + if (!callback) { + callback = queryOrCallback; + } + query = Object.assign({}, query); + this.request({ + uri: '/o', + qs: query, + }, (err, resp) => { + if (err) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const files = itemsArray.map((file) => { + const options = {}; + if (query.versions) { + options.generation = file.generation; + } + if (file.kmsKeyName) { + options.kmsKeyName = file.kmsKeyName; + } + const fileInstance = this.file(file.name, options); + fileInstance.metadata = file; + return fileInstance; + }); + let nextQuery = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, query, { + pageToken: resp.nextPageToken, + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(null, files, nextQuery, resp); + }); + } + /** + * @deprecated + * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated + * @typedef {array} GetLabelsResponse + * @property {object} 0 Object of labels currently set on this bucket. + */ + /** + * @deprecated + * @callback GetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} labels Object of labels currently set on this bucket. + */ + /** + * @deprecated Use getMetadata directly. + * Get the labels currently set on this bucket. + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getLabels(function(err, labels) { + * if (err) { + * // Error handling omitted. + * } + * + * // labels = { + * // label: 'labelValue', + * // ... + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.getLabels().then(function(data) { + * const labels = data[0]; + * }); + * ``` + */ + getLabels(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.getMetadata(options, (err, metadata) => { + if (err) { + callback(err, null); + return; + } + callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {}); + }); + } + /** + * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback GetNotificationsCallback + * @param {?Error} err Request error, if any. + * @param {Notification[]} notifications Array of {@link Notification} + * instances. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetNotificationsResponse + * @property {Notification[]} 0 Array of {@link Notification} instances. + * @property {object} 1 The full API response. + */ + /** + * Retrieves a list of notification subscriptions for a given bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} + * + * @param {GetNotificationsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.getNotifications(function(err, notifications, apiResponse) { + * if (!err) { + * // notifications is an array of Notification objects. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getNotifications().then(function(data) { + * const notifications = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/listNotifications.js + * region_tag:storage_list_bucket_notifications + * Another example: + */ + getNotifications(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + uri: '/notificationConfigs', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const notifications = itemsArray.map((notification) => { + const notificationInstance = this.notification(notification.id); + notificationInstance.metadata = notification; + return notificationInstance; + }); + callback(null, notifications, resp); + }); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * @typedef {object} GetBucketSignedUrlConfig + * @property {string} action Only listing objects within a bucket (HTTP: GET) is supported for bucket-level signed URLs. + * @property {*} expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. 
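Since `getLabels` is marked deprecated above and simply proxies `getMetadata`, a sketch of reading the labels from the bucket metadata directly, with a placeholder bucket name:

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function readLabels() {
  // 'my-example-bucket' is a placeholder bucket name.
  const bucket = storage.bucket('my-example-bucket');
  // Equivalent to bucket.getLabels(): the labels live on the bucket metadata.
  const [metadata] = await bucket.getMetadata();
  console.log(metadata.labels || {});
}

readLabels().catch(console.error);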
+ * @property {string} [version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URLs. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @property {string} [cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @property {object} [extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [queryParams] Additional query parameters to include + * in the signed URL. + */ + /** + * Get a signed URL to allow limited time access to a bucket. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {GetBucketSignedUrlConfig} config Configuration object. + * @param {string} config.action Currently only supports "list" (HTTP: GET). + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URLs. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com".
+ * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * //- + * // Generate a URL that allows temporary access to list files in a bucket. + * //- + * const request = require('request'); + * + * const config = { + * action: 'list', + * expires: '03-17-2025' + * }; + * + * bucket.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The bucket is now available to be listed from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * ``` + */ + getSignedUrl(cfg, callback) { + const method = BucketActionToHTTPMethod[cfg.action]; + const signConfig = { + method, + expires: cfg.expires, + version: cfg.version, + cname: cfg.cname, + extensionHeaders: cfg.extensionHeaders || {}, + queryParams: cfg.queryParams || {}, + host: cfg.host, + signingEndpoint: cfg.signingEndpoint, + }; + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this, undefined, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback BucketLockCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Lock a previously-defined retention policy. This will prevent changes to + * the policy. + * + * @throws {Error} if a metageneration is not provided. + * + * @param {number|string} metageneration The bucket's metageneration. This is + * accesssible from calling {@link File#getMetadata}. + * @param {BucketLockCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const metageneration = 2; + * + * bucket.lock(metageneration, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.lock(metageneration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + lock(metageneration, callback) { + const metatype = typeof metageneration; + if (metatype !== 'number' && metatype !== 'string') { + throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); + } + this.request({ + method: 'POST', + uri: '/lockRetentionPolicy', + qs: { + ifMetagenerationMatch: metageneration, + }, + }, callback); + } + /** + * @typedef {array} MakeBucketPrivateResponse + * @property {File[]} 0 List of files made private. + */ + /** + * @callback MakeBucketPrivateCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made private. + */ + /** + * @typedef {object} MakeBucketPrivateOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Make the bucket listing private. + * + * You may also choose to make the contents of the bucket private by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePrivate} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPrivateOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {Metadata} [options.metadata] Define custom metadata properties to define + * along with the operation. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {MakeBucketPrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket private. + * //- + * bucket.makePrivate(function(err) {}); + * + * //- + * // Make the bucket and its contents private. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePrivate(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // Make the bucket and its contents private, using force to suppress errors + * // until all files have been processed. 
+ * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePrivate(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePrivate(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b, _c, _d; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options.private = true; + const query = { + predefinedAcl: 'projectPrivate', + }; + if (options.userProject) { + query.userProject = options.userProject; + } + if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + query.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + query.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + query.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + // You aren't allowed to set both predefinedAcl & acl properties on a bucket + // so acl must explicitly be nullified. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, (err) => { + if (err) { + callback(err); + } + const internalCall = () => { + if (options.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); + } + return Promise.resolve([]); + }; + internalCall() + .then(files => callback(null, files)) + .catch(callback); + }); + } + /** + * @typedef {object} MakeBucketPublicOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + */ + /** + * @callback MakeBucketPublicCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made public. + */ + /** + * @typedef {array} MakeBucketPublicResponse + * @property {File[]} 0 List of files made public. + */ + /** + * Make the bucket publicly readable. + * + * You may also choose to make the contents of the bucket publicly readable by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePublic} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. 
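A sketch of the `makePrivate` call implemented above with a metageneration precondition, assuming a placeholder bucket name; `preconditionOpts` values are copied onto the metadata PATCH query string, so the request fails if the bucket changed after its metadata was read.

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function makeBucketPrivate() {
  // 'my-example-bucket' is a placeholder bucket name.
  const bucket = storage.bucket('my-example-bucket');
  const [metadata] = await bucket.getMetadata();
  // includeFiles/force mirror the documented options; the precondition guards the PATCH.
  const [files] = await bucket.makePrivate({
    includeFiles: true,
    force: true,
    preconditionOpts: {ifMetagenerationMatch: metadata.metageneration},
  });
  console.log(`${files.length} files made private.`);
}

makeBucketPrivate().catch(console.error);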
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPublicOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {MakeBucketPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket publicly readable. + * //- + * bucket.makePublic(function(err) {}); + * + * //- + * // Make the bucket and its contents publicly readable. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePublic(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // Make the bucket and its contents publicly readable, using force to + * // suppress errors until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePublic(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePublic(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePublic(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const req = { public: true, ...options }; + this.acl + .add({ + entity: 'allUsers', + role: 'READER', + }) + .then(() => { + return this.acl.default.add({ + entity: 'allUsers', + role: 'READER', + }); + }) + .then(() => { + if (req.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); + } + return []; + }) + .then(files => callback(null, files), callback); + } + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. + * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id) { + if (!id) { + throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); + } + return new notification_js_1.Notification(this, id); + } + /** + * Remove an already-existing retention policy from this bucket, if it is not + * locked. + * + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * bucket.removeRetentionPeriod(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.removeRetentionPeriod().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + removeRetentionPeriod(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: null, + }, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { + reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject }; + } + return super.request(reqOpts, callback); + } + /** + * @deprecated + * @typedef {array} SetLabelsResponse + * @property {object} 0 The bucket metadata. + */ + /** + * @deprecated + * @callback SetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * @deprecated + * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated Use setMetadata directly. + * Set labels on the bucket. + * + * This makes an underlying call to {@link Bucket#setMetadata}, which + * is a PATCH request. This means an individual label can be overwritten, but + * unmentioned labels will not be touched. + * + * @param {object} labels Labels to set on the bucket. + * @param {SetLabelsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const labels = { + * labelone: 'labelonevalue', + * labeltwo: 'labeltwovalue' + * }; + * + * bucket.setLabels(labels, function(err, metadata) { + * if (!err) { + * // Labels set successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setLabels(labels).then(function(data) { + * const metadata = data[0]; + * }); + * ``` + */ + setLabels(labels, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.setMetadata({ labels }, options, callback); + } + setMetadata(metadata, optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * Lock all objects contained in the bucket, based on their creation time. Any + * attempt to overwrite or delete objects younger than the retention period + * will result in a `PERMISSION_DENIED` error. 
+ * + * An unlocked retention policy can be modified or removed from the bucket via + * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A + * locked retention policy cannot be removed or shortened in duration for the + * lifetime of the bucket. Attempting to remove or decrease period of a locked + * retention policy will result in a `PERMISSION_DENIED` error. You can still + * increase the policy. + * + * @param {*} duration In seconds, the minimum retention time for all objects + * contained in this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataCallback} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const DURATION_SECONDS = 15780000; // 6 months. + * + * //- + * // Lock the objects in this bucket for 6 months. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setRetentionPeriod(duration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: { + retentionPeriod: duration.toString(), + }, + }, options, callback); + } + /** + * + * @typedef {object} Cors + * @property {number} [maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @property {string[]} [method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + */ + /** + * This can be used to set the CORS configuration on the bucket. + * + * The configuration will be overwritten with the value passed into this. + * + * @param {Cors[]} corsConfiguration The new CORS configuration to set + * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataOptions} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour + * bucket.setCorsConfiguration(corsConfiguration); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + cors: corsConfiguration, + }, options, callback); + } + /** + * @typedef {object} SetBucketStorageClassOptions + * @property {string} [userProject] - The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetBucketStorageClassCallback + * @param {?Error} err Request error, if any. + */ + /** + * Set the default storage class for new files in this bucket. + * + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] - The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass = storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(); + this.setMetadata({ storageClass }, options, callback); + } + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.userProject = userProject; + const methods = [ + 'create', + 'delete', + 'exists', + 'get', + 'getMetadata', + 'setMetadata', + ]; + methods.forEach(method => { + const methodConfig = this.methods[method]; + if (typeof methodConfig === 'object') { + if (typeof methodConfig.reqOpts === 'object') { + Object.assign(methodConfig.reqOpts.qs, { userProject }); + } + else { + methodConfig.reqOpts = { + qs: { userProject }, + }; + } + } + }); + } + /** + * @typedef {object} UploadOptions Configuration options for Bucket#upload(). + * @property {string|File} [destination] The place to save + * your file. 
If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @property {string} [encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @property {boolean} [gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @property {string} [kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @property {object} [metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @property {string} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + */ + /** + * @typedef {array} UploadResponse + * @property {object} 0 The uploaded {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback UploadCallback + * @param {?Error} err Request error, if any. + * @param {object} file The uploaded {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Upload a file to the bucket. This is a convenience method that wraps + * {@link File#createWriteStream}. 
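+ *
+ * // Editor's note: an illustrative sketch (not from the upstream docs) of the
+ * // promise form, assuming an existing `bucket` and a readable local path:
+ * //
+ * //   const [file] = await bucket.upload('/local/path/image.png', {
+ * //     destination: 'images/image.png',
+ * //     gzip: true,
+ * //   });
+ * //   console.log(`Uploaded as ${file.name}`);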
+ * + * Resumable uploads are enabled by default + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {string} pathString The fully qualified path to the file you + * wish to upload to your bucket. + * @param {UploadOptions} [options] Configuration options. + * @param {string|File} [options.destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {boolean} [options.gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @param {object} [options.metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @param {string} [options.offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @param {string} [options.predefinedAcl] Apply a predefined set of access + * controls to this object. + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @param {boolean} [options.private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @param {boolean} [options.public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @param {boolean} [options.resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @param {number} [options.timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @param {string} [options.uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {string|boolean} [options.validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. 
CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + * @param {UploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Upload a file from a local path. + * //- + * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { + * // Your bucket now contains: + * // - "image.png" (with the contents of `/local/path/image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * + * //- + * // It's not always that easy. You will likely want to specify the filename + * // used when your new file lands in your bucket. + * // + * // You may also want to set metadata or customize other options. + * //- + * const options = { + * destination: 'new-image.png', + * validation: 'crc32c', + * metadata: { + * metadata: { + * event: 'Fall trip to the zoo' + * } + * } + * }; + * + * bucket.upload('local-image.png', options, function(err, file) { + * // Your bucket now contains: + * // - "new-image.png" (with the contents of `local-image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * //- + * // You can also have a file gzip'd on the fly. + * //- + * bucket.upload('index.html', { gzip: true }, function(err, file) { + * // Your bucket now contains: + * // - "index.html" (automatically compressed with gzip) + * + * // Downloading the file with `file.download` will automatically decode + * the + * // file. + * }); + * + * //- + * // You may also re-use a File object, {File}, that references + * // the file you wish to create or overwrite. + * //- + * const options = { + * destination: bucket.file('existing-file.png'), + * resumable: false + * }; + * + * bucket.upload('local-img.png', options, function(err, newFile) { + * // Your bucket now contains: + * // - "existing-file.png" (with the contents of `local-img.png') + * + * // Note: + * // The `newFile` parameter is equal to `file`. + * }); + * + * //- + * // To use + * // + * // Customer-supplied Encryption Keys, provide the `encryptionKey` + * option. + * //- + * const crypto = require('crypto'); + * const encryptionKey = crypto.randomBytes(32); + * + * bucket.upload('img.png', { + * encryptionKey: encryptionKey + * }, function(err, newFile) { + * // `img.png` was uploaded with your custom encryption key. + * + * // `newFile` is already configured to use the encryption key when making + * // operations on the remote object. + * + * // However, to use your encryption key later, you must create a `File` + * // instance with the `key` supplied: + * const file = bucket.file('img.png', { + * encryptionKey: encryptionKey + * }); + * + * // Or with `file#setEncryptionKey`: + * const file = bucket.file('img.png'); + * file.setEncryptionKey(encryptionKey); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.upload('local-image.png').then(function(data) { + * const file = data[0]; + * }); + * + * To upload a file from a URL, use {@link File#createWriteStream}. 
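+ *
+ * // Editor's note: an illustrative sketch of that approach, assuming Node's
+ * // built-in `https` module and a publicly reachable URL:
+ * //
+ * //   const https = require('https');
+ * //   https.get('https://example.com/image.png', res => {
+ * //     res.pipe(bucket.file('remote-image.png').createWriteStream());
+ * //   });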
+ * + * ``` + * @example include:samples/files.js + * region_tag:storage_upload_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + */ + upload(pathString, optionsOrCallback, callback) { + var _a, _b; + const upload = (numberOfRetries) => { + const returnValue = (0, async_retry_1.default)(async (bail) => { + await new Promise((resolve, reject) => { + var _a, _b; + if (numberOfRetries === 0 && + ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { + newFile.storage.retryOptions.autoRetry = false; + } + const writable = newFile.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + fs.createReadStream(pathString) + .on('error', bail) + .pipe(writable) + .on('error', err => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + else { + return bail(err); + } + }) + .on('finish', () => { + return resolve(); + }); + }); + }, { + retries: numberOfRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(null, newFile, newFile.metadata); + } + }) + .catch(callback); + } + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (global['GCLOUD_SANDBOX_ENV']) { + return; + } + let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options = Object.assign({ + metadata: {}, + }, options); + // Do not retry if precondition option ifGenerationMatch is not set + // because this is a file operation + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + let newFile; + if (options.destination instanceof file_js_1.File) { + newFile = options.destination; + } + else if (options.destination !== null && + typeof options.destination === 'string') { + // Use the string as the name of the file. + newFile = this.file(options.destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + else { + // Resort to using the name of the incoming file. 
+ const destination = path.basename(pathString); + newFile = this.file(destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + upload(maxRetries); + } + /** + * @private + * + * @typedef {object} MakeAllFilesPublicPrivateOptions + * @property {boolean} [force] Suppress errors until all files have been + * processed. + * @property {boolean} [private] Make files private. + * @property {boolean} [public] Make files public. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @private + * + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Files that were updated. + */ + /** + * @typedef {array} MakeAllFilesPublicPrivateResponse + * @property {File[]} 0 List of files affected. + */ + /** + * Iterate over all of a bucket's files, calling `file.makePublic()` (public) + * or `file.makePrivate()` (private) on each. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop, and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors. + * + * @private + * + * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. + * @param {boolean} [options.force] Suppress errors until all files have been + * processed. + * @param {boolean} [options.private] Make files private. + * @param {boolean} [options.public] Make files public. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. + * + * @return {Promise} + */ + makeAllFilesPublicPrivate_(optionsOrCallback, callback) { + const MAX_PARALLEL_LIMIT = 10; + const errors = []; + const updatedFiles = []; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const processFile = async (file) => { + try { + await (options.public ? file.makePublic() : file.makePrivate(options)); + updatedFiles.push(file); + } + catch (e) { + if (!options.force) { + throw e; + } + errors.push(e); + } + }; + this.getFiles(options) + .then(([files]) => { + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const promises = files.map(file => { + return limit(() => processFile(file)); + }); + return Promise.all(promises); + }) + .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles)); + } + getId() { + return this.id; + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + methodType, localPreconditionOptions) { + var _a, _b; + if (typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + (methodType === AvailableServiceObjectMethods.setMetadata || + methodType === AvailableServiceObjectMethods.delete) && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) { + this.storage.retryOptions.autoRetry = false; + } + else if (this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } +} +exports.Bucket = Bucket; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Bucket, 'getFiles'); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Bucket, { + exclude: ['cloudStorageURI', 'request', 'file', 'notification'], +}); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/channel.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/channel.d.ts new file mode 100644 index 0000000..6bb52b9 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/channel.d.ts @@ -0,0 +1,33 @@ +import { BaseMetadata, ServiceObject } from './nodejs-common/index.js'; +import { Storage } from './storage.js'; +export interface StopCallback { + (err: Error | null, apiResponse?: unknown): void; +} +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +declare class Channel extends ServiceObject { + constructor(storage: Storage, id: string, resourceId: string); + stop(): Promise; + stop(callback: StopCallback): void; +} +/** + * Reference to the {@link Channel} class. + * @name module:@google-cloud/storage.Channel + * @see Channel + */ +export { Channel }; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/channel.js b/node_modules/@google-cloud/storage/build/cjs/src/channel.js new file mode 100644 index 0000000..5b9c91c --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/channel.js @@ -0,0 +1,104 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Channel = void 0; +const index_js_1 = require("./nodejs-common/index.js"); +const promisify_1 = require("@google-cloud/promisify"); +/** + * Create a channel object to interact with a Cloud Storage channel. 
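+ *
+ * // Editor's note (illustrative): channels are typically obtained from
+ * // Storage#channel and stopped once the watch is no longer needed, e.g.
+ * // `await storage.channel('id', 'resource-id').stop();`.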
+ * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +class Channel extends index_js_1.ServiceObject { + constructor(storage, id, resourceId) { + const config = { + parent: storage, + baseUrl: '/channels', + // An ID shouldn't be included in the API requests. + // RE: + // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 + id: '', + methods: { + // Only need `request`. + }, + }; + super(config); + this.metadata.id = id; + this.metadata.resourceId = resourceId; + } + /** + * @typedef {array} StopResponse + * @property {object} 0 The full API response. + */ + /** + * @callback StopCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Stop this channel. + * + * @param {StopCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * channel.stop(function(err, apiResponse) { + * if (!err) { + * // Channel stopped successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * channel.stop().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + stop(callback) { + callback = callback || index_js_1.util.noop; + this.request({ + method: 'POST', + uri: '/stop', + json: this.metadata, + }, (err, apiResponse) => { + callback(err, apiResponse); + }); + } +} +exports.Channel = Channel; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. 
+ */ +(0, promisify_1.promisifyAll)(Channel); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/crc32c.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/crc32c.d.ts new file mode 100644 index 0000000..a15bd96 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/crc32c.d.ts @@ -0,0 +1,145 @@ +/// +/// +import { PathLike } from 'fs'; +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +declare const CRC32C_EXTENSIONS: readonly [0, 4067132163, 3778769143, 324072436, 3348797215, 904991772, 648144872, 3570033899, 2329499855, 2024987596, 1809983544, 2575936315, 1296289744, 3207089363, 2893594407, 1578318884, 274646895, 3795141740, 4049975192, 51262619, 3619967088, 632279923, 922689671, 3298075524, 2592579488, 1760304291, 2075979607, 2312596564, 1562183871, 2943781820, 3156637768, 1313733451, 549293790, 3537243613, 3246849577, 871202090, 3878099393, 357341890, 102525238, 4101499445, 2858735121, 1477399826, 1264559846, 3107202533, 1845379342, 2677391885, 2361733625, 2125378298, 820201905, 3263744690, 3520608582, 598981189, 4151959214, 85089709, 373468761, 3827903834, 3124367742, 1213305469, 1526817161, 2842354314, 2107672161, 2412447074, 2627466902, 1861252501, 1098587580, 3004210879, 2688576843, 1378610760, 2262928035, 1955203488, 1742404180, 2511436119, 3416409459, 969524848, 714683780, 3639785095, 205050476, 4266873199, 3976438427, 526918040, 1361435347, 2739821008, 2954799652, 1114974503, 2529119692, 1691668175, 2005155131, 2247081528, 3690758684, 697762079, 986182379, 3366744552, 476452099, 3993867776, 4250756596, 255256311, 1640403810, 2477592673, 2164122517, 1922457750, 2791048317, 1412925310, 1197962378, 3037525897, 3944729517, 427051182, 170179418, 4165941337, 746937522, 3740196785, 3451792453, 1070968646, 1905808397, 2213795598, 2426610938, 1657317369, 3053634322, 1147748369, 1463399397, 2773627110, 4215344322, 153784257, 444234805, 3893493558, 1021025245, 3467647198, 3722505002, 797665321, 2197175160, 1889384571, 1674398607, 2443626636, 1164749927, 3070701412, 2757221520, 1446797203, 137323447, 4198817972, 3910406976, 461344835, 3484808360, 1037989803, 781091935, 3705997148, 2460548119, 1623424788, 1939049696, 2180517859, 1429367560, 2807687179, 3020495871, 1180866812, 410100952, 3927582683, 4182430767, 186734380, 3756733383, 763408580, 1053836080, 3434856499, 2722870694, 1344288421, 1131464017, 2971354706, 1708204729, 2545590714, 2229949006, 1988219213, 680717673, 3673779818, 3383336350, 1002577565, 4010310262, 493091189, 238226049, 4233660802, 2987750089, 1082061258, 1395524158, 2705686845, 1972364758, 2279892693, 2494862625, 1725896226, 952904198, 3399985413, 3656866545, 731699698, 4283874585, 222117402, 510512622, 3959836397, 3280807620, 837199303, 582374963, 3504198960, 68661723, 4135334616, 3844915500, 390545967, 1230274059, 3141532936, 2825850620, 1510247935, 2395924756, 2091215383, 1878366691, 2644384480, 3553878443, 565732008, 854102364, 3229815391, 340358836, 3861050807, 4117890627, 119113024, 1493875044, 2875275879, 3090270611, 1247431312, 2660249211, 1828433272, 2141937292, 2378227087, 3811616794, 291187481, 34330861, 4032846830, 615137029, 3603020806, 3314634738, 939183345, 1776939221, 2609017814, 2295496738, 2058945313, 2926798794, 1545135305, 1330124605, 3173225534, 4084100981, 17165430, 307568514, 3762199681, 888469610, 3332340585, 3587147933, 665062302, 2042050490, 2346497209, 2559330125, 1793573966, 3190661285, 
1279665062, 1595330642, 2910671697]; +declare const CRC32C_EXTENSION_TABLE: Int32Array; +/** An interface for CRC32C hashing and validation */ +interface CRC32CValidator { + /** + * A method returning the CRC32C as a base64-encoded string. + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + toString: () => string; + /** + * A method validating a base64-encoded CRC32C string. + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + */ + validate: (value: string) => boolean; + /** + * A method for passing `Buffer`s for CRC32C generation. + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + */ + update: (data: Buffer) => void; +} +/** A function that generates a CRC32C Validator */ +interface CRC32CValidatorGenerator { + /** Should return a new, ready-to-use `CRC32CValidator` */ + (): CRC32CValidator; +} +declare const CRC32C_DEFAULT_VALIDATOR_GENERATOR: CRC32CValidatorGenerator; +declare const CRC32C_EXCEPTION_MESSAGES: { + readonly INVALID_INIT_BASE64_RANGE: (l: number) => string; + readonly INVALID_INIT_BUFFER_LENGTH: (l: number) => string; + readonly INVALID_INIT_INTEGER: (l: number) => string; +}; +declare class CRC32C implements CRC32CValidator { + #private; + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue?: number); + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data: Buffer): void; + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input: Buffer | CRC32CValidator | string | number): boolean; + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer(): Buffer; + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON(): string; + /** + * Returns a base64-encoded representation of the CRC32C value. 
+ * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString(): string; + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf(): number; + static readonly CRC32C_EXTENSIONS: readonly [0, 4067132163, 3778769143, 324072436, 3348797215, 904991772, 648144872, 3570033899, 2329499855, 2024987596, 1809983544, 2575936315, 1296289744, 3207089363, 2893594407, 1578318884, 274646895, 3795141740, 4049975192, 51262619, 3619967088, 632279923, 922689671, 3298075524, 2592579488, 1760304291, 2075979607, 2312596564, 1562183871, 2943781820, 3156637768, 1313733451, 549293790, 3537243613, 3246849577, 871202090, 3878099393, 357341890, 102525238, 4101499445, 2858735121, 1477399826, 1264559846, 3107202533, 1845379342, 2677391885, 2361733625, 2125378298, 820201905, 3263744690, 3520608582, 598981189, 4151959214, 85089709, 373468761, 3827903834, 3124367742, 1213305469, 1526817161, 2842354314, 2107672161, 2412447074, 2627466902, 1861252501, 1098587580, 3004210879, 2688576843, 1378610760, 2262928035, 1955203488, 1742404180, 2511436119, 3416409459, 969524848, 714683780, 3639785095, 205050476, 4266873199, 3976438427, 526918040, 1361435347, 2739821008, 2954799652, 1114974503, 2529119692, 1691668175, 2005155131, 2247081528, 3690758684, 697762079, 986182379, 3366744552, 476452099, 3993867776, 4250756596, 255256311, 1640403810, 2477592673, 2164122517, 1922457750, 2791048317, 1412925310, 1197962378, 3037525897, 3944729517, 427051182, 170179418, 4165941337, 746937522, 3740196785, 3451792453, 1070968646, 1905808397, 2213795598, 2426610938, 1657317369, 3053634322, 1147748369, 1463399397, 2773627110, 4215344322, 153784257, 444234805, 3893493558, 1021025245, 3467647198, 3722505002, 797665321, 2197175160, 1889384571, 1674398607, 2443626636, 1164749927, 3070701412, 2757221520, 1446797203, 137323447, 4198817972, 3910406976, 461344835, 3484808360, 1037989803, 781091935, 3705997148, 2460548119, 1623424788, 1939049696, 2180517859, 1429367560, 2807687179, 3020495871, 1180866812, 410100952, 3927582683, 4182430767, 186734380, 3756733383, 763408580, 1053836080, 3434856499, 2722870694, 1344288421, 1131464017, 2971354706, 1708204729, 2545590714, 2229949006, 1988219213, 680717673, 3673779818, 3383336350, 1002577565, 4010310262, 493091189, 238226049, 4233660802, 2987750089, 1082061258, 1395524158, 2705686845, 1972364758, 2279892693, 2494862625, 1725896226, 952904198, 3399985413, 3656866545, 731699698, 4283874585, 222117402, 510512622, 3959836397, 3280807620, 837199303, 582374963, 3504198960, 68661723, 4135334616, 3844915500, 390545967, 1230274059, 3141532936, 2825850620, 1510247935, 2395924756, 2091215383, 1878366691, 2644384480, 3553878443, 565732008, 854102364, 3229815391, 340358836, 3861050807, 4117890627, 119113024, 1493875044, 2875275879, 3090270611, 1247431312, 2660249211, 1828433272, 2141937292, 2378227087, 3811616794, 291187481, 34330861, 4032846830, 615137029, 3603020806, 3314634738, 939183345, 1776939221, 2609017814, 2295496738, 2058945313, 2926798794, 1545135305, 1330124605, 3173225534, 4084100981, 17165430, 307568514, 3762199681, 888469610, 3332340585, 3587147933, 665062302, 2042050490, 2346497209, 2559330125, 1793573966, 3190661285, 1279665062, 1595330642, 2910671697]; + static readonly CRC32C_EXTENSION_TABLE: Int32Array; + /** + * Generates a `CRC32C` from a 
compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + private static fromBuffer; + static fromFile(file: PathLike): Promise; + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + private static fromString; + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + private static fromNumber; + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. + * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value: ArrayBuffer | ArrayBufferView | CRC32CValidator | string | number): CRC32C; +} +export { CRC32C, CRC32C_DEFAULT_VALIDATOR_GENERATOR, CRC32C_EXCEPTION_MESSAGES, CRC32C_EXTENSIONS, CRC32C_EXTENSION_TABLE, CRC32CValidator, CRC32CValidatorGenerator, }; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/crc32c.js b/node_modules/@google-cloud/storage/build/cjs/src/crc32c.js new file mode 100644 index 0000000..8847edd --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/crc32c.js @@ -0,0 +1,254 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _CRC32C_crc32c; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; +const fs_1 = require("fs"); +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +const CRC32C_EXTENSIONS = [ + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 0xc69f7b69, + 
0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, +]; +exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); +exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; +const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); +exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; +const CRC32C_EXCEPTION_MESSAGES = { + INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, + INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, + INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, +}; +exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; +class CRC32C { + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue = 0) { + /** Current CRC32C value */ + _CRC32C_crc32c.set(this, 0); + __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); + } + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data) { + let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; + for (const d of data) { + const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; + current = tablePoly ^ (current >>> 8); + } + __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); + } + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input) { + if (typeof input === 'number') { + return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + else if (typeof input === 'string') { + return input === this.toString(); + } + else if (Buffer.isBuffer(input)) { + return Buffer.compare(input, this.toBuffer()) === 0; + } + else { + // `CRC32C`-like object + return input.toString() === this.toString(); + } + } + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer() { + const buffer = Buffer.alloc(4); + buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); + return buffer; + } + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON() { + return this.toString(); + } + /** + * Returns a base64-encoded representation of the CRC32C value. 
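+ *
+ * // Editor's note: a worked example mirroring the values documented in
+ * // crc32c.d.ts -- hashing the bytes of 'data' should yield 'rth90Q==':
+ * //
+ * //   const crc32c = new CRC32C();
+ * //   crc32c.update(Buffer.from('data'));
+ * //   crc32c.toString(); // 'rth90Q=='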
+ * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString() { + return this.toBuffer().toString('base64'); + } + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf() { + return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + static fromBuffer(value) { + let buffer; + if (Buffer.isBuffer(value)) { + buffer = value; + } + else if ('buffer' in value) { + // `ArrayBufferView` + buffer = Buffer.from(value.buffer); + } + else { + // `ArrayBuffer` + buffer = Buffer.from(value); + } + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); + } + return new CRC32C(buffer.readInt32BE()); + } + static async fromFile(file) { + const crc32c = new CRC32C(); + await new Promise((resolve, reject) => { + (0, fs_1.createReadStream)(file) + .on('data', (d) => crc32c.update(d)) + .on('end', resolve) + .on('error', reject); + }); + return crc32c; + } + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + static fromString(value) { + const buffer = Buffer.from(value, 'base64'); + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); + } + return this.fromBuffer(buffer); + } + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + static fromNumber(value) { + if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); + } + return new CRC32C(value); + } + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. 
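+ *
+ * // Editor's note: an illustrative summary of the accepted inputs, based on
+ * // the factory methods above (the values shown are placeholders):
+ * //
+ * //   CRC32C.from('rth90Q==');       // 4-byte base64-encoded data
+ * //   CRC32C.from(Buffer.alloc(4));  // 4-byte Buffer / ArrayBufferView
+ * //   CRC32C.from(4294967295);       // safe, unsigned 32-bit integer
+ * //   CRC32C.from(otherValidator);   // CRC32C-compatible validator object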
+ * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value) { + if (typeof value === 'number') { + return this.fromNumber(value); + } + else if (typeof value === 'string') { + return this.fromString(value); + } + else if ('byteLength' in value) { + // `ArrayBuffer` | `Buffer` | `ArrayBufferView` + return this.fromBuffer(value); + } + else { + // `CRC32CValidator`/`CRC32C`-like + return this.fromString(value.toString()); + } + } +} +exports.CRC32C = CRC32C; +_CRC32C_crc32c = new WeakMap(); +CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/file.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/file.d.ts new file mode 100644 index 0000000..2b315a8 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/file.d.ts @@ -0,0 +1,961 @@ +/// +/// +/// +/// +import { BodyResponseCallback, DecorateRequestOptions, GetConfig, MetadataCallback, ServiceObject, SetMetadataResponse } from './nodejs-common/index.js'; +import * as resumableUpload from './resumable-upload.js'; +import { Writable, Readable, PipelineSource } from 'stream'; +import * as http from 'http'; +import { PreconditionOptions, Storage } from './storage.js'; +import { AvailableServiceObjectMethods, Bucket } from './bucket.js'; +import { Acl, AclMetadata } from './acl.js'; +import { GetSignedUrlResponse, GetSignedUrlCallback, URLSigner, SignerGetSignedUrlConfig, Query } from './signer.js'; +import { Duplexify, GCCL_GCS_CMD_KEY } from './nodejs-common/util.js'; +import { CRC32C, CRC32CValidatorGenerator } from './crc32c.js'; +import { URL } from 'url'; +import { BaseMetadata, DeleteCallback, DeleteOptions, GetResponse, InstanceResponseCallback, RequestResponse, SetMetadataOptions } from './nodejs-common/service-object.js'; +import * as r from 'teeny-request'; +export type GetExpirationDateResponse = [Date]; +export interface GetExpirationDateCallback { + (err: Error | null, expirationDate?: Date | null, apiResponse?: unknown): void; +} +export interface PolicyDocument { + string: string; + base64: string; + signature: string; +} +export type SaveData = string | Buffer | PipelineSource; +export type GenerateSignedPostPolicyV2Response = [PolicyDocument]; +export interface GenerateSignedPostPolicyV2Callback { + (err: Error | null, policy?: PolicyDocument): void; +} +export interface GenerateSignedPostPolicyV2Options { + equals?: string[] | string[][]; + expires: string | number | Date; + startsWith?: string[] | string[][]; + acl?: string; + successRedirect?: string; + successStatus?: string; + contentLengthRange?: { + min?: number; + max?: number; + }; + /** + * @example + * 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + */ + signingEndpoint?: string; +} +export interface PolicyFields { + [key: string]: string; +} +export interface GenerateSignedPostPolicyV4Options { + expires: string | number | Date; + bucketBoundHostname?: string; + virtualHostedStyle?: boolean; + conditions?: object[]; + fields?: PolicyFields; + /** + * @example + * 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + */ + signingEndpoint?: string; +} +export interface GenerateSignedPostPolicyV4Callback { + (err: Error | null, output?: SignedPostPolicyV4Output): void; +} +export type GenerateSignedPostPolicyV4Response = [SignedPostPolicyV4Output]; +export interface SignedPostPolicyV4Output { + url: string; + 
fields: PolicyFields; +} +export interface GetSignedUrlConfig extends Pick { + action: 'read' | 'write' | 'delete' | 'resumable'; + version?: 'v2' | 'v4'; + virtualHostedStyle?: boolean; + cname?: string; + contentMd5?: string; + contentType?: string; + expires: string | number | Date; + accessibleAt?: string | number | Date; + extensionHeaders?: http.OutgoingHttpHeaders; + promptSaveAs?: string; + responseDisposition?: string; + responseType?: string; + queryParams?: Query; +} +export interface GetFileMetadataOptions { + userProject?: string; +} +export type GetFileMetadataResponse = [FileMetadata, unknown]; +export interface GetFileMetadataCallback { + (err: Error | null, metadata?: FileMetadata, apiResponse?: unknown): void; +} +export interface GetFileOptions extends GetConfig { + userProject?: string; + generation?: number; + softDeleted?: boolean; +} +export type GetFileResponse = [File, unknown]; +export interface GetFileCallback { + (err: Error | null, file?: File, apiResponse?: unknown): void; +} +export interface FileExistsOptions { + userProject?: string; +} +export type FileExistsResponse = [boolean]; +export interface FileExistsCallback { + (err: Error | null, exists?: boolean): void; +} +export interface DeleteFileOptions { + ignoreNotFound?: boolean; + userProject?: string; +} +export type DeleteFileResponse = [unknown]; +export interface DeleteFileCallback { + (err: Error | null, apiResponse?: unknown): void; +} +export type PredefinedAcl = 'authenticatedRead' | 'bucketOwnerFullControl' | 'bucketOwnerRead' | 'private' | 'projectPrivate' | 'publicRead'; +type PublicResumableUploadOptions = 'chunkSize' | 'highWaterMark' | 'isPartialUpload' | 'metadata' | 'origin' | 'offset' | 'predefinedAcl' | 'private' | 'public' | 'uri' | 'userProject'; +export interface CreateResumableUploadOptions extends Pick { + /** + * A CRC32C to resume from when continuing a previous upload. It is recommended + * to capture the `crc32c` event from previous upload sessions to provide in + * subsequent requests in order to accurately track the upload. This is **required** + * when validating a final portion of the uploaded object. + * + * @see {@link CRC32C.from} for possible values. 
+ */ + resumeCRC32C?: Parameters<(typeof CRC32C)['from']>[0]; + preconditionOpts?: PreconditionOptions; + [GCCL_GCS_CMD_KEY]?: resumableUpload.UploadConfig[typeof GCCL_GCS_CMD_KEY]; +} +export type CreateResumableUploadResponse = [string]; +export interface CreateResumableUploadCallback { + (err: Error | null, uri?: string): void; +} +export interface CreateWriteStreamOptions extends CreateResumableUploadOptions { + contentType?: string; + gzip?: string | boolean; + resumable?: boolean; + timeout?: number; + validation?: string | boolean; +} +export interface MakeFilePrivateOptions { + metadata?: FileMetadata; + strict?: boolean; + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type MakeFilePrivateResponse = [unknown]; +export type MakeFilePrivateCallback = SetFileMetadataCallback; +export interface IsPublicCallback { + (err: Error | null, resp?: boolean): void; +} +export type IsPublicResponse = [boolean]; +export type MakeFilePublicResponse = [unknown]; +export interface MakeFilePublicCallback { + (err?: Error | null, apiResponse?: unknown): void; +} +export type MoveResponse = [unknown]; +export interface MoveCallback { + (err: Error | null, destinationFile?: File | null, apiResponse?: unknown): void; +} +export interface MoveOptions { + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type RenameOptions = MoveOptions; +export type RenameResponse = MoveResponse; +export type RenameCallback = MoveCallback; +export type RotateEncryptionKeyOptions = string | Buffer | EncryptionKeyOptions; +export interface EncryptionKeyOptions { + encryptionKey?: string | Buffer; + kmsKeyName?: string; + preconditionOpts?: PreconditionOptions; +} +export type RotateEncryptionKeyCallback = CopyCallback; +export type RotateEncryptionKeyResponse = CopyResponse; +export declare enum ActionToHTTPMethod { + read = "GET", + write = "PUT", + delete = "DELETE", + resumable = "POST" +} +/** + * @deprecated - no longer used + */ +export declare const STORAGE_POST_POLICY_BASE_URL = "https://storage.googleapis.com"; +export interface FileOptions { + crc32cGenerator?: CRC32CValidatorGenerator; + encryptionKey?: string | Buffer; + generation?: number | string; + kmsKeyName?: string; + preconditionOpts?: PreconditionOptions; + userProject?: string; +} +export interface CopyOptions { + cacheControl?: string; + contentEncoding?: string; + contentType?: string; + contentDisposition?: string; + destinationKmsKeyName?: string; + metadata?: FileMetadata; + predefinedAcl?: string; + token?: string; + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type CopyResponse = [File, unknown]; +export interface CopyCallback { + (err: Error | null, file?: File | null, apiResponse?: unknown): void; +} +export type DownloadResponse = [Buffer]; +export type DownloadCallback = (err: RequestError | null, contents: Buffer) => void; +export interface DownloadOptions extends CreateReadStreamOptions { + destination?: string; +} +export interface CreateReadStreamOptions { + userProject?: string; + validation?: 'md5' | 'crc32c' | false | true; + start?: number; + end?: number; + decompress?: boolean; + [GCCL_GCS_CMD_KEY]?: string; +} +export interface SaveOptions extends CreateWriteStreamOptions { + onUploadProgress?: (progressEvent: any) => void; +} +export interface SaveCallback { + (err?: Error | null): void; +} +export interface SetFileMetadataOptions { + userProject?: string; +} +export interface SetFileMetadataCallback { + (err?: Error | null, apiResponse?: 
unknown): void; +} +export type SetFileMetadataResponse = [unknown]; +export type SetStorageClassResponse = [unknown]; +export interface SetStorageClassOptions { + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export interface SetStorageClassCallback { + (err?: Error | null, apiResponse?: unknown): void; +} +export interface RestoreOptions extends PreconditionOptions { + generation: number; + projection?: 'full' | 'noAcl'; +} +export interface FileMetadata extends BaseMetadata { + acl?: AclMetadata[] | null; + bucket?: string; + cacheControl?: string; + componentCount?: number; + contentDisposition?: string; + contentEncoding?: string; + contentLanguage?: string; + contentType?: string; + crc32c?: string; + customerEncryption?: { + encryptionAlgorithm?: string; + keySha256?: string; + }; + customTime?: string; + eventBasedHold?: boolean | null; + readonly eventBasedHoldReleaseTime?: string; + generation?: string | number; + hardDeleteTime?: string; + kmsKeyName?: string; + md5Hash?: string; + mediaLink?: string; + metadata?: { + [key: string]: string; + }; + metageneration?: string | number; + name?: string; + owner?: { + entity?: string; + entityId?: string; + }; + retention?: { + retainUntilTime?: string; + mode?: string; + } | null; + retentionExpirationTime?: string; + size?: string | number; + softDeleteTime?: string; + storageClass?: string; + temporaryHold?: boolean | null; + timeCreated?: string; + timeDeleted?: string; + timeStorageClassUpdated?: string; + updated?: string; +} +export declare class RequestError extends Error { + code?: string; + errors?: Error[]; +} +export declare enum FileExceptionMessages { + EXPIRATION_TIME_NA = "An expiration time is not available.", + DESTINATION_NO_NAME = "Destination file should have a name.", + INVALID_VALIDATION_FILE_RANGE = "Cannot use validation with file ranges (start/end).", + MD5_NOT_AVAILABLE = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects.", + EQUALS_CONDITION_TWO_ELEMENTS = "Equals condition must be an array of 2 elements.", + STARTS_WITH_TWO_ELEMENTS = "StartsWith condition must be an array of 2 elements.", + CONTENT_LENGTH_RANGE_MIN_MAX = "ContentLengthRange must have numeric min & max fields.", + DOWNLOAD_MISMATCH = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again.", + UPLOAD_MISMATCH_DELETE_FAIL = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n ", + UPLOAD_MISMATCH = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again.", + MD5_RESUMED_UPLOAD = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value", + MISSING_RESUME_CRC32C_FINAL_UPLOAD = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`." +} +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. 
+ * + * @class + */ +declare class File extends ServiceObject { + #private; + acl: Acl; + crc32cGenerator: CRC32CValidatorGenerator; + bucket: Bucket; + storage: Storage; + kmsKeyName?: string; + userProject?: string; + signer?: URLSigner; + name: string; + generation?: number; + parent: Bucket; + private encryptionKey?; + private encryptionKeyBase64?; + private encryptionKeyHash?; + private encryptionKeyInterceptor?; + private instanceRetryValue?; + instancePreconditionOpts?: PreconditionOptions; + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. 
+ * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. + * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket: Bucket, name: string, options?: FileOptions); + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI(): URL; + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + private shouldRetryBasedOnPreconditionAndIdempotencyStrat; + copy(destination: string | Bucket | File, options?: CopyOptions): Promise; + copy(destination: string | Bucket | File, callback: CopyCallback): void; + copy(destination: string | Bucket | File, options: CopyOptions, callback: CopyCallback): void; + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. 
+ * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //
+ * // <h4>Downloading a File</h4>
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options?: CreateReadStreamOptions): Readable; + createResumableUpload(options?: CreateResumableUploadOptions): Promise; + createResumableUpload(options: CreateResumableUploadOptions, callback: CreateResumableUploadCallback): void; + createResumableUpload(callback: CreateResumableUploadCallback): void; + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. + * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. 
(Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *
+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //
+ * // <h4>Uploading a File</h4>
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //
+ * // <h4>Uploading a File with gzip compression</h4>
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //
+ * // <h4>Uploading a File with Metadata</h4>
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + * + * //- + * //
+ * // <h4>Continuing a Resumable Upload</h4>
+ * // + * // One can capture a `uri` from a resumable upload to reuse later. + * // Additionally, for validation, one can also capture and pass `crc32c`. + * //- + * let uri: string | undefined = undefined; + * let resumeCRC32C: string | undefined = undefined; + * + * fs.createWriteStream() + * .on('uri', link => {uri = link}) + * .on('crc32', crc32c => {resumeCRC32C = crc32c}); + * + * // later... + * fs.createWriteStream({uri, resumeCRC32C}); + */ + createWriteStream(options?: CreateWriteStreamOptions): Writable; + /** + * Delete the object. + * + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + delete(options?: DeleteOptions): Promise<[r.Response]>; + delete(options: DeleteOptions, callback: DeleteCallback): void; + delete(callback: DeleteCallback): void; + download(options?: DownloadOptions): Promise; + download(options: DownloadOptions, callback: DownloadCallback): void; + download(callback: DownloadCallback): void; + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. + * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey: string | Buffer): this; + get(options?: GetFileOptions): Promise>; + get(callback: InstanceResponseCallback): void; + get(options: GetFileOptions, callback: InstanceResponseCallback): void; + getExpirationDate(): Promise; + getExpirationDate(callback: GetExpirationDateCallback): void; + generateSignedPostPolicyV2(options: GenerateSignedPostPolicyV2Options): Promise; + generateSignedPostPolicyV2(options: GenerateSignedPostPolicyV2Options, callback: GenerateSignedPostPolicyV2Callback): void; + generateSignedPostPolicyV2(callback: GenerateSignedPostPolicyV2Callback): void; + generateSignedPostPolicyV4(options: GenerateSignedPostPolicyV4Options): Promise; + generateSignedPostPolicyV4(options: GenerateSignedPostPolicyV4Options, callback: GenerateSignedPostPolicyV4Callback): void; + generateSignedPostPolicyV4(callback: GenerateSignedPostPolicyV4Callback): void; + getSignedUrl(cfg: 
GetSignedUrlConfig): Promise; + getSignedUrl(cfg: GetSignedUrlConfig, callback: GetSignedUrlCallback): void; + isPublic(): Promise; + isPublic(callback: IsPublicCallback): void; + makePrivate(options?: MakeFilePrivateOptions): Promise; + makePrivate(callback: MakeFilePrivateCallback): void; + makePrivate(options: MakeFilePrivateOptions, callback: MakeFilePrivateCallback): void; + makePublic(): Promise; + makePublic(callback: MakeFilePublicCallback): void; + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl(): string; + move(destination: string | Bucket | File, options?: MoveOptions): Promise; + move(destination: string | Bucket | File, callback: MoveCallback): void; + move(destination: string | Bucket | File, options: MoveOptions, callback: MoveCallback): void; + rename(destinationFile: string | File, options?: RenameOptions): Promise; + rename(destinationFile: string | File, callback: RenameCallback): void; + rename(destinationFile: string | File, options: RenameOptions, callback: RenameCallback): void; + /** + * @typedef {object} RestoreOptions Options for File#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + * @param {string | number} [ifGenerationMatch] Request proceeds if the generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifGenerationNotMatch] Request proceeds if the generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + * @param {string | number} [ifMetagenerationMatch] Request proceeds if the meta-generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifMetagenerationNotMatch] Request proceeds if the meta-generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + */ + /** + * Restores a soft-deleted file + * @param {RestoreOptions} options Restore options. 
+ * @returns {Promise} + */ + restore(options: RestoreOptions): Promise; + request(reqOpts: DecorateRequestOptions): Promise; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + rotateEncryptionKey(options?: RotateEncryptionKeyOptions): Promise; + rotateEncryptionKey(callback: RotateEncryptionKeyCallback): void; + rotateEncryptionKey(options: RotateEncryptionKeyOptions, callback: RotateEncryptionKeyCallback): void; + save(data: SaveData, options?: SaveOptions): Promise; + save(data: SaveData, callback: SaveCallback): void; + save(data: SaveData, options: SaveOptions, callback: SaveCallback): void; + setMetadata(metadata: FileMetadata, options?: SetMetadataOptions): Promise>; + setMetadata(metadata: FileMetadata, callback: MetadataCallback): void; + setMetadata(metadata: FileMetadata, options: SetMetadataOptions, callback: MetadataCallback): void; + setStorageClass(storageClass: string, options?: SetStorageClassOptions): Promise; + setStorageClass(storageClass: string, options: SetStorageClassOptions, callback: SetStorageClassCallback): void; + setStorageClass(storageClass: string, callback?: SetStorageClassCallback): void; + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject: string): void; + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup: Duplexify, options?: CreateResumableUploadOptions): void; + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup: Duplexify, options?: CreateWriteStreamOptions): void; + disableAutoRetryConditionallyIdempotent_(coreOpts: any, methodType: AvailableServiceObjectMethods, localPreconditionOptions?: PreconditionOptions): void; + private getBufferFromReadable; +} +/** + * Reference to the {@link File} class. + * @name module:@google-cloud/storage.File + * @see File + */ +export { File }; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/file.js b/node_modules/@google-cloud/storage/build/cjs/src/file.js new file mode 100644 index 0000000..34c23a2 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/file.js @@ -0,0 +1,3348 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _File_instances, _File_validateIntegrity; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; +const index_js_1 = require("./nodejs-common/index.js"); +const promisify_1 = require("@google-cloud/promisify"); +const compressible_1 = __importDefault(require("compressible")); +const crypto = __importStar(require("crypto")); +const fs = __importStar(require("fs")); +const mime_1 = __importDefault(require("mime")); +const resumableUpload = __importStar(require("./resumable-upload.js")); +const stream_1 = require("stream"); +const zlib = __importStar(require("zlib")); +const storage_js_1 = require("./storage.js"); +const bucket_js_1 = require("./bucket.js"); +const acl_js_1 = require("./acl.js"); +const signer_js_1 = require("./signer.js"); +const util_js_1 = require("./nodejs-common/util.js"); +const duplexify_1 = __importDefault(require("duplexify")); +const util_js_2 = require("./util.js"); +const crc32c_js_1 = require("./crc32c.js"); +const hash_stream_validator_js_1 = require("./hash-stream-validator.js"); +const async_retry_1 = __importDefault(require("async-retry")); +var ActionToHTTPMethod; +(function (ActionToHTTPMethod) { + ActionToHTTPMethod["read"] = "GET"; + ActionToHTTPMethod["write"] = "PUT"; + ActionToHTTPMethod["delete"] = "DELETE"; + ActionToHTTPMethod["resumable"] = "POST"; +})(ActionToHTTPMethod || (exports.ActionToHTTPMethod = ActionToHTTPMethod = {})); +/** + * @deprecated - no longer used + */ +exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; +/** + * @private + */ +const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; +class RequestError extends Error { +} +exports.RequestError = 
RequestError; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +var FileExceptionMessages; +(function (FileExceptionMessages) { + FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; + FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; + FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; + FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; + FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; + FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; + FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; + FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again."; + FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; + FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; + FileExceptionMessages["MD5_RESUMED_UPLOAD"] = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value"; + FileExceptionMessages["MISSING_RESUME_CRC32C_FINAL_UPLOAD"] = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."; +})(FileExceptionMessages || (exports.FileExceptionMessages = FileExceptionMessages = {})); +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. + * + * @class + */ +class File extends index_js_1.ServiceObject { + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. 
+ * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. + * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. 
+ * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket, name, options = {}) { + var _a, _b; + const requestQueryObject = {}; + let generation; + if (options.generation !== null) { + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + requestQueryObject.generation = generation; + } + } + Object.assign(requestQueryObject, options.preconditionOpts); + const userProject = options.userProject || bucket.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * @typedef {array} DeleteFileResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteFileCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @method File#delete + * @param {object} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the file does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_delete_file + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} FileExistsResponse + * @property {boolean} 0 Whether the {@link File} exists. + */ + /** + * @callback FileExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link File} exists. + */ + /** + * Check if the file exists. + * + * @method File#exists + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {FileExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileResponse + * @property {File} 0 The {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileCallback + * @param {?Error} err Request error, if any. + * @param {File} file The {@link File}. 
+ * @param {object} apiResponse The full API response. + */ + /** + * Get a file object and its metadata if it exists. + * + * @method File#get + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {number} [options.generation] The generation number to get + * @param {boolean} [options.softDeleted] If true, returns the soft-deleted object. + Object `generation` is required if `softDeleted` is set to True. + * @param {GetFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.get(function(err, file, apiResponse) { + * // file.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.get().then(function(data) { + * const file = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileMetadataResponse + * @property {object} 0 The {@link File} metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The {@link File} metadata. + * @param {object} apiResponse The full API response. + */ + /** + * Get the file's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} + * + * @method File#getMetadata + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_get_metadata + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). + * @param {string} [userProject] The ID of the project which will be billed for the request. + */ + /** + * @callback SetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} SetFileMetadataResponse + * @property {object} 0 The full API response. + */ + /** + * Merge the given metadata with the current remote file's metadata. This + * will set metadata if it was previously unset or update previously set + * metadata. To unset previously set metadata, set its value to null. + * + * You can set custom key/value pairs in the metadata key of the given + * object, however the other properties outside of this object must adhere + * to the {@link https://goo.gl/BOnnCK| official API documentation}. 
+ * + * + * See the examples below for more information. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @method File#setMetadata + * @param {object} [metadata] The metadata you wish to update. + * @param {SetFileMetadataOptions} [options] Configuration options. + * @param {SetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * const metadata = { + * contentType: 'application/x-font-ttf', + * metadata: { + * my: 'custom', + * properties: 'go here' + * } + * }; + * + * file.setMetadata(metadata, function(err, apiResponse) {}); + * + * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } + * file.setMetadata({ + * metadata: { + * abc: '123', // will be set. + * unsetMe: null, // will be unset (deleted). + * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. + * } + * }, function(err, apiResponse) { + * // metadata should now be { abc: '123', hello: 'goodbye' } + * }); + * + * //- + * // Set a temporary hold on this file from its bucket's retention period + * // configuration. + * // + * file.setMetadata({ + * temporaryHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Alternatively, you may set a temporary hold. This will follow the + * // same behavior as an event-based hold, with the exception that the + * // bucket's retention policy will not renew for this file from the time + * // the hold is released. + * //- + * file.setMetadata({ + * eventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: bucket, + baseUrl: '/o', + id: encodeURIComponent(name), + methods, + }); + _File_instances.add(this); + this.bucket = bucket; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.storage = bucket.parent; + // @TODO Can this duplicate code from above be avoided? + if (options.generation !== null) { + let generation; + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + this.generation = generation; + } + } + this.kmsKeyName = options.kmsKeyName; + this.userProject = userProject; + this.name = name; + if (options.encryptionKey) { + this.setEncryptionKey(options.encryptionKey); + } + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.bucket.crc32cGenerator; + this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? 
void 0 : options.preconditionOpts; + } + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = this.bucket.cloudStorageURI; + uri.pathname = this.name; + return uri; + } + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { + var _a; + return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && + ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever); + } + /** + * @typedef {array} CopyResponse + * @property {File} 0 The copied {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback CopyCallback + * @param {?Error} err Request error, if any. + * @param {File} copiedFile The copied {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} CopyOptions Configuration options for File#copy(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [cacheControl] The cacheControl setting for the new file. + * @property {string} [contentEncoding] The contentEncoding setting for the new file. + * @property {string} [contentType] The contentType setting for the new file. + * @property {string} [destinationKmsKeyName] Resource name of the Cloud + * KMS key, of the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {Metadata} [metadata] Metadata to specify on the copied file. + * @property {string} [predefinedAcl] Set the ACL for the new file. + * @property {string} [token] A previously-returned `rewriteToken` from an + * unfinished rewrite request. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Copy this file to another file. By default, this will copy the file to the + * same bucket, but you can choose to copy it to another Bucket by providing + * a Bucket or File object or a URL starting with "gs://". + * The generation of the file will not be preserved. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {CopyOptions} [options] Configuration options. See an + * @param {CopyCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { + * // `my-bucket` now contains: + * // - "my-image.png" + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-copy.png'; + * file.copy(newLocation, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be copied to that bucket + * // using the same name. + * //- + * const anotherBucket = storage.bucket('another-bucket'); + * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * file.copy(anotherFile, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `copiedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.copy(newLocation).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_copy_file + * Another example: + */ + copy(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destBucket; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destBucket = this.storage.bucket(parsedDestination[1]); + destName = parsedDestination[2]; + } + else { + destBucket = this.bucket; + destName = destination; + } + } + else if (destination instanceof bucket_js_1.Bucket) { + destBucket = destination; + destName = this.name; + } + else if (destination instanceof File) { + destBucket = destination.bucket; + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + const query = {}; + if (this.generation !== undefined) { + query.sourceGeneration = this.generation; + } + if (options.token !== undefined) { + query.rewriteToken = options.token; + } + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (options.predefinedAcl !== undefined) { + query.destinationPredefinedAcl = options.predefinedAcl; + delete options.predefinedAcl; + } + newFile = newFile || destBucket.file(destName); + const headers = {}; + if (this.encryptionKey !== undefined) { + headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; + headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; + headers['x-goog-copy-source-encryption-key-sha256'] = + this.encryptionKeyHash; + } + if (newFile.encryptionKey !== undefined) { + this.setEncryptionKey(newFile.encryptionKey); + } + else if (options.destinationKmsKeyName !== undefined) { + query.destinationKmsKeyName = options.destinationKmsKeyName; + delete options.destinationKmsKeyName; + } + else if (newFile.kmsKeyName !== undefined) { + query.destinationKmsKeyName = newFile.kmsKeyName; + } + if (query.destinationKmsKeyName) { + this.kmsKeyName = query.destinationKmsKeyName; + const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); + if (keyIndex > -1) { + this.interceptors.splice(keyIndex, 1); + } + } + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + headers, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + if (resp.rewriteToken) { + const options = { + token: resp.rewriteToken, + }; + if (query.userProject) { + options.userProject = query.userProject; + } + if (query.destinationKmsKeyName) { + options.destinationKmsKeyName = query.destinationKmsKeyName; + } + this.copy(newFile, options, callback); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. + * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //
<h4>Downloading a File</h4>
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options = {}) { + options = Object.assign({ decompress: true }, options); + const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; + const tailRequest = options.end < 0; + let validateStream = undefined; + let request = undefined; + const throughStream = new util_js_2.PassThroughShim(); + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + const value = options.validation.toLowerCase().trim(); + crc32c = value === 'crc32c'; + md5 = value === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + } + const shouldRunValidation = !rangeRequest && (crc32c || md5); + if (rangeRequest) { + if (typeof options.validation === 'string' || + options.validation === true) { + throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); + } + // Range requests can't receive data integrity checks. + crc32c = false; + md5 = false; + } + const onComplete = (err) => { + if (err) { + // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed. + // This causes a memory leak, so cleanup the sockets manually here by destroying the agent. + if (request === null || request === void 0 ? void 0 : request.agent) { + request.agent.destroy(); + } + throughStream.destroy(err); + } + }; + // We listen to the response event from the request stream so that we + // can... + // + // 1) Intercept any data from going to the user if an error occurred. + // 2) Calculate the hashes from the http.IncomingMessage response + // stream, + // which will return the bytes from the source without decompressing + // gzip'd content. We then send it through decompressed, if + // applicable, to the user. + const onResponse = (err, _body, rawResponseStream) => { + if (err) { + // Get error message from the body. 
+ this.getBufferFromReadable(rawResponseStream).then(body => { + err.message = body.toString('utf8'); + throughStream.destroy(err); + }); + return; + } + request = rawResponseStream.request; + const headers = rawResponseStream.toJSON().headers; + const isCompressed = headers['content-encoding'] === 'gzip'; + const hashes = {}; + // The object is safe to validate if: + // 1. It was stored gzip and returned to us gzip OR + // 2. It was never stored as gzip + const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && + isCompressed) || + headers['x-goog-stored-content-encoding'] === 'identity'; + const transformStreams = []; + if (shouldRunValidation) { + // The x-goog-hash header should be set with a crc32c and md5 hash. + // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' + if (typeof headers['x-goog-hash'] === 'string') { + headers['x-goog-hash'] + .split(',') + .forEach((hashKeyValPair) => { + const delimiterIndex = hashKeyValPair.indexOf('='); + const hashType = hashKeyValPair.substring(0, delimiterIndex); + const hashValue = hashKeyValPair.substring(delimiterIndex + 1); + hashes[hashType] = hashValue; + }); + } + validateStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + crc32cExpected: hashes.crc32c, + md5Expected: hashes.md5, + }); + } + if (md5 && !hashes.md5) { + const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); + hashError.code = 'MD5_NOT_AVAILABLE'; + throughStream.destroy(hashError); + return; + } + if (safeToValidate && shouldRunValidation && validateStream) { + transformStreams.push(validateStream); + } + if (isCompressed && options.decompress) { + transformStreams.push(zlib.createGunzip()); + } + (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); + }; + // Authenticate the request, then pipe the remote API request to the stream + // returned to the user. + const makeRequest = () => { + const query = { alt: 'media' }; + if (this.generation) { + query.generation = this.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + const headers = { + 'Accept-Encoding': 'gzip', + 'Cache-Control': 'no-store', + }; + if (rangeRequest) { + const start = typeof options.start === 'number' ? options.start : '0'; + const end = typeof options.end === 'number' ? options.end : ''; + headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; + } + const reqOpts = { + uri: '', + headers, + qs: query, + }; + if (options[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts[util_js_1.GCCL_GCS_CMD_KEY] = options[util_js_1.GCCL_GCS_CMD_KEY]; + } + this.requestStream(reqOpts) + .on('error', err => { + throughStream.destroy(err); + }) + .on('response', res => { + throughStream.emit('response', res); + index_js_1.util.handleResp(null, res, null, onResponse); + }) + .resume(); + }; + throughStream.on('reading', makeRequest); + return throughStream; + } + /** + * @callback CreateResumableUploadCallback + * @param {?Error} err Request error, if any. + * @param {string} uri The resumable upload's unique session URI. + */ + /** + * @typedef {array} CreateResumableUploadResponse + * @property {string} 0 The resumable upload's unique session URI. + */ + /** + * @typedef {object} CreateResumableUploadOptions + * @property {object} [metadata] Metadata to set on the file. + * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. 
+ * @property {string} [origin] Origin header to set for the upload. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string} [chunkSize] Create a separate request per chunk. This + * value is in bytes and should be a multiple of 256 KiB (2^18). + * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} + */ + /** + * Create a unique resumable upload session URI. This is the first step when + * performing a resumable upload. + * + * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * for more on how the entire process works. + * + *
<h4>Note</h4>
+ * + * If you are just looking to perform a resumable upload without worrying + * about any of the details, see {@link File#createWriteStream}. Resumable + * uploads are performed by default. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * + * @param {CreateResumableUploadOptions} [options] Configuration options. + * @param {CreateResumableUploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.createResumableUpload(function(err, uri) { + * if (!err) { + * // `uri` can be used to PUT data to. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.createResumableUpload().then(function(data) { + * const uri = data[0]; + * }); + * ``` + */ + createResumableUpload(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const retryOptions = this.storage.retryOptions; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + retryOptions.autoRetry = false; + } + resumableUpload.createURI({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + origin: options.origin, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + userProject: options.userProject || this.userProject, + retryOptions: retryOptions, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + universeDomain: this.bucket.storage.universeDomain, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }, callback); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. 
+ * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *
<p> + * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + * </p>
+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //
<h4>Uploading a File</h4>
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //
<h4>Uploading a File with gzip compression</h4>
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //
<h4>Uploading a File with Metadata</h4>
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + * + * //- + * //
<h4>Continuing a Resumable Upload</h4>
+ * // + * // One can capture a `uri` from a resumable upload to reuse later. + * // Additionally, for validation, one can also capture and pass `crc32c`. + * //- + * let uri: string | undefined = undefined; + * let resumeCRC32C: string | undefined = undefined; + * + * fs.createWriteStream() + * .on('uri', link => {uri = link}) + * .on('crc32', crc32c => {resumeCRC32C = crc32c}); + * + * // later... + * fs.createWriteStream({uri, resumeCRC32C}); + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + createWriteStream(options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); + if (options.contentType) { + options.metadata.contentType = options.contentType; + } + if (!options.metadata.contentType || + options.metadata.contentType === 'auto') { + const detectedContentType = mime_1.default.getType(this.name); + if (detectedContentType) { + options.metadata.contentType = detectedContentType; + } + } + let gzip = options.gzip; + if (gzip === 'auto') { + gzip = (0, compressible_1.default)(options.metadata.contentType || ''); + } + if (gzip) { + options.metadata.contentEncoding = 'gzip'; + } + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + options.validation = options.validation.toLowerCase(); + crc32c = options.validation === 'crc32c'; + md5 = options.validation === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + md5 = false; + } + if (options.offset) { + if (md5) { + throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD); + } + if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) { + throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD); + } + } + /** + * A callback for determining when the underlying pipeline is complete. + * It's possible the pipeline callback could error before the write stream + * calls `final` so by default this will destroy the write stream unless the + * write stream sets this callback via its `final` handler. + * @param error An optional error + */ + let pipelineCallback = error => { + writeStream.destroy(error || undefined); + }; + // A stream for consumer to write to + const writeStream = new stream_1.Writable({ + final(cb) { + // Set the pipeline callback to this callback so the pipeline's results + // can be populated to the consumer + pipelineCallback = cb; + emitStream.end(); + }, + write(chunk, encoding, cb) { + emitStream.write(chunk, encoding, cb); + }, + }); + const transformStreams = []; + if (gzip) { + transformStreams.push(zlib.createGzip()); + } + const emitStream = new util_js_2.PassThroughShim(); + let hashCalculatingStream = null; + if (crc32c || md5) { + const crc32cInstance = options.resumeCRC32C + ? 
crc32c_js_1.CRC32C.from(options.resumeCRC32C) + : undefined; + hashCalculatingStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + crc32cInstance, + md5, + crc32cGenerator: this.crc32cGenerator, + updateHashesOnly: true, + }); + transformStreams.push(hashCalculatingStream); + } + const fileWriteStream = (0, duplexify_1.default)(); + let fileWriteStreamMetadataReceived = false; + // Handing off emitted events to users + emitStream.on('reading', () => writeStream.emit('reading')); + emitStream.on('writing', () => writeStream.emit('writing')); + fileWriteStream.on('uri', evt => writeStream.emit('uri', evt)); + fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); + fileWriteStream.on('response', resp => writeStream.emit('response', resp)); + fileWriteStream.once('metadata', () => { + fileWriteStreamMetadataReceived = true; + }); + writeStream.once('writing', () => { + if (options.resumable === false) { + this.startSimpleUpload_(fileWriteStream, options); + } + else { + this.startResumableUpload_(fileWriteStream, options); + } + (0, stream_1.pipeline)(emitStream, ...transformStreams, fileWriteStream, async (e) => { + if (e) { + return pipelineCallback(e); + } + // We want to make sure we've received the metadata from the server in order + // to properly validate the object's integrity. Depending on the type of upload, + // the stream could close before the response is returned. + if (!fileWriteStreamMetadataReceived) { + try { + await new Promise((resolve, reject) => { + fileWriteStream.once('metadata', resolve); + fileWriteStream.once('error', reject); + }); + } + catch (e) { + return pipelineCallback(e); + } + } + // Emit the local CRC32C value for future validation, if validation is enabled. + if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) { + writeStream.emit('crc32c', hashCalculatingStream.crc32c); + } + try { + // Metadata may not be ready if the upload is a partial upload, + // nothing to validate yet. + const metadataNotReady = options.isPartialUpload && !this.metadata; + if (hashCalculatingStream && !metadataNotReady) { + await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { + crc32c, + md5, + }); + } + pipelineCallback(); + } + catch (e) { + pipelineCallback(e); + } + }); + }); + return writeStream; + } + delete(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_js_1.AvailableServiceObjectMethods.delete, options); + super + .delete(options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} DownloadResponse + * @property [0] The contents of a File. + */ + /** + * @callback DownloadCallback + * @param err Request error, if any. + * @param contents The contents of a File. + */ + /** + * Convenience method to download a file into memory or to a local + * destination. + * + * @param {object} [options] Configuration options. The arguments match those + * passed to {@link File#createReadStream}. + * @param {string} [options.destination] Local file path to write the file's + * contents to. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {DownloadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Download a file into memory. The contents will be available as the + * second + * // argument in the demonstration below, `contents`. + * //- + * file.download(function(err, contents) {}); + * + * //- + * // Download a file to a local destination. + * //- + * file.download({ + * destination: '/Users/me/Desktop/file-backup.txt' + * }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.download().then(function(data) { + * const contents = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_download_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + * + * @example include:samples/requesterPays.js + * region_tag:storage_download_file_requester_pays + * Example of downloading a file where the requester pays: + */ + download(optionsOrCallback, cb) { + let options; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + let called = false; + const callback = ((...args) => { + if (!called) + cb(...args); + called = true; + }); + const destination = options.destination; + delete options.destination; + const fileStream = this.createReadStream(options); + let receivedData = false; + if (destination) { + fileStream + .on('error', callback) + .once('data', data => { + // We know that the file exists the server - now we can truncate/write to a file + receivedData = true; + const writable = fs.createWriteStream(destination); + writable.write(data); + fileStream + .pipe(writable) + .on('error', callback) + .on('finish', callback); + }) + .on('end', () => { + // In the case of an empty file no data will be received before the end event fires + if (!receivedData) { + const data = Buffer.alloc(0); + try { + fs.writeFileSync(destination, data); + callback(null, data); + } + catch (e) { + callback(e, data); + } + } + }); + } + else { + this.getBufferFromReadable(fileStream) + .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) + .catch(callback); + } + } + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. 
+ * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey) { + this.encryptionKey = encryptionKey; + this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); + this.encryptionKeyHash = crypto + .createHash('sha256') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .update(this.encryptionKeyBase64, 'base64') + .digest('base64'); + this.encryptionKeyInterceptor = { + request: reqOpts => { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryptionKeyHash; + return reqOpts; + }, + }; + this.interceptors.push(this.encryptionKeyInterceptor); + return this; + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + super + .get(options) + .then(resp => cb(null, ...resp)) + .catch(cb); + } + /** + * @typedef {array} GetExpirationDateResponse + * @property {date} 0 A Date object representing the earliest time this file's + * retention policy will expire. + */ + /** + * @callback GetExpirationDateCallback + * @param {?Error} err Request error, if any. + * @param {date} expirationDate A Date object representing the earliest time + * this file's retention policy will expire. + */ + /** + * If this bucket has a retention policy defined, use this method to get a + * Date object representing the earliest time this file will expire. + * + * @param {GetExpirationDateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getExpirationDate(function(err, expirationDate) { + * // expirationDate is a Date object. 
+ * }); + * ``` + */ + getExpirationDate(callback) { + this.getMetadata((err, metadata, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + if (!metadata.retentionExpirationTime) { + const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); + callback(error, null, apiResponse); + return; + } + callback(null, new Date(metadata.retentionExpirationTime), apiResponse); + }); + } + /** + * @typedef {array} GenerateSignedPostPolicyV2Response + * @property {object} 0 The document policy. + */ + /** + * @callback GenerateSignedPostPolicyV2Callback + * @param {?Error} err Request error, if any. + * @param {object} policy The document policy. + */ + /** + * Get a signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} + * + * @throws {Error} If an expiration timestamp from the past is given. + * @throws {Error} If options.equals has an array with less or more than two + * members. + * @throws {Error} If options.startsWith has an array with less or more than two + * members. + * + * @param {object} options Configuration options. + * @param {array|array[]} [options.equals] Array of request parameters and + * their expected value (e.g. [['$', '']]). Values are + * translated into equality constraints in the conditions field of the + * policy document (e.g. ['eq', '$', '']). If only one + * equality condition is to be specified, options.equals can be a one- + * dimensional array (e.g. ['$', '']). + * @param {*} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {array|array[]} [options.startsWith] Array of request parameters and + * their expected prefixes (e.g. [['$', '']). Values are + * translated into starts-with constraints in the conditions field of the + * policy document (e.g. ['starts-with', '$', '']). If only + * one prefix condition is to be specified, options.startsWith can be a + * one- dimensional array (e.g. ['$', '']). + * @param {string} [options.acl] ACL for the object from possibly predefined + * ACLs. + * @param {string} [options.successRedirect] The URL to which the user client + * is redirected if the upload is successful. + * @param {string} [options.successStatus] - The status of the Google Storage + * response if the upload is successful (must be string). + * @param {object} [options.contentLengthRange] + * @param {number} [options.contentLengthRange.min] Minimum value for the + * request's content length. + * @param {number} [options.contentLengthRange.max] Maximum value for the + * request's content length. + * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * equals: ['$Content-Type', 'image/jpeg'], + * expires: '10-25-2022', + * contentLengthRange: { + * min: 0, + * max: 1024 + * } + * }; + * + * file.generateSignedPostPolicyV2(options, function(err, policy) { + * // policy.string: the policy document in plain text. + * // policy.base64: the policy document in base64. + * // policy.signature: the policy signature in base64. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV2(options).then(function(data) { + * const policy = data[0]; + * }); + * ``` + */ + generateSignedPostPolicyV2(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + options = Object.assign({}, options); + const conditions = [ + ['eq', '$key', this.name], + { + bucket: this.bucket.name, + }, + ]; + if (Array.isArray(options.equals)) { + if (!Array.isArray(options.equals[0])) { + options.equals = [options.equals]; + } + options.equals.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); + } + conditions.push(['eq', condition[0], condition[1]]); + }); + } + if (Array.isArray(options.startsWith)) { + if (!Array.isArray(options.startsWith[0])) { + options.startsWith = [options.startsWith]; + } + options.startsWith.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); + } + conditions.push(['starts-with', condition[0], condition[1]]); + }); + } + if (options.acl) { + conditions.push({ + acl: options.acl, + }); + } + if (options.successRedirect) { + conditions.push({ + success_action_redirect: options.successRedirect, + }); + } + if (options.successStatus) { + conditions.push({ + success_action_status: options.successStatus, + }); + } + if (options.contentLengthRange) { + const min = options.contentLengthRange.min; + const max = options.contentLengthRange.max; + if (typeof min !== 'number' || typeof max !== 'number') { + throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); + } + conditions.push(['content-length-range', min, max]); + } + const policy = { + expiration: expires.toISOString(), + conditions, + }; + const policyString = JSON.stringify(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + this.storage.authClient.sign(policyBase64, options.signingEndpoint).then(signature => { + callback(null, { + string: policyString, + base64: policyBase64, + signature, + }); + }, err => { + callback(new signer_js_1.SigningError(err.message)); + }); + } + /** + * @typedef {object} SignedPostPolicyV4Output + * @property {string} url The request URL. + * @property {object} fields The form fields to include in the POST request. 
+ */ + /** + * @typedef {array} GenerateSignedPostPolicyV4Response + * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. + */ + /** + * @callback GenerateSignedPostPolicyV4Callback + * @param {?Error} err Request error, if any. + * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. + */ + /** + * Get a v4 signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} + * + * @param {object} options Configuration options. + * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in + * the result, e.g. "https://cdn.example.com". + * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} + * to include in the signed policy. Any fields with key beginning with 'x-ignore-' + * will not be included in the policy to be signed. + * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} + * to include in the signed policy. All fields given in `config.fields` are + * automatically included in the conditions array, adding the same entry + * in both `fields` and `conditions` will result in duplicate entries. + * + * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * expires: '10-25-2022', + * conditions: [ + * ['eq', '$Content-Type', 'image/jpeg'], + * ['content-length-range', 0, 1024], + * ], + * fields: { + * acl: 'public-read', + * 'x-goog-meta-foo': 'bar', + * 'x-ignore-mykey': 'data' + * } + * }; + * + * file.generateSignedPostPolicyV4(options, function(err, response) { + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.generateSignedPostPolicyV4(options).then(function(data) { + * const response = data[0]; + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * ``` + */ + generateSignedPostPolicyV4(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + options = Object.assign({}, options); + let fields = Object.assign({}, options.fields); + const now = new Date(); + const nowISO = (0, util_js_2.formatAsUTCISO)(now, true); + const todayISO = (0, util_js_2.formatAsUTCISO)(now); + const sign = async () => { + const { client_email } = await this.storage.authClient.getCredentials(); + const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; + fields = { + ...fields, + bucket: this.bucket.name, + key: this.name, + 'x-goog-date': nowISO, + 'x-goog-credential': credential, + 'x-goog-algorithm': 'GOOG4-RSA-SHA256', + }; + const conditions = options.conditions || []; + Object.entries(fields).forEach(([key, value]) => { + if (!key.startsWith('x-ignore-')) { + conditions.push({ [key]: value }); + } + }); + delete fields.bucket; + const expiration = (0, util_js_2.formatAsUTCISO)(expires, true, '-', ':'); + const policy = { + conditions, + expiration, + }; + const policyString = (0, util_js_2.unicodeJSONStringify)(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + try { + const signature = await this.storage.authClient.sign(policyBase64, options.signingEndpoint); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const universe = this.parent.storage.universeDomain; + fields['policy'] = policyBase64; + fields['x-goog-signature'] = signatureHex; + let url; + if (this.storage.customEndpoint) { + url = this.storage.apiEndpoint; + } + else if (options.virtualHostedStyle) { + url = `https://${this.bucket.name}.storage.${universe}/`; + } + else if (options.bucketBoundHostname) { + url = `${options.bucketBoundHostname}/`; + } + else { + url = `https://storage.${universe}/${this.bucket.name}/`; + } + return { + url, + fields, + }; + } + catch (err) { + throw new signer_js_1.SigningError(err.message); + } + }; + sign().then(res => callback(null, res), callback); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * Get a signed URL to allow limited time access to the file. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. 
That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {object} config Configuration object. + * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or + * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). + * When using "resumable", the header `X-Goog-Resumable: start` has + * to be sent when making a request with the signed URL. + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs (e.g. 'https://mybucket.storage.googleapis.com/...') instead of path-style + * (e.g. 'https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like + * if you provide this, the client must provide this HTTP header with this same + * value in its request, so to if this parameter is not provided here, + * the client must not provide any value for this HTTP header in its request. + * @param {string} [config.contentType] Just like if you provide this, the client + * must provide this HTTP header with this same value in its request, so to if + * this parameter is not provided here, the client must not provide any value + * for this HTTP header in its request. + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @param {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {string} [config.promptSaveAs] The filename to prompt the user to + * save the file as when the signed url is accessed. This is ignored if + * `config.responseDisposition` is set. + * @param {string} [config.responseDisposition] The + * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the + * signed url. + * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value + * given is passed to `new Date()`. + * Note: Use for 'v4' only. 
+ * @param {string} [config.responseType] The response-content-type parameter + * of the signed url. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to read from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * // Access will begin at accessibleAt and end at expires. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * accessibleAt: '03-13-2025' + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL to allow write permissions. This means anyone with this + * URL + * // can send a POST request with new data that will overwrite the file. + * //- + * file.getSignedUrl({ + * action: 'write', + * expires: '03-17-2025' + * }, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to be written to. + * const writeStream = request.put(url); + * writeStream.end('New data'); + * + * writeStream.on('complete', function(resp) { + * // Confirm the new content was saved. + * file.download(function(err, fileContents) { + * console.log('Contents:', fileContents.toString()); + * // Contents: New data + * }); + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_generate_signed_url + * Another example: + */ + getSignedUrl(cfg, callback) { + const method = ActionToHTTPMethod[cfg.action]; + const extensionHeaders = (0, util_js_2.objectKeyToLowercase)(cfg.extensionHeaders || {}); + if (cfg.action === 'resumable') { + extensionHeaders['x-goog-resumable'] = 'start'; + } + const queryParams = Object.assign({}, cfg.queryParams); + if (typeof cfg.responseType === 'string') { + queryParams['response-content-type'] = cfg.responseType; + } + if (typeof cfg.promptSaveAs === 'string') { + queryParams['response-content-disposition'] = + 'attachment; filename="' + cfg.promptSaveAs + '"'; + } + if (typeof cfg.responseDisposition === 'string') { + queryParams['response-content-disposition'] = cfg.responseDisposition; + } + if (this.generation) { + queryParams['generation'] = this.generation.toString(); + } + const signConfig = { + method, + expires: cfg.expires, + accessibleAt: cfg.accessibleAt, + extensionHeaders, + queryParams, + contentMd5: cfg.contentMd5, + contentType: cfg.contentType, + host: cfg.host, + }; + if (cfg.cname) { + signConfig.cname = cfg.cname; + } + if (cfg.version) { + signConfig.version = cfg.version; + } + if (cfg.virtualHostedStyle) { + signConfig.virtualHostedStyle = cfg.virtualHostedStyle; + } + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this.bucket, this, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback IsPublicCallback + * @param {?Error} err Request error, if any. + * @param {boolean} resp Whether file is public or not. + */ + /** + * @typedef {array} IsPublicResponse + * @property {boolean} 0 Whether file is public or not. + */ + /** + * Check whether this file is public or not by sending + * a HEAD request without credentials. + * No errors from the server indicates that the current + * file is public. + * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} + * indicates that file is private. + * Any other non 403 error is propagated to user. + * + * @param {IsPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Check whether the file is publicly accessible. + * //- + * file.isPublic(function(err, resp) { + * if (err) { + * console.error(err); + * return; + * } + * console.log(`the file ${file.id} is public: ${resp}`) ; + * }) + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.isPublic().then(function(data) { + * const resp = data[0]; + * }); + * ``` + */ + isPublic(callback) { + var _a; + // Build any custom headers based on the defined interceptors on the parent + // storage object and this object + const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? 
void 0 : _a.interceptors) || []; + const fileInterceptors = this.interceptors || []; + const allInterceptors = storageInterceptors.concat(fileInterceptors); + const headers = allInterceptors.reduce((acc, curInterceptor) => { + const currentHeaders = curInterceptor.request({ + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + }); + Object.assign(acc, currentHeaders.headers); + return acc; + }, {}); + index_js_1.util.makeRequest({ + method: 'GET', + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + headers, + }, { + retryOptions: this.storage.retryOptions, + }, (err) => { + if (err) { + const apiError = err; + if (apiError.code === 403) { + callback(null, false); + } + else { + callback(err); + } + } + else { + callback(null, true); + } + }); + } + /** + * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [strict] If true, set the file to be private to + * only the owner user. Otherwise, it will be private to the project. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback MakeFilePrivateCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} MakeFilePrivateResponse + * @property {object} 0 The full API response. + */ + /** + * Make a file private to the project and remove all other permissions. + * Set `options.strict` to true to make the file private to only the owner. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @param {MakeFilePrivateOptions} [options] Configuration options. + * @param {MakeFilePrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Set the file private so only project maintainers can see and modify it. + * //- + * file.makePrivate(function(err) {}); + * + * //- + * // Set the file private so only the owner can see and modify it. + * //- + * file.makePrivate({ strict: true }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePrivate().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const query = { + predefinedAcl: options.strict ? 'private' : 'projectPrivate', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + }; + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { + query.ifMetagenerationMatch = + (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifMetagenerationMatch; + delete options.preconditionOpts; + } + if (options.userProject) { + query.userProject = options.userProject; + } + // You aren't allowed to set both predefinedAcl & acl properties on a file, + // so acl must explicitly be nullified, destroying all previous acls on the + // file. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, callback); + } + /** + * @typedef {array} MakeFilePublicResponse + * @property {object} 0 The full API response. + */ + /** + * @callback MakeFilePublicCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set a file to be publicly readable and maintain all previous permissions. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {MakeFilePublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.makePublic(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePublic().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_make_public + * Another example: + */ + makePublic(callback) { + callback = callback || index_js_1.util.noop; + this.acl.add({ + entity: 'allUsers', + role: 'READER', + }, (err, acl, resp) => { + callback(err, resp); + }); + } + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl() { + return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; + } + /** + * @typedef {array} MoveResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Move this file to another location. By default, this will rename the file + * and keep it in the same bucket, but you can choose to move it to another + * Bucket by providing a Bucket or File object or a URL beginning with + * "gs://". + * + * **Warning**: + * There is currently no atomic `move` method in the Cloud Storage API, + * so this method is a composition of {@link File#copy} (to the new + * location) and {@link File#delete} (from the old location). 
While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {MoveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is moved to its + * // current bucket, under the new name provided. + * //- + * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-new.png'; + * file.move(newLocation, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be moved to that bucket + * // using the same name. + * //- + * const anotherBucket = gcs.bucket('another-bucket'); + * + * file.move(anotherBucket, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.move(anotherFile, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `destinationFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.move('my-image-new.png').then(function(data) { + * const destinationFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file + * Another example: + */ + move(destination, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.copy(destination, options, (err, destinationFile, copyApiResponse) => { + if (err) { + err.message = 'file#copy failed with an error - ' + err.message; + callback(err, null, copyApiResponse); + return; + } + if (this.name !== destinationFile.name || + this.bucket.name !== destinationFile.bucket.name) { + this.delete(options, (err, apiResponse) => { + if (err) { + err.message = 'file#delete failed with an error - ' + err.message; + callback(err, destinationFile, apiResponse); + return; + } + callback(null, destinationFile, copyApiResponse); + }); + } + else { + callback(null, destinationFile, copyApiResponse); + } + }); + } + /** + * @typedef {array} RenameResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback RenameCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} RenameOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Rename this file. + * + * **Warning**: + * There is currently no atomic `rename` method in the Cloud Storage API, + * so this method is an alias of {@link File#move}, which in turn is a + * composition of {@link File#copy} (to the new location) and + * {@link File#delete} (from the old location). While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * @param {string|File} destinationFile Destination file. + * @param {RenameCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a string or a File object. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // You can pass in a string for the destinationFile. + * //- + * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "renamed-image.png" + * + * // `renamedFile` is an instance of a File object that refers to your + * // renamed file. + * }); + * + * //- + * // You can pass in a File object. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.rename(anotherFile, function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * + * // Note: + * // The `renamedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.rename('my-renamed-image.png').then(function(data) { + * const renamedFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + rename(destinationFile, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.move(destinationFile, options, callback); + } + /** + * @typedef {object} RestoreOptions Options for File#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + * @param {string | number} [ifGenerationMatch] Request proceeds if the generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifGenerationNotMatch] Request proceeds if the generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + * @param {string | number} [ifMetagenerationMatch] Request proceeds if the meta-generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifMetagenerationNotMatch] Request proceeds if the meta-generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + */ + /** + * Restores a soft-deleted file + * @param {RestoreOptions} options Restore options. + * @returns {Promise} + */ + async restore(options) { + const [file] = await this.request({ + method: 'POST', + uri: '/restore', + qs: options, + }); + return file; + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + return this.parent.request.call(this, reqOpts, callback); + } + /** + * @callback RotateEncryptionKeyCallback + * @extends CopyCallback + */ + /** + * @typedef RotateEncryptionKeyResponse + * @extends CopyResponse + */ + /** + * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options + * for File#rotateEncryptionKey(). + * If a string or Buffer is provided, it is interpreted as an AES-256, + * customer-supplied encryption key. If you'd like to use a Cloud KMS key + * name, you must specify an options object with the property name: + * `kmsKeyName`. + * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. + * @param {string} [options.kmsKeyName] A Cloud KMS key name. + */ + /** + * This method allows you to update the encryption key associated with this + * file. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {RotateEncryptionKeyOptions} [options] - Configuration options. + * @param {RotateEncryptionKeyCallback} [callback] + * @returns {Promise} + * + * @example include:samples/encryption.js + * region_tag:storage_rotate_encryption_key + * Example of rotating the encryption key for this file: + */ + rotateEncryptionKey(optionsOrCallback, callback) { + var _a; + callback = + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; + let options = {}; + if (typeof optionsOrCallback === 'string' || + optionsOrCallback instanceof Buffer) { + options = { + encryptionKey: optionsOrCallback, + }; + } + else if (typeof optionsOrCallback === 'object') { + options = optionsOrCallback; + } + const newFile = this.bucket.file(this.id, options); + const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined + ? { preconditionOpts: options.preconditionOpts } + : {}; + this.copy(newFile, copyOptions, callback); + } + /** + * @typedef {object} SaveOptions + * @extends CreateWriteStreamOptions + */ + /** + * @callback SaveCallback + * @param {?Error} err Request error, if any. + */ + /** + * Write strings or buffers to a file. + * + * *This is a convenience method which wraps {@link File#createWriteStream}.* + * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. + * + *

+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
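As a rough illustration of that recommendation, a small payload can be written with the resumable behaviour switched off via `options.resumable`; the bucket and object names below are placeholders, not part of this change:

```js
// Illustrative sketch: upload a small payload with a simple (non-resumable)
// upload, avoiding the resumable-session overhead for objects under ~10MB.
// 'my-bucket' and 'notes.txt' are placeholder names.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function saveSmallObject() {
  const file = storage.bucket('my-bucket').file('notes.txt');
  await file.save('a small text payload', {
    resumable: false, // skip the resumable-upload handshake
    contentType: 'text/plain',
  });
}

saveSmallObject().catch(console.error);
```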

+ * + * @param {SaveData} data The data to write to a file. + * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` + * parameter. + * @param {SaveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const contents = 'This is the contents of the file.'; + * + * file.save(contents, function(err) { + * if (!err) { + * // File written successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.save(contents).then(function() {}); + * ``` + */ + save(data, optionsOrCallback, callback) { + // tslint:enable:no-any + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + let maxRetries = this.storage.retryOptions.maxRetries; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + maxRetries = 0; + } + const returnValue = (0, async_retry_1.default)(async (bail) => { + return new Promise((resolve, reject) => { + if (maxRetries === 0) { + this.storage.retryOptions.autoRetry = false; + } + const writable = this.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + const handleError = (err) => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + return bail(err); + }; + if (typeof data === 'string' || Buffer.isBuffer(data)) { + writable + .on('error', handleError) + .on('finish', () => resolve()) + .end(data); + } + else { + (0, stream_1.pipeline)(data, writable, err => { + if (err) { + if (typeof data !== 'function') { + // Only PipelineSourceFunction can be retried. Async-iterables + // and Readable streams can only be consumed once. + return bail(err); + } + handleError(err); + } + else { + resolve(); + } + }); + } + }); + }, { + retries: maxRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(); + } + }) + .catch(callback); + } + } + setMetadata(metadata, optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_js_1.AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} SetStorageClassResponse + * @property {object} 0 The full API response. + */ + /** + * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. 
+ */ + /** + * @callback SetStorageClassCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set the storage class for this file. + * + * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`) + * **Note:** The storage classes `multi_regional` and `regional` + * are now legacy and will be deprecated in the future. + * @param {SetStorageClassOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * file.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const req = { + ...options, + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass: storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(), + }; + this.copy(this, req, (err, file, apiResponse) => { + if (err) { + callback(err, apiResponse); + return; + } + this.metadata = file.metadata; + callback(null, apiResponse); + }); + } + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.bucket.setUserProject.call(this, userProject); + } + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const retryOptions = this.storage.retryOptions; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) { + retryOptions.autoRetry = false; + } + const uploadStream = resumableUpload.upload({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + isPartialUpload: options.isPartialUpload, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + uri: options.uri, + userProject: options.userProject || this.userProject, + retryOptions: { ...retryOptions }, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize, + highWaterMark: options === null || options === void 0 ? void 0 : options.highWaterMark, + universeDomain: this.bucket.storage.universeDomain, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }); + uploadStream + .on('response', resp => { + dup.emit('response', resp); + }) + .on('uri', uri => { + dup.emit('uri', uri); + }) + .on('metadata', metadata => { + this.metadata = metadata; + dup.emit('metadata'); + }) + .on('finish', () => { + dup.emit('complete'); + }) + .on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(uploadStream); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const apiEndpoint = this.storage.apiEndpoint; + const bucketName = this.bucket.name; + const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; + const reqOpts = { + qs: { + name: this.name, + }, + uri: uri, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + if (this.generation !== undefined) { + reqOpts.qs.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName !== undefined) { + reqOpts.qs.kmsKeyName = this.kmsKeyName; + } + if (typeof options.timeout === 'number') { + reqOpts.timeout = options.timeout; + } + if (options.userProject || this.userProject) { + reqOpts.qs.userProject = options.userProject || this.userProject; + } + if (options.predefinedAcl) { + reqOpts.qs.predefinedAcl = options.predefinedAcl; + } + else if (options.private) { + reqOpts.qs.predefinedAcl = 'private'; + } + else if (options.public) { + reqOpts.qs.predefinedAcl = 'publicRead'; + } + Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); + index_js_1.util.makeWritableStream(dup, { + makeAuthenticatedRequest: (reqOpts) => { + this.request(reqOpts, (err, body, resp) => { + if (err) { + dup.destroy(err); + return; + } + this.metadata = body; + dup.emit('metadata', body); + dup.emit('response', resp); + dup.emit('complete'); + }); + }, + metadata: options.metadata, + request: reqOpts, + }); + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, methodType, localPreconditionOptions) { + var _a, _b, _c, _d; + if ((typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.delete && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + if ((typeof coreOpts === 'object' && + ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.setMetadata && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } + async getBufferFromReadable(readable) { + const buf = []; + for await (const chunk of readable) { + buf.push(chunk); + } + return Buffer.concat(buf); + } +} +exports.File = File; +_File_instances = new WeakSet(), _File_validateIntegrity = +/** + * + * @param hashCalculatingStream + * @param verify + * @returns {boolean} Returns `true` if valid, throws with error otherwise + */ +async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { + const metadata = this.metadata; + // If we're doing validation, assume the worst + let dataMismatch = !!(verify.crc32c || verify.md5); + if (verify.crc32c && metadata.crc32c) { + dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); + } + if (verify.md5 && metadata.md5Hash) { + dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); + } + if (dataMismatch) { + const errors = []; + let code = ''; + let message = ''; + try { + await this.delete(); + if (verify.md5 && !metadata.md5Hash) { + code = 'MD5_NOT_AVAILABLE'; + message = FileExceptionMessages.MD5_NOT_AVAILABLE; + } + else { + code = 'FILE_NO_UPLOAD'; + message = FileExceptionMessages.UPLOAD_MISMATCH; + } + } + catch (e) { + const error = e; + code = 'FILE_NO_UPLOAD_DELETE'; + message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; + errors.push(error); + } + const error = new RequestError(message); + error.code = code; + error.errors = errors; + throw error; + } + return true; +}; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(File, { + exclude: [ + 'cloudStorageURI', + 'publicUrl', + 'request', + 'save', + 'setEncryptionKey', + 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', + 'getBufferFromReadable', + 'restore', + ], +}); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/hash-stream-validator.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/hash-stream-validator.d.ts new file mode 100644 index 0000000..00d85d1 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/hash-stream-validator.d.ts @@ -0,0 +1,37 @@ +/// +/// +import { Transform } from 'stream'; +import { CRC32CValidatorGenerator, CRC32CValidator } from './crc32c.js'; +interface HashStreamValidatorOptions { + /** Enables CRC32C calculation. To validate a provided value use `crc32cExpected`. */ + crc32c: boolean; + /** Enables MD5 calculation. To validate a provided value use `md5Expected`. */ + md5: boolean; + /** A CRC32C instance for validation. To validate a provided value use `crc32cExpected`. */ + crc32cInstance: CRC32CValidator; + /** Set a custom CRC32C generator. Used if `crc32cInstance` has not been provided. */ + crc32cGenerator: CRC32CValidatorGenerator; + /** Sets the expected CRC32C value to verify once all data has been consumed. Also sets the `crc32c` option to `true` */ + crc32cExpected?: string; + /** Sets the expected MD5 value to verify once all data has been consumed. 
Also sets the `md5` option to `true` */ + md5Expected?: string; + /** Indicates whether or not to run a validation check or only update the hash values */ + updateHashesOnly?: boolean; +} +declare class HashStreamValidator extends Transform { + #private; + readonly crc32cEnabled: boolean; + readonly md5Enabled: boolean; + readonly crc32cExpected: string | undefined; + readonly md5Expected: string | undefined; + readonly updateHashesOnly: boolean; + constructor(options?: Partial); + /** + * Return the current CRC32C value, if available. + */ + get crc32c(): string | undefined; + _flush(callback: (error?: Error | null | undefined) => void): void; + _transform(chunk: Buffer, encoding: BufferEncoding, callback: (e?: Error) => void): void; + test(hash: 'crc32c' | 'md5', sum: Buffer | string): boolean; +} +export { HashStreamValidator, HashStreamValidatorOptions }; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/hash-stream-validator.js b/node_modules/@google-cloud/storage/build/cjs/src/hash-stream-validator.js new file mode 100644 index 0000000..ef61e0c --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/hash-stream-validator.js @@ -0,0 +1,119 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HashStreamValidator = void 0; +const crypto_1 = require("crypto"); +const stream_1 = require("stream"); +const crc32c_js_1 = require("./crc32c.js"); +const file_js_1 = require("./file.js"); +class HashStreamValidator extends stream_1.Transform { + constructor(options = {}) { + super(); + this.updateHashesOnly = false; + _HashStreamValidator_crc32cHash.set(this, undefined); + _HashStreamValidator_md5Hash.set(this, undefined); + _HashStreamValidator_md5Digest.set(this, ''); + this.crc32cEnabled = !!options.crc32c; + this.md5Enabled = !!options.md5; + this.updateHashesOnly = !!options.updateHashesOnly; + this.crc32cExpected = options.crc32cExpected; + this.md5Expected = options.md5Expected; + if (this.crc32cEnabled) { + if (options.crc32cInstance) { + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f"); + } + else { + const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); + } + } + if (this.md5Enabled) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); + } + } + /** + * Return the current CRC32C value, if available. + */ + get crc32c() { + var _a; + return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString(); + } + _flush(callback) { + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); + } + if (this.updateHashesOnly) { + callback(); + return; + } + // If we're doing validation, assume the worst-- a data integrity + // mismatch. If not, these tests won't be performed, and we can assume + // the best. + // We must check if the server decompressed the data on serve because hash + // validation is not possible in this case. + let failed = this.crc32cEnabled || this.md5Enabled; + if (this.crc32cEnabled && this.crc32cExpected) { + failed = !this.test('crc32c', this.crc32cExpected); + } + if (this.md5Enabled && this.md5Expected) { + failed = !this.test('md5', this.md5Expected); + } + if (failed) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + callback(mismatchError); + } + else { + callback(); + } + } + _transform(chunk, encoding, callback) { + this.push(chunk, encoding); + try { + if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); + callback(); + } + catch (e) { + callback(e); + } + } + test(hash, sum) { + const check = Buffer.isBuffer(sum) ? 
sum.toString('base64') : sum; + if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); + } + if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; + } + return false; + } +} +exports.HashStreamValidator = HashStreamValidator; +_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/hmacKey.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/hmacKey.d.ts new file mode 100644 index 0000000..4433a83 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/hmacKey.d.ts @@ -0,0 +1,93 @@ +import { ServiceObject, MetadataCallback, SetMetadataResponse } from './nodejs-common/index.js'; +import { BaseMetadata, SetMetadataOptions } from './nodejs-common/service-object.js'; +import { Storage } from './storage.js'; +export interface HmacKeyOptions { + projectId?: string; +} +export interface HmacKeyMetadata extends BaseMetadata { + accessId?: string; + etag?: string; + projectId?: string; + serviceAccountEmail?: string; + state?: string; + timeCreated?: string; + updated?: string; +} +export interface SetHmacKeyMetadataOptions { + /** + * This parameter is currently ignored. + */ + userProject?: string; +} +export interface SetHmacKeyMetadata { + state?: 'ACTIVE' | 'INACTIVE'; + etag?: string; +} +export interface HmacKeyMetadataCallback { + (err: Error | null, metadata?: HmacKeyMetadata, apiResponse?: unknown): void; +} +export type HmacKeyMetadataResponse = [HmacKeyMetadata, unknown]; +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +export declare class HmacKey extends ServiceObject { + /** + * A reference to the {@link Storage} associated with this {@link HmacKey} + * instance. + * @name HmacKey#storage + * @type {Storage} + */ + storage: Storage; + private instanceRetryValue?; + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. + * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. 
+ * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage: Storage, accessId: string, options?: HmacKeyOptions); + /** + * Set the metadata for this object. + * + * @param {object} metadata - The metadata to set on this object. + * @param {object=} options - Configuration options. + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + setMetadata(metadata: HmacKeyMetadata, options?: SetMetadataOptions): Promise>; + setMetadata(metadata: HmacKeyMetadata, callback: MetadataCallback): void; + setMetadata(metadata: HmacKeyMetadata, options: SetMetadataOptions, callback: MetadataCallback): void; +} diff --git a/node_modules/@google-cloud/storage/build/cjs/src/hmacKey.js b/node_modules/@google-cloud/storage/build/cjs/src/hmacKey.js new file mode 100644 index 0000000..aaf654a --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/hmacKey.js @@ -0,0 +1,336 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HmacKey = void 0; +const index_js_1 = require("./nodejs-common/index.js"); +const storage_js_1 = require("./storage.js"); +const promisify_1 = require("@google-cloud/promisify"); +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +class HmacKey extends index_js_1.ServiceObject { + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. + * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. 
+ * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage, accessId, options) { + const methods = { + /** + * @typedef {object} DeleteHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} DeleteHmacKeyResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Deletes an HMAC key. + * Key state must be set to `INACTIVE` prior to deletion. + * Caution: HMAC keys cannot be recovered once you delete them. + * + * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. + * + * @method HmacKey#delete + * @param {DeleteHmacKeyOptions} [options] Configuration options. + * @param {DeleteHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Delete HMAC key after making the key inactive. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * hmacKey.delete((err) => { + * if (err) { + * console.error(err); + * return; + * } + * // The HMAC key is deleted. + * }); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey + * .setMetadata({state: 'INACTIVE'}) + * .then(() => { + * return hmacKey.delete(); + * }); + * ``` + */ + delete: true, + /** + * @callback GetHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey this {@link HmacKey} instance. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetHmacKeyResponse + * @property {HmacKey} 0 This {@link HmacKey} instance. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} GetHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * this {@link HmacKey} instance. + * + * HmacKey.get() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#get + * @param {GetHmacKeyOptions} [options] Configuration options. + * @param {GetHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's Metadata. + * //- + * storage.hmacKey('ACCESS_ID') + * .get((err, hmacKey) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * // do something with the returned HmacKey object. + * }); + * + * //- + * // If the callback is omitted, a promise is returned. 
+ * //- + * storage.hmacKey('ACCESS_ID') + * .get() + * .then((data) => { + * const hmacKey = data[0]; + * }); + * ``` + */ + get: true, + /** + * @typedef {object} GetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * the HMAC key's metadata as an object. + * + * HmacKey.getMetadata() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#getMetadata + * @param {GetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's metadata and populate to the metadata property. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata((err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata() + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + getMetadata: true, + /** + * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. + * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. + * @property {string} [etag] Include an etag from a previous get HMAC key request + * to perform safe read-modify-write. + */ + /** + * @typedef {object} SetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @callback HmacKeyMetadataCallback + * @param {?Error} err Request error, if any. + * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} HmacKeyMetadataResponse + * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. + * @property {object} 1 The full API response. + */ + /** + * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for + * valid states. + * + * @method HmacKey#setMetadata + * @param {SetHmacKeyMetadata} metadata The new metadata. + * @param {SetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * const metadata = { + * state: 'INACTIVE', + * }; + * + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. 
+ * //- + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata) + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }; + const projectId = (options && options.projectId) || storage.projectId; + super({ + parent: storage, + id: accessId, + baseUrl: `/projects/${projectId}/hmacKeys`, + methods, + }); + this.storage = storage; + this.instanceRetryValue = storage.retryOptions.autoRetry; + } + setMetadata(metadata, optionsOrCallback, cb) { + // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways + if (this.storage.retryOptions.idempotencyStrategy !== + storage_js_1.IdempotencyStrategy.RetryAlways) { + this.storage.retryOptions.autoRetry = false; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } +} +exports.HmacKey = HmacKey; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(HmacKey); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/iam.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/iam.d.ts new file mode 100644 index 0000000..3ceb19c --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/iam.d.ts @@ -0,0 +1,117 @@ +import { Bucket } from './bucket.js'; +export interface GetPolicyOptions { + userProject?: string; + requestedPolicyVersion?: number; +} +export type GetPolicyResponse = [Policy, unknown]; +/** + * @callback GetPolicyCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The policy. + * @param {object} apiResponse The full API response. + */ +export interface GetPolicyCallback { + (err?: Error | null, acl?: Policy, apiResponse?: unknown): void; +} +/** + * @typedef {object} SetPolicyOptions + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ +export interface SetPolicyOptions { + userProject?: string; +} +/** + * @typedef {array} SetPolicyResponse + * @property {object} 0 The policy. + * @property {object} 1 The full API response. + */ +export type SetPolicyResponse = [Policy, unknown]; +/** + * @callback SetPolicyCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The policy. + * @param {object} apiResponse The full API response. + */ +export interface SetPolicyCallback { + (err?: Error | null, acl?: Policy, apiResponse?: object): void; +} +export interface Policy { + bindings: PolicyBinding[]; + version?: number; + etag?: string; +} +export interface PolicyBinding { + role: string; + members: string[]; + condition?: Expr; +} +export interface Expr { + title?: string; + description?: string; + expression: string; +} +/** + * @typedef {array} TestIamPermissionsResponse + * @property {object} 0 A subset of permissions that the caller is allowed. + * @property {object} 1 The full API response. + */ +export type TestIamPermissionsResponse = [{ + [key: string]: boolean; +}, unknown]; +/** + * @callback TestIamPermissionsCallback + * @param {?Error} err Request error, if any. 
+ * @param {object} acl A subset of permissions that the caller is allowed. + * @param {object} apiResponse The full API response. + */ +export interface TestIamPermissionsCallback { + (err?: Error | null, acl?: { + [key: string]: boolean; + } | null, apiResponse?: unknown): void; +} +/** + * @typedef {object} TestIamPermissionsOptions Configuration options for Iam#testPermissions(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ +export interface TestIamPermissionsOptions { + userProject?: string; +} +export declare enum IAMExceptionMessages { + POLICY_OBJECT_REQUIRED = "A policy object is required.", + PERMISSIONS_REQUIRED = "Permissions are required." +} +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +declare class Iam { + private request_; + private resourceId_; + constructor(bucket: Bucket); + getPolicy(options?: GetPolicyOptions): Promise; + getPolicy(options: GetPolicyOptions, callback: GetPolicyCallback): void; + getPolicy(callback: GetPolicyCallback): void; + setPolicy(policy: Policy, options?: SetPolicyOptions): Promise; + setPolicy(policy: Policy, callback: SetPolicyCallback): void; + setPolicy(policy: Policy, options: SetPolicyOptions, callback: SetPolicyCallback): void; + testPermissions(permissions: string | string[], options?: TestIamPermissionsOptions): Promise; + testPermissions(permissions: string | string[], callback: TestIamPermissionsCallback): void; + testPermissions(permissions: string | string[], options: TestIamPermissionsOptions, callback: TestIamPermissionsCallback): void; +} +export { Iam }; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/iam.js b/node_modules/@google-cloud/storage/build/cjs/src/iam.js new file mode 100644 index 0000000..9d4ce67 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/iam.js @@ -0,0 +1,306 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
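+/**
+ * A minimal, illustrative sketch of how the Iam surface declared in iam.d.ts
+ * above (bucket.iam.getPolicy / setPolicy / testPermissions) can be combined
+ * into a read-modify-write flow. The bucket name 'my-bucket', the role and
+ * the member below are assumptions for illustration only and are not part of
+ * this patch or of the library itself.
+ *
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ * const bucket = storage.bucket('my-bucket'); // assumed bucket name
+ *
+ * async function grantObjectViewer() {
+ *   // Fetch the current policy (version 3 so conditional bindings, if any,
+ *   // are returned instead of causing an error).
+ *   const [policy] = await bucket.iam.getPolicy({requestedPolicyVersion: 3});
+ *
+ *   // Add a binding and write the policy back.
+ *   policy.bindings.push({
+ *     role: 'roles/storage.objectViewer',  // assumed role
+ *     members: ['user:jane@example.com'],  // assumed member
+ *   });
+ *   await bucket.iam.setPolicy(policy);
+ *
+ *   // Verify which permissions the caller itself holds.
+ *   const [permissions] = await bucket.iam.testPermissions([
+ *     'storage.buckets.get',
+ *     'storage.objects.get',
+ *   ]);
+ *   console.log(permissions);
+ * }
+ * ```
+ */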
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Iam = exports.IAMExceptionMessages = void 0; +const promisify_1 = require("@google-cloud/promisify"); +const util_js_1 = require("./util.js"); +var IAMExceptionMessages; +(function (IAMExceptionMessages) { + IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; + IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; +})(IAMExceptionMessages || (exports.IAMExceptionMessages = IAMExceptionMessages = {})); +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +class Iam { + constructor(bucket) { + this.request_ = bucket.request.bind(bucket); + this.resourceId_ = 'buckets/' + bucket.getId(); + } + /** + * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). + * @property {number} [requestedPolicyVersion] The version of IAM policies to + * request. If a policy with a condition is requested without setting + * this, the server will return an error. This must be set to a value + * of 3 to retrieve IAM policies containing conditions. This is to + * prevent client code that isn't aware of IAM conditions from + * interpreting and modifying policies incorrectly. The service might + * return a policy with version lower than the one that was requested, + * based on the feature syntax in the policy fetched. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetPolicyResponse + * @property {Policy} 0 The policy. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} Policy + * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. + * @property {string} [policy.etag] Etags are used to perform a read-modify-write. + * @property {number} [policy.version] The syntax schema version of the Policy. + * To set an IAM policy with conditional binding, this field must be set to + * 3 or greater. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + */ + /** + * @typedef {object} PolicyBinding + * @property {string} role Role that is assigned to members. + * @property {string[]} members Specifies the identities requesting access for the bucket. + * @property {Expr} [condition] The condition that is associated with this binding. + */ + /** + * @typedef {object} Expr + * @property {string} [title] An optional title for the expression, i.e. a + * short string describing its purpose. This can be used e.g. in UIs + * which allow to enter the expression. + * @property {string} [description] An optional description of the + * expression. This is a longer text which describes the expression, + * e.g. when hovered over it in a UI. 
+ * @property {string} expression Textual representation of an expression in + * Common Expression Language syntax. The application context of the + * containing message determines which well-known feature set of CEL + * is supported.The condition that is associated with this binding. + * + * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions + */ + /** + * Get the IAM policy. + * + * @param {GetPolicyOptions} [options] Request options. + * @param {GetPolicyCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.iam.getPolicy( + * {requestedPolicyVersion: 3}, + * function(err, policy, apiResponse) { + * + * }, + * ); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy({requestedPolicyVersion: 3}) + * .then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + */ + getPolicy(optionsOrCallback, callback) { + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const qs = {}; + if (options.userProject) { + qs.userProject = options.userProject; + } + if (options.requestedPolicyVersion !== null && + options.requestedPolicyVersion !== undefined) { + qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; + } + this.request_({ + uri: '/iam', + qs, + }, cb); + } + /** + * Set the IAM policy. + * + * @throws {Error} If no policy is provided. + * + * @param {Policy} policy The policy. + * @param {SetPolicyOptions} [options] Configuration options. + * @param {SetPolicyCallback} callback Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const myPolicy = { + * bindings: [ + * { + * role: 'roles/storage.admin', + * members: + * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] + * } + * ] + * }; + * + * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.iam.setPolicy(myPolicy).then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ + setPolicy(policy, optionsOrCallback, callback) { + if (policy === null || typeof policy !== 'object') { + throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + let maxRetries; + if (policy.etag === undefined) { + maxRetries = 0; + } + this.request_({ + method: 'PUT', + uri: '/iam', + maxRetries, + json: Object.assign({ + resourceId: this.resourceId_, + }, policy), + qs: options, + }, cb); + } + /** + * Test a set of permissions for a resource. + * + * @throws {Error} If permissions are not provided. + * + * @param {string|string[]} permissions The permission(s) to test for. + * @param {TestIamPermissionsOptions} [options] Configuration object. + * @param {TestIamPermissionsCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Test a single permission. + * //- + * const test = 'storage.buckets.delete'; + * + * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": true + * // } + * }); + * + * //- + * // Test several permissions at once. + * //- + * const tests = [ + * 'storage.buckets.delete', + * 'storage.buckets.get' + * ]; + * + * bucket.iam.testPermissions(tests, function(err, permissions) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": false, + * // "storage.buckets.get": true + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.testPermissions(test).then(function(data) { + * const permissions = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + testPermissions(permissions, optionsOrCallback, callback) { + if (!Array.isArray(permissions) && typeof permissions !== 'string') { + throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const permissionsArray = Array.isArray(permissions) + ? permissions + : [permissions]; + const req = Object.assign({ + permissions: permissionsArray, + }, options); + this.request_({ + uri: '/iam/testPermissions', + qs: req, + useQuerystring: true, + }, (err, resp) => { + if (err) { + cb(err, null, resp); + return; + } + const availablePermissions = Array.isArray(resp.permissions) + ? resp.permissions + : []; + const permissionsHash = permissionsArray.reduce((acc, permission) => { + acc[permission] = availablePermissions.indexOf(permission) > -1; + return acc; + }, {}); + cb(null, permissionsHash, resp); + }); + } +} +exports.Iam = Iam; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. 
+ */ +(0, promisify_1.promisifyAll)(Iam); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/index.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/index.d.ts new file mode 100644 index 0000000..6da8101 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/index.d.ts @@ -0,0 +1,57 @@ +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. + * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +export { ApiError } from './nodejs-common/index.js'; +export { BucketCallback, BucketOptions, CreateBucketQuery, CreateBucketRequest, CreateBucketResponse, CreateHmacKeyCallback, CreateHmacKeyOptions, CreateHmacKeyResponse, GetBucketsCallback, GetBucketsRequest, GetBucketsResponse, GetHmacKeysCallback, GetHmacKeysOptions, GetHmacKeysResponse, GetServiceAccountCallback, GetServiceAccountOptions, GetServiceAccountResponse, HmacKeyResourceResponse, IdempotencyStrategy, PreconditionOptions, ServiceAccount, Storage, StorageOptions, } from './storage.js'; +export { AclMetadata, AccessControlObject, AclOptions, AddAclCallback, AddAclOptions, AddAclResponse, GetAclCallback, GetAclOptions, GetAclResponse, RemoveAclCallback, RemoveAclOptions, RemoveAclResponse, UpdateAclCallback, UpdateAclOptions, UpdateAclResponse, } from './acl.js'; +export { Bucket, BucketExistsCallback, BucketExistsOptions, BucketExistsResponse, BucketLockCallback, BucketLockResponse, BucketMetadata, CombineCallback, CombineOptions, CombineResponse, CreateChannelCallback, CreateChannelConfig, CreateChannelOptions, CreateChannelResponse, CreateNotificationCallback, CreateNotificationOptions, CreateNotificationResponse, DeleteBucketCallback, DeleteBucketOptions, DeleteBucketResponse, DeleteFilesCallback, DeleteFilesOptions, DeleteLabelsCallback, DeleteLabelsResponse, DisableRequesterPaysCallback, DisableRequesterPaysResponse, EnableRequesterPaysCallback, EnableRequesterPaysResponse, GetBucketCallback, GetBucketMetadataCallback, GetBucketMetadataOptions, GetBucketMetadataResponse, GetBucketOptions, GetBucketResponse, GetBucketSignedUrlConfig, GetFilesCallback, GetFilesOptions, GetFilesResponse, GetLabelsCallback, GetLabelsOptions, GetLabelsResponse, GetNotificationsCallback, GetNotificationsOptions, GetNotificationsResponse, Labels, LifecycleAction, LifecycleCondition, LifecycleRule, MakeBucketPrivateCallback, MakeBucketPrivateOptions, MakeBucketPrivateResponse, MakeBucketPublicCallback, MakeBucketPublicOptions, MakeBucketPublicResponse, SetBucketMetadataCallback, SetBucketMetadataOptions, SetBucketMetadataResponse, SetBucketStorageClassCallback, SetBucketStorageClassOptions, SetLabelsCallback, SetLabelsOptions, 
SetLabelsResponse, UploadCallback, UploadOptions, UploadResponse, } from './bucket.js'; +export * from './crc32c.js'; +export { Channel, StopCallback } from './channel.js'; +export { CopyCallback, CopyOptions, CopyResponse, CreateReadStreamOptions, CreateResumableUploadCallback, CreateResumableUploadOptions, CreateResumableUploadResponse, CreateWriteStreamOptions, DeleteFileCallback, DeleteFileOptions, DeleteFileResponse, DownloadCallback, DownloadOptions, DownloadResponse, EncryptionKeyOptions, File, FileExistsCallback, FileExistsOptions, FileExistsResponse, FileMetadata, FileOptions, GetExpirationDateCallback, GetExpirationDateResponse, GetFileCallback, GetFileMetadataCallback, GetFileMetadataOptions, GetFileMetadataResponse, GetFileOptions, GetFileResponse, GenerateSignedPostPolicyV2Callback, GenerateSignedPostPolicyV2Options, GenerateSignedPostPolicyV2Response, GenerateSignedPostPolicyV4Callback, GenerateSignedPostPolicyV4Options, GenerateSignedPostPolicyV4Response, GetSignedUrlConfig, MakeFilePrivateCallback, MakeFilePrivateOptions, MakeFilePrivateResponse, MakeFilePublicCallback, MakeFilePublicResponse, MoveCallback, MoveOptions, MoveResponse, PolicyDocument, PolicyFields, PredefinedAcl, RotateEncryptionKeyCallback, RotateEncryptionKeyOptions, RotateEncryptionKeyResponse, SaveCallback, SaveOptions, SetFileMetadataCallback, SetFileMetadataOptions, SetFileMetadataResponse, SetStorageClassCallback, SetStorageClassOptions, SetStorageClassResponse, SignedPostPolicyV4Output, } from './file.js'; +export * from './hash-stream-validator.js'; +export { HmacKey, HmacKeyMetadata, HmacKeyMetadataCallback, HmacKeyMetadataResponse, SetHmacKeyMetadata, SetHmacKeyMetadataOptions, } from './hmacKey.js'; +export { GetPolicyCallback, GetPolicyOptions, GetPolicyResponse, Iam, Policy, SetPolicyCallback, SetPolicyOptions, SetPolicyResponse, TestIamPermissionsCallback, TestIamPermissionsOptions, TestIamPermissionsResponse, } from './iam.js'; +export { DeleteNotificationCallback, DeleteNotificationOptions, GetNotificationCallback, GetNotificationMetadataCallback, GetNotificationMetadataOptions, GetNotificationMetadataResponse, GetNotificationOptions, GetNotificationResponse, Notification, NotificationMetadata, } from './notification.js'; +export { GetSignedUrlCallback, GetSignedUrlResponse } from './signer.js'; +export * from './transfer-manager.js'; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/index.js b/node_modules/@google-cloud/storage/build/cjs/src/index.js new file mode 100644 index 0000000..6a7818b --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/index.js @@ -0,0 +1,94 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = exports.Storage = exports.IdempotencyStrategy = exports.ApiError = void 0; +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. + * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +var index_js_1 = require("./nodejs-common/index.js"); +Object.defineProperty(exports, "ApiError", { enumerable: true, get: function () { return index_js_1.ApiError; } }); +var storage_js_1 = require("./storage.js"); +Object.defineProperty(exports, "IdempotencyStrategy", { enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } }); +Object.defineProperty(exports, "Storage", { enumerable: true, get: function () { return storage_js_1.Storage; } }); +var bucket_js_1 = require("./bucket.js"); +Object.defineProperty(exports, "Bucket", { enumerable: true, get: function () { return bucket_js_1.Bucket; } }); +__exportStar(require("./crc32c.js"), exports); +var channel_js_1 = require("./channel.js"); +Object.defineProperty(exports, "Channel", { enumerable: true, get: function () { return channel_js_1.Channel; } }); +var file_js_1 = require("./file.js"); +Object.defineProperty(exports, "File", { enumerable: true, get: function () { return file_js_1.File; } }); +__exportStar(require("./hash-stream-validator.js"), exports); +var hmacKey_js_1 = require("./hmacKey.js"); +Object.defineProperty(exports, "HmacKey", { enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } }); +var iam_js_1 = require("./iam.js"); +Object.defineProperty(exports, "Iam", { enumerable: true, get: function () { return iam_js_1.Iam; } }); +var notification_js_1 = require("./notification.js"); +Object.defineProperty(exports, "Notification", { enumerable: true, get: function () { return notification_js_1.Notification; } }); +__exportStar(require("./transfer-manager.js"), exports); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/index.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/index.d.ts new file mode 100644 index 0000000..72588c7 --- /dev/null +++ 
b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/index.d.ts @@ -0,0 +1,19 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export { GoogleAuthOptions } from 'google-auth-library'; +export { Service, ServiceConfig, ServiceOptions, StreamRequestOptions, } from './service.js'; +export { BaseMetadata, DeleteCallback, ExistsCallback, GetConfig, InstanceResponseCallback, Interceptor, MetadataCallback, MetadataResponse, Methods, ResponseCallback, ServiceObject, ServiceObjectConfig, ServiceObjectParent, SetMetadataResponse, } from './service-object.js'; +export { Abortable, AbortableDuplex, ApiError, BodyResponseCallback, DecorateRequestOptions, ResponseBody, util, } from './util.js'; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/index.js b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/index.js new file mode 100644 index 0000000..bf7023c --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/index.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0; +var service_js_1 = require("./service.js"); +Object.defineProperty(exports, "Service", { enumerable: true, get: function () { return service_js_1.Service; } }); +var service_object_js_1 = require("./service-object.js"); +Object.defineProperty(exports, "ServiceObject", { enumerable: true, get: function () { return service_object_js_1.ServiceObject; } }); +var util_js_1 = require("./util.js"); +Object.defineProperty(exports, "ApiError", { enumerable: true, get: function () { return util_js_1.ApiError; } }); +Object.defineProperty(exports, "util", { enumerable: true, get: function () { return util_js_1.util; } }); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service-object.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service-object.d.ts new file mode 100644 index 0000000..083962b --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service-object.d.ts @@ -0,0 +1,218 @@ +/// +import { EventEmitter } from 'events'; +import * as r from 'teeny-request'; +import { ApiError, BodyResponseCallback, DecorateRequestOptions } from './util.js'; +export type RequestResponse = [unknown, r.Response]; +export interface ServiceObjectParent { + interceptors: Interceptor[]; + getRequestInterceptors(): Function[]; + requestStream(reqOpts: DecorateRequestOptions): r.Request; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; +} +export interface Interceptor { + request(opts: r.Options): DecorateRequestOptions; +} +export type GetMetadataOptions = object; +export type MetadataResponse = [K, r.Response]; +export type MetadataCallback = (err: Error | null, metadata?: K, apiResponse?: r.Response) => void; +export type ExistsOptions = object; +export interface ExistsCallback 
{ + (err: Error | null, exists?: boolean): void; +} +export interface ServiceObjectConfig { + /** + * The base URL to make API requests to. + */ + baseUrl?: string; + /** + * The method which creates this object. + */ + createMethod?: Function; + /** + * The identifier of the object. For example, the name of a Storage bucket or + * Pub/Sub topic. + */ + id?: string; + /** + * A map of each method name that should be inherited. + */ + methods?: Methods; + /** + * The parent service instance. For example, an instance of Storage if the + * object is Bucket. + */ + parent: ServiceObjectParent; + /** + * Override of projectId, used to allow access to resources in another project. + * For example, a BigQuery dataset in another project to which the user has been + * granted permission. + */ + projectId?: string; +} +export interface Methods { + [methodName: string]: { + reqOpts?: r.CoreOptions; + } | boolean; +} +export interface InstanceResponseCallback { + (err: ApiError | null, instance?: T | null, apiResponse?: r.Response): void; +} +export interface CreateOptions { +} +export type CreateResponse = any[]; +export interface CreateCallback { + (err: ApiError | null, instance?: T | null, ...args: any[]): void; +} +export type DeleteOptions = { + ignoreNotFound?: boolean; + ifGenerationMatch?: number | string; + ifGenerationNotMatch?: number | string; + ifMetagenerationMatch?: number | string; + ifMetagenerationNotMatch?: number | string; +} & object; +export interface DeleteCallback { + (err: Error | null, apiResponse?: r.Response): void; +} +export interface GetConfig { + /** + * Create the object if it doesn't already exist. + */ + autoCreate?: boolean; +} +export type GetOrCreateOptions = GetConfig & CreateOptions; +export type GetResponse = [T, r.Response]; +export interface ResponseCallback { + (err?: Error | null, apiResponse?: r.Response): void; +} +export type SetMetadataResponse = [K]; +export type SetMetadataOptions = object; +export interface BaseMetadata { + id?: string; + kind?: string; + etag?: string; + selfLink?: string; + [key: string]: unknown; +} +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +declare class ServiceObject extends EventEmitter { + metadata: K; + baseUrl?: string; + parent: ServiceObjectParent; + id?: string; + private createMethod?; + protected methods: Methods; + interceptors: Interceptor[]; + projectId?: string; + constructor(config: ServiceObjectConfig); + /** + * Create the object. + * + * @param {object=} options - Configuration object. + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.instance - The instance. + * @param {object} callback.apiResponse - The full API response. + */ + create(options?: CreateOptions): Promise>; + create(options: CreateOptions, callback: CreateCallback): void; + create(callback: CreateCallback): void; + /** + * Delete the object. + * + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. 
+ * @param {object} callback.apiResponse - The full API response. + */ + delete(options?: DeleteOptions): Promise<[r.Response]>; + delete(options: DeleteOptions, callback: DeleteCallback): void; + delete(callback: DeleteCallback): void; + /** + * Check if the object exists. + * + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {boolean} callback.exists - Whether the object exists or not. + */ + exists(options?: ExistsOptions): Promise<[boolean]>; + exists(options: ExistsOptions, callback: ExistsCallback): void; + exists(callback: ExistsCallback): void; + /** + * Get the object if it exists. Optionally have the object created if an + * options object is provided with `autoCreate: true`. + * + * @param {object=} options - The configuration object that will be used to + * create the object if necessary. + * @param {boolean} options.autoCreate - Create the object if it doesn't already exist. + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.instance - The instance. + * @param {object} callback.apiResponse - The full API response. + */ + get(options?: GetOrCreateOptions): Promise>; + get(callback: InstanceResponseCallback): void; + get(options: GetOrCreateOptions, callback: InstanceResponseCallback): void; + /** + * Get the metadata of this object. + * + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.metadata - The metadata for this object. + * @param {object} callback.apiResponse - The full API response. + */ + getMetadata(options?: GetMetadataOptions): Promise>; + getMetadata(options: GetMetadataOptions, callback: MetadataCallback): void; + getMetadata(callback: MetadataCallback): void; + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors(): Function[]; + /** + * Set the metadata for this object. + * + * @param {object} metadata - The metadata to set on this object. + * @param {object=} options - Configuration options. + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + setMetadata(metadata: K, options?: SetMetadataOptions): Promise>; + setMetadata(metadata: K, callback: MetadataCallback): void; + setMetadata(metadata: K, options: SetMetadataOptions, callback: MetadataCallback): void; + /** + * Make an authenticated API request. + * + * @private + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + private request_; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts: DecorateRequestOptions): Promise; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. 
+ */ + requestStream(reqOpts: DecorateRequestOptions): r.Request; +} +export { ServiceObject }; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service-object.js b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service-object.js new file mode 100644 index 0000000..7b2cdc8 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service-object.js @@ -0,0 +1,292 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ServiceObject = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const promisify_1 = require("@google-cloud/promisify"); +const events_1 = require("events"); +const util_js_1 = require("./util.js"); +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +class ServiceObject extends events_1.EventEmitter { + /* + * @constructor + * @alias module:common/service-object + * + * @private + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string} config.createMethod - The method which creates this object. + * @param {string=} config.id - The identifier of the object. For example, the + * name of a Storage bucket or Pub/Sub topic. + * @param {object=} config.methods - A map of each method name that should be inherited. + * @param {object} config.methods[].reqOpts - Default request options for this + * particular method. A common use case is when `setMetadata` requires a + * `PUT` method to override the default `PATCH`. + * @param {object} config.parent - The parent service instance. For example, an + * instance of Storage if the object is Bucket. + */ + constructor(config) { + super(); + this.metadata = {}; + this.baseUrl = config.baseUrl; + this.parent = config.parent; // Parent class. + this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). + this.createMethod = config.createMethod; + this.methods = config.methods || {}; + this.interceptors = []; + this.projectId = config.projectId; + if (config.methods) { + // This filters the ServiceObject instance (e.g. a "File") to only have + // the configured methods. We make a couple of exceptions for core- + // functionality ("request()" and "getRequestInterceptors()") + Object.getOwnPropertyNames(ServiceObject.prototype) + .filter(methodName => { + return ( + // All ServiceObjects need `request` and `getRequestInterceptors`. 
+ // clang-format off + !/^request/.test(methodName) && + !/^getRequestInterceptors/.test(methodName) && + // clang-format on + // The ServiceObject didn't redefine the method. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] === + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ServiceObject.prototype[methodName] && + // This method isn't wanted. + !config.methods[methodName]); + }) + .forEach(methodName => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] = undefined; + }); + } + } + create(optionsOrCallback, callback) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const args = [this.id]; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + if (typeof optionsOrCallback === 'object') { + args.push(optionsOrCallback); + } + // Wrap the callback to return *this* instance of the object, not the + // newly-created one. + // tslint: disable-next-line no-any + function onCreate(...args) { + const [err, instance] = args; + if (!err) { + self.metadata = instance.metadata; + if (self.id && instance.metadata) { + self.id = instance.metadata.id; + } + args[1] = self; // replace the created `instance` with this one. + } + callback(...args); + } + args.push(onCreate); + // eslint-disable-next-line prefer-spread + this.createMethod.apply(null, args); + } + delete(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const ignoreNotFound = options.ignoreNotFound; + delete options.ignoreNotFound; + const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; + const reqOpts = { + method: 'DELETE', + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + if (err) { + if (err.code === 404 && ignoreNotFound) { + err = null; + } + } + callback(err, res); + }); + } + exists(optionsOrCallback, cb) { + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + this.get(options, err => { + if (err) { + if (err.code === 404) { + callback(null, false); + } + else { + callback(err); + } + return; + } + callback(null, true); + }); + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const [opts, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const options = Object.assign({}, opts); + const autoCreate = options.autoCreate && typeof this.create === 'function'; + delete options.autoCreate; + function onCreate(err, instance, apiResponse) { + if (err) { + if (err.code === 409) { + self.get(options, callback); + return; + } + callback(err, null, apiResponse); + return; + } + callback(null, instance, apiResponse); + } + this.getMetadata(options, (err, metadata) => { + if (err) { + if (err.code === 404 && autoCreate) { + const args = []; + if (Object.keys(options).length > 0) { + args.push(options); + } + args.push(onCreate); + self.create(...args); + return; + } + callback(err, null, metadata); + return; + } + callback(null, self, metadata); + }); + } + getMetadata(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.getMetadata === 'object' && + this.methods.getMetadata) || + {}; + const reqOpts = { + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + const localInterceptors = this.interceptors + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + return this.parent.getRequestInterceptors().concat(localInterceptors); + } + setMetadata(metadata, optionsOrCallback, cb) { + var _a, _b; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.setMetadata === 'object' && + this.methods.setMetadata) || + {}; + const reqOpts = { + method: 'PATCH', + uri: '', + ...methodConfig.reqOpts, + json: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json, + ...metadata, + }, + qs: { + ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts }; + if (this.projectId) { + reqOpts.projectId = this.projectId; + } + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .filter(x => x.trim()) // Limit to non-empty strings. + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/'); + const childInterceptors = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + const localInterceptors = [].slice.call(this.interceptors); + reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); + if (reqOpts.shouldReturnStream) { + return this.parent.requestStream(reqOpts); + } + this.parent.request(reqOpts, callback); + } + request(reqOpts, callback) { + this.request_(reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return this.request_(opts); + } +} +exports.ServiceObject = ServiceObject; +(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service.d.ts new file mode 100644 index 0000000..e7177a2 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service.d.ts @@ -0,0 +1,125 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { AuthClient, GoogleAuth, GoogleAuthOptions } from 'google-auth-library'; +import * as r from 'teeny-request'; +import { Interceptor } from './service-object.js'; +import { BodyResponseCallback, DecorateRequestOptions, MakeAuthenticatedRequest, PackageJson } from './util.js'; +export declare const DEFAULT_PROJECT_ID_TOKEN = "{{projectId}}"; +export interface StreamRequestOptions extends DecorateRequestOptions { + shouldReturnStream: true; +} +export interface ServiceConfig { + /** + * The base URL to make API requests to. + */ + baseUrl: string; + /** + * The API Endpoint to use when connecting to the service. + * Example: storage.googleapis.com + */ + apiEndpoint: string; + /** + * The scopes required for the request. + */ + scopes: string[]; + projectIdRequired?: boolean; + packageJson: PackageJson; + /** + * Reuse an existing `AuthClient` or `GoogleAuth` client instead of creating a new one. 
+ */ + authClient?: AuthClient | GoogleAuth; + /** + * Set to true if the endpoint is a custom URL + */ + customEndpoint?: boolean; +} +export interface ServiceOptions extends Omit { + authClient?: AuthClient | GoogleAuth; + interceptors_?: Interceptor[]; + email?: string; + token?: string; + timeout?: number; + userAgent?: string; + useAuthWithCustomEndpoint?: boolean; +} +export declare class Service { + baseUrl: string; + private globalInterceptors; + interceptors: Interceptor[]; + private packageJson; + projectId: string; + private projectIdRequired; + providedUserAgent?: string; + makeAuthenticatedRequest: MakeAuthenticatedRequest; + authClient: GoogleAuth; + apiEndpoint: string; + timeout?: number; + universeDomain: string; + customEndpoint: boolean; + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. + * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config: ServiceConfig, options?: ServiceOptions); + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors(): Function[]; + /** + * Get and update the Service's project ID. + * + * @param {function} callback - The callback function. + */ + getProjectId(): Promise; + getProjectId(callback: (err: Error | null, projectId?: string) => void): void; + protected getProjectIdAsync(): Promise; + /** + * Make an authenticated API request. + * + * @private + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + private request_; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts: DecorateRequestOptions): r.Request; +} diff --git a/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service.js b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service.js new file mode 100644 index 0000000..21f6e53 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/service.js @@ -0,0 +1,207 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const google_auth_library_1 = require("google-auth-library"); +const uuid = __importStar(require("uuid")); +const util_js_1 = require("./util.js"); +const util_js_2 = require("../util.js"); +exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; +class Service { + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. + * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config, options = {}) { + this.baseUrl = config.baseUrl; + this.apiEndpoint = config.apiEndpoint; + this.timeout = options.timeout; + this.globalInterceptors = Array.isArray(options.interceptors_) + ? options.interceptors_ + : []; + this.interceptors = []; + this.packageJson = config.packageJson; + this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; + this.projectIdRequired = config.projectIdRequired !== false; + this.providedUserAgent = options.userAgent; + this.universeDomain = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + this.customEndpoint = config.customEndpoint || false; + this.makeAuthenticatedRequest = util_js_1.util.makeAuthenticatedRequestFactory({ + ...config, + projectIdRequired: this.projectIdRequired, + projectId: this.projectId, + authClient: options.authClient || config.authClient, + credentials: options.credentials, + keyFile: options.keyFilename, + email: options.email, + clientOptions: { + universeDomain: options.universeDomain, + ...options.clientOptions, + }, + }); + this.authClient = this.makeAuthenticatedRequest.authClient; + const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; + if (isCloudFunctionEnv) { + this.interceptors.push({ + request(reqOpts) { + reqOpts.forever = false; + return reqOpts; + }, + }); + } + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. 
+ return [].slice + .call(this.globalInterceptors) + .concat(this.interceptors) + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + } + getProjectId(callback) { + if (!callback) { + return this.getProjectIdAsync(); + } + this.getProjectIdAsync().then(p => callback(null, p), callback); + } + async getProjectIdAsync() { + const projectId = await this.authClient.getProjectId(); + if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { + this.projectId = projectId; + } + return this.projectId; + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts, timeout: this.timeout }; + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl]; + if (this.projectIdRequired) { + if (reqOpts.projectId) { + uriComponents.push('projects'); + uriComponents.push(reqOpts.projectId); + } + else { + uriComponents.push('projects'); + uriComponents.push(this.projectId); + } + } + uriComponents.push(reqOpts.uri); + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/') + // Some URIs have colon separators. + // Bad: https://.../projects/:list + // Good: https://.../projects:list + .replace(/\/:/g, ':'); + const requestInterceptors = this.getRequestInterceptors(); + const interceptorArray = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + interceptorArray.forEach(interceptor => { + if (typeof interceptor.request === 'function') { + requestInterceptors.push(interceptor.request); + } + }); + requestInterceptors.forEach(requestInterceptor => { + reqOpts = requestInterceptor(reqOpts); + }); + delete reqOpts.interceptors_; + const pkg = this.packageJson; + let userAgent = (0, util_js_2.getUserAgentString)(); + if (this.providedUserAgent) { + userAgent = `${this.providedUserAgent} ${userAgent}`; + } + reqOpts.headers = { + ...reqOpts.headers, + 'User-Agent': userAgent, + 'x-goog-api-client': `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${pkg.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (reqOpts[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts.headers['x-goog-api-client'] += ` gccl-gcs-cmd/${reqOpts[util_js_1.GCCL_GCS_CMD_KEY]}`; + } + if (reqOpts.shouldReturnStream) { + return this.makeAuthenticatedRequest(reqOpts); + } + else { + this.makeAuthenticatedRequest(reqOpts, callback); + } + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts, callback) { + Service.prototype.request_.call(this, reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. 
+ */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return Service.prototype.request_.call(this, opts); + } +} +exports.Service = Service; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/util.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/util.d.ts new file mode 100644 index 0000000..a805858 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/util.d.ts @@ -0,0 +1,334 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/// +import { AuthClient, GoogleAuth, GoogleAuthOptions } from 'google-auth-library'; +import { CredentialBody } from 'google-auth-library'; +import * as r from 'teeny-request'; +import { Duplex, DuplexOptions, Readable, Writable } from 'stream'; +import { Interceptor } from './service-object.js'; +/** + * A unique symbol for providing a `gccl-gcs-cmd` value + * for the `X-Goog-API-Client` header. + * + * E.g. the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` + **/ +export declare const GCCL_GCS_CMD_KEY: unique symbol; +export type ResponseBody = any; +export interface DuplexifyOptions extends DuplexOptions { + autoDestroy?: boolean; + end?: boolean; +} +export interface Duplexify extends Duplex { + readonly destroyed: boolean; + setWritable(writable: Writable | false | null): void; + setReadable(readable: Readable | false | null): void; +} +export interface DuplexifyConstructor { + obj(writable?: Writable | false | null, readable?: Readable | false | null, options?: DuplexifyOptions): Duplexify; + new (writable?: Writable | false | null, readable?: Readable | false | null, options?: DuplexifyOptions): Duplexify; + (writable?: Writable | false | null, readable?: Readable | false | null, options?: DuplexifyOptions): Duplexify; +} +export interface ParsedHttpRespMessage { + resp: r.Response; + err?: ApiError; +} +export interface MakeAuthenticatedRequest { + (reqOpts: DecorateRequestOptions): Duplexify; + (reqOpts: DecorateRequestOptions, options?: MakeAuthenticatedRequestOptions): void | Abortable; + (reqOpts: DecorateRequestOptions, callback?: BodyResponseCallback): void | Abortable; + (reqOpts: DecorateRequestOptions, optionsOrCallback?: MakeAuthenticatedRequestOptions | BodyResponseCallback): void | Abortable | Duplexify; + getCredentials: (callback: (err?: Error | null, credentials?: CredentialBody) => void) => void; + authClient: GoogleAuth; +} +export interface Abortable { + abort(): void; +} +export type AbortableDuplex = Duplexify & Abortable; +export interface PackageJson { + name: string; + version: string; +} +export interface MakeAuthenticatedRequestFactoryConfig extends Omit { + /** + * Automatically retry requests if the response is related to rate limits or + * certain intermittent server errors. We will exponentially backoff + * subsequent requests by default. (default: true) + */ + autoRetry?: boolean; + /** + * If true, just return the provided request options. 
Default: false. + */ + customEndpoint?: boolean; + /** + * If true, will authenticate when using a custom endpoint. Default: false. + */ + useAuthWithCustomEndpoint?: boolean; + /** + * Account email address, required for PEM/P12 usage. + */ + email?: string; + /** + * Maximum number of automatic retries attempted before returning the error. + * (default: 3) + */ + maxRetries?: number; + stream?: Duplexify; + /** + * A pre-instantiated `AuthClient` or `GoogleAuth` client that should be used. + * A new client will be created if this is not set. + */ + authClient?: AuthClient | GoogleAuth; + /** + * Determines if a projectId is required for authenticated requests. Defaults to `true`. + */ + projectIdRequired?: boolean; +} +export interface MakeAuthenticatedRequestOptions { + onAuthenticated: OnAuthenticatedCallback; +} +export interface OnAuthenticatedCallback { + (err: Error | null, reqOpts?: DecorateRequestOptions): void; +} +export interface GoogleErrorBody { + code: number; + errors?: GoogleInnerError[]; + response: r.Response; + message?: string; +} +export interface GoogleInnerError { + reason?: string; + message?: string; +} +export interface MakeWritableStreamOptions { + /** + * A connection instance used to get a token with and send the request + * through. + */ + connection?: {}; + /** + * Metadata to send at the head of the request. + */ + metadata?: { + contentType?: string; + }; + /** + * Request object, in the format of a standard Node.js http.request() object. + */ + request?: r.Options; + makeAuthenticatedRequest(reqOpts: r.OptionsWithUri & { + [GCCL_GCS_CMD_KEY]?: string; + }, fnobj: { + onAuthenticated(err: Error | null, authenticatedReqOpts?: r.Options): void; + }): void; +} +export interface DecorateRequestOptions extends r.CoreOptions { + autoPaginate?: boolean; + autoPaginateVal?: boolean; + objectMode?: boolean; + maxRetries?: number; + uri: string; + interceptors_?: Interceptor[]; + shouldReturnStream?: boolean; + projectId?: string; + [GCCL_GCS_CMD_KEY]?: string; +} +export interface ParsedHttpResponseBody { + body: ResponseBody; + err?: Error; +} +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +export declare class ApiError extends Error { + code?: number; + errors?: GoogleInnerError[]; + response?: r.Response; + constructor(errorMessage: string); + constructor(errorBody: GoogleErrorBody); + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. + * @returns {string} + */ + static createMultiErrorMessage(err: GoogleErrorBody, errors?: GoogleInnerError[]): string; +} +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +export declare class PartialFailureError extends Error { + errors?: GoogleInnerError[]; + response?: r.Response; + constructor(b: GoogleErrorBody); +} +export interface BodyResponseCallback { + (err: Error | ApiError | null, body?: ResponseBody, res?: r.Response): void; +} +export interface RetryOptions { + retryDelayMultiplier?: number; + totalTimeout?: number; + maxRetryDelay?: number; + autoRetry?: boolean; + maxRetries?: number; + retryableErrorFn?: (err: ApiError) => boolean; +} +export interface MakeRequestConfig { + /** + * Automatically retry requests if the response is related to rate limits or + * certain intermittent server errors. 
We will exponentially backoff + * subsequent requests by default. (default: true) + */ + autoRetry?: boolean; + /** + * Maximum number of automatic retries attempted before returning the error. + * (default: 3) + */ + maxRetries?: number; + retries?: number; + retryOptions?: RetryOptions; + stream?: Duplexify; + shouldRetryFn?: (response?: r.Response) => boolean; +} +export declare class Util { + ApiError: typeof ApiError; + PartialFailureError: typeof PartialFailureError; + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop(): void; + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err: Error | null, resp?: r.Response | null, body?: ResponseBody, callback?: BodyResponseCallback): void; + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage: r.Response): ParsedHttpRespMessage; + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body: ResponseBody): ParsedHttpResponseBody; + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup: Duplexify, options: MakeWritableStreamOptions, onComplete?: Function): void; + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ + shouldRetryRequest(err?: ApiError): boolean; + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. 
+ * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. + */ + makeAuthenticatedRequestFactory(config: MakeAuthenticatedRequestFactoryConfig): MakeAuthenticatedRequest; + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. + */ + makeRequest(reqOpts: DecorateRequestOptions, config: MakeRequestConfig, callback: BodyResponseCallback): void | Abortable; + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. + */ + decorateRequest(reqOpts: DecorateRequestOptions, projectId: string): DecorateRequestOptions; + isCustomType(unknown: any, module: string): boolean; + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback void>(optionsOrCallback?: T | C, cb?: C): [T, C]; + _getDefaultHeaders(gcclGcsCmd?: string): { + 'User-Agent': string; + 'x-goog-api-client': string; + }; +} +declare const util: Util; +export { util }; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/util.js b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/util.js new file mode 100644 index 0000000..3a94d3f --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/nodejs-common/util.js @@ -0,0 +1,701 @@ +"use strict"; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = exports.GCCL_GCS_CMD_KEY = void 0; +/*! + * @module common/util + */ +const projectify_1 = require("@google-cloud/projectify"); +const ent = __importStar(require("ent")); +const google_auth_library_1 = require("google-auth-library"); +const retry_request_1 = __importDefault(require("retry-request")); +const stream_1 = require("stream"); +const teeny_request_1 = require("teeny-request"); +const uuid = __importStar(require("uuid")); +const service_js_1 = require("./service.js"); +const util_js_1 = require("../util.js"); +const duplexify_1 = __importDefault(require("duplexify")); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = require("../package-json-helper.cjs"); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * A unique symbol for providing a `gccl-gcs-cmd` value + * for the `X-Goog-API-Client` header. + * + * E.g. the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` + **/ +exports.GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD'); +const requestDefaults = { + timeout: 60000, + gzip: true, + forever: true, + pool: { + maxSockets: Infinity, + }, +}; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + * @private + */ +const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + * @private + */ +const MAX_RETRY_DEFAULT = 3; +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. 
+ */ +class ApiError extends Error { + constructor(errorBodyOrMessage) { + super(); + if (typeof errorBodyOrMessage !== 'object') { + this.message = errorBodyOrMessage || ''; + return; + } + const errorBody = errorBodyOrMessage; + this.code = errorBody.code; + this.errors = errorBody.errors; + this.response = errorBody.response; + try { + this.errors = JSON.parse(this.response.body).error.errors; + } + catch (e) { + this.errors = errorBody.errors; + } + this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); + Error.captureStackTrace(this); + } + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. + * @returns {string} + */ + static createMultiErrorMessage(err, errors) { + const messages = new Set(); + if (err.message) { + messages.add(err.message); + } + if (errors && errors.length) { + errors.forEach(({ message }) => messages.add(message)); + } + else if (err.response && err.response.body) { + messages.add(ent.decode(err.response.body.toString())); + } + else if (!err.message) { + messages.add('A failure occurred during this request.'); + } + let messageArr = Array.from(messages); + if (messageArr.length > 1) { + messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); + messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); + messageArr.push('\n'); + } + return messageArr.join('\n'); + } +} +exports.ApiError = ApiError; +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +class PartialFailureError extends Error { + constructor(b) { + super(); + const errorObject = b; + this.errors = errorObject.errors; + this.name = 'PartialFailureError'; + this.response = errorObject.response; + this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); + } +} +exports.PartialFailureError = PartialFailureError; +class Util { + constructor() { + this.ApiError = ApiError; + this.PartialFailureError = PartialFailureError; + } + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop() { } + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err, resp, body, callback) { + callback = callback || util.noop; + const parsedResp = { + err: err || null, + ...(resp && util.parseHttpRespMessage(resp)), + ...(body && util.parseHttpRespBody(body)), + }; + // Assign the parsed body to resp.body, even if { json: false } was passed + // as a request option. + // We assume that nobody uses the previously unparsed value of resp.body. + if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { + parsedResp.resp.body = parsedResp.body; + } + if (parsedResp.err && resp) { + parsedResp.err.response = resp; + } + callback(parsedResp.err, parsedResp.body, parsedResp.resp); + } + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. 
+ * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage) { + const parsedHttpRespMessage = { + resp: httpRespMessage, + }; + if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { + // Unknown error. Format according to ApiError standard. + parsedHttpRespMessage.err = new ApiError({ + errors: new Array(), + code: httpRespMessage.statusCode, + message: httpRespMessage.statusMessage, + response: httpRespMessage, + }); + } + return parsedHttpRespMessage; + } + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body) { + const parsedHttpRespBody = { + body, + }; + if (typeof body === 'string') { + try { + parsedHttpRespBody.body = JSON.parse(body); + } + catch (err) { + parsedHttpRespBody.body = body; + } + } + if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { + // Error from JSON API. + parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); + } + return parsedHttpRespBody; + } + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup, options, onComplete) { + var _a; + onComplete = onComplete || util.noop; + const writeStream = new ProgressStream(); + writeStream.on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(writeStream); + const defaultReqOpts = { + method: 'POST', + qs: { + uploadType: 'multipart', + }, + timeout: 0, + maxRetries: 0, + }; + const metadata = options.metadata || {}; + const reqOpts = { + ...defaultReqOpts, + ...options.request, + qs: { + ...defaultReqOpts.qs, + ...(_a = options.request) === null || _a === void 0 ? 
void 0 : _a.qs, + }, + multipart: [ + { + 'Content-Type': 'application/json', + body: JSON.stringify(metadata), + }, + { + 'Content-Type': metadata.contentType || 'application/octet-stream', + body: writeStream, + }, + ], + }; + options.makeAuthenticatedRequest(reqOpts, { + onAuthenticated(err, authenticatedReqOpts) { + if (err) { + dup.destroy(err); + return; + } + requestDefaults.headers = util._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const request = teeny_request_1.teenyRequest.defaults(requestDefaults); + request(authenticatedReqOpts, (err, resp, body) => { + util.handleResp(err, resp, body, (err, data) => { + if (err) { + dup.destroy(err); + return; + } + dup.emit('response', resp); + onComplete(data); + }); + }); + }, + }); + } + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ + shouldRetryRequest(err) { + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (err.errors) { + for (const e of err.errors) { + const reason = e.reason; + if (reason === 'rateLimitExceeded') { + return true; + } + if (reason === 'userRateLimitExceeded') { + return true; + } + if (reason && reason.includes('EAI_AGAIN')) { + return true; + } + } + } + } + return false; + } + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. 
+ */ + makeAuthenticatedRequestFactory(config) { + const googleAutoAuthConfig = { ...config }; + if (googleAutoAuthConfig.projectId === service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + delete googleAutoAuthConfig.projectId; + } + let authClient; + if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { + // Use an existing `GoogleAuth` + authClient = googleAutoAuthConfig.authClient; + } + else { + // Pass an `AuthClient` & `clientOptions` to `GoogleAuth`, if available + authClient = new google_auth_library_1.GoogleAuth({ + ...googleAutoAuthConfig, + authClient: googleAutoAuthConfig.authClient, + clientOptions: googleAutoAuthConfig.clientOptions, + }); + } + function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { + let stream; + let projectId; + const reqConfig = { ...config }; + let activeRequest_; + if (!optionsOrCallback) { + stream = (0, duplexify_1.default)(); + reqConfig.stream = stream; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; + async function setProjectId() { + projectId = await authClient.getProjectId(); + } + const onAuthenticated = async (err, authenticatedReqOpts) => { + const authLibraryError = err; + const autoAuthFailed = err && + typeof err.message === 'string' && + err.message.indexOf('Could not load the default credentials') > -1; + if (autoAuthFailed) { + // Even though authentication failed, the API might not actually + // care. + authenticatedReqOpts = reqOpts; + } + if (!err || autoAuthFailed) { + try { + // Try with existing `projectId` value + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + if (e instanceof projectify_1.MissingProjectIdError) { + // A `projectId` was required, but we don't have one. + try { + // Attempt to get the `projectId` + await setProjectId(); + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + // Re-use the "Could not load the default credentials error" if + // auto auth failed. + err = err || e; + } + } + else { + // Some other error unrelated to missing `projectId` + err = err || e; + } + } + } + if (err) { + if (stream) { + stream.destroy(err); + } + else { + const fn = options && options.onAuthenticated + ? options.onAuthenticated + : callback; + fn(err); + } + return; + } + if (options && options.onAuthenticated) { + options.onAuthenticated(null, authenticatedReqOpts); + } + else { + activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { + if (apiResponseError && + apiResponseError.code === 401 && + authLibraryError) { + // Re-use the "Could not load the default credentials error" if + // the API request failed due to missing credentials. + apiResponseError = authLibraryError; + } + callback(apiResponseError, ...params); + }); + } + }; + const prepareRequest = async () => { + try { + const getProjectId = async () => { + if (config.projectId && + config.projectId !== service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + // The user provided a project ID. We don't need to check with the + // auth client, it could be incorrect. + return config.projectId; + } + if (config.projectIdRequired === false) { + // A projectId is not required. Return the default. 
+ return service_js_1.DEFAULT_PROJECT_ID_TOKEN; + } + return setProjectId(); + }; + const authorizeRequest = async () => { + if (reqConfig.customEndpoint && + !reqConfig.useAuthWithCustomEndpoint) { + // Using a custom API override. Do not use `google-auth-library` for + // authentication. (ex: connecting to a local Datastore server) + return reqOpts; + } + else { + return authClient.authorizeRequest(reqOpts); + } + }; + const [_projectId, authorizedReqOpts] = await Promise.all([ + getProjectId(), + authorizeRequest(), + ]); + if (_projectId) { + projectId = _projectId; + } + return onAuthenticated(null, authorizedReqOpts); + } + catch (e) { + return onAuthenticated(e); + } + }; + prepareRequest(); + if (stream) { + return stream; + } + return { + abort() { + setImmediate(() => { + if (activeRequest_) { + activeRequest_.abort(); + activeRequest_ = null; + } + }); + }, + }; + } + const mar = makeAuthenticatedRequest; + mar.getCredentials = authClient.getCredentials.bind(authClient); + mar.authClient = authClient; + return mar; + } + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. + */ + makeRequest(reqOpts, config, callback) { + var _a, _b, _c, _d, _e; + let autoRetryValue = AUTO_RETRY_DEFAULT; + if (config.autoRetry !== undefined) { + autoRetryValue = config.autoRetry; + } + else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { + autoRetryValue = config.retryOptions.autoRetry; + } + let maxRetryValue = MAX_RETRY_DEFAULT; + if (config.maxRetries !== undefined) { + maxRetryValue = config.maxRetries; + } + else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { + maxRetryValue = config.retryOptions.maxRetries; + } + requestDefaults.headers = this._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const options = { + request: teeny_request_1.teenyRequest.defaults(requestDefaults), + retries: autoRetryValue !== false ? maxRetryValue : 0, + noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, + shouldRetryFn(httpRespMessage) { + var _a, _b; + const err = util.parseHttpRespMessage(httpRespMessage).err; + if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { + return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); + } + return err && util.shouldRetryRequest(err); + }, + maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, + retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, + totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout, + }; + if (typeof reqOpts.maxRetries === 'number') { + options.retries = reqOpts.maxRetries; + options.noResponseRetries = reqOpts.maxRetries; + } + if (!config.stream) { + return (0, retry_request_1.default)(reqOpts, options, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err, response, body) => { + util.handleResp(err, response, body, callback); + }); + } + const dup = config.stream; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let requestStream; + const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; + if (isGetRequest) { + requestStream = (0, retry_request_1.default)(reqOpts, options); + dup.setReadable(requestStream); + } + else { + // Streaming writable HTTP requests cannot be retried. + requestStream = options.request(reqOpts); + dup.setWritable(requestStream); + } + // Replay the Request events back to the stream. + requestStream + .on('error', dup.destroy.bind(dup)) + .on('response', dup.emit.bind(dup, 'response')) + .on('complete', dup.emit.bind(dup, 'complete')); + dup.abort = requestStream.abort; + return dup; + } + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. + */ + decorateRequest(reqOpts, projectId) { + delete reqOpts.autoPaginate; + delete reqOpts.autoPaginateVal; + delete reqOpts.objectMode; + if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { + delete reqOpts.qs.autoPaginate; + delete reqOpts.qs.autoPaginateVal; + reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); + } + if (Array.isArray(reqOpts.multipart)) { + reqOpts.multipart = reqOpts.multipart.map(part => { + return (0, projectify_1.replaceProjectIdToken)(part, projectId); + }); + } + if (reqOpts.json !== null && typeof reqOpts.json === 'object') { + delete reqOpts.json.autoPaginate; + delete reqOpts.json.autoPaginateVal; + reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); + } + reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); + return reqOpts; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + isCustomType(unknown, module) { + function getConstructorName(obj) { + return obj.constructor && obj.constructor.name.toLowerCase(); + } + const moduleNameParts = module.split('/'); + const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); + const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); + if (subModuleName && getConstructorName(unknown) !== subModuleName) { + return false; + } + let walkingModule = unknown; + // eslint-disable-next-line no-constant-condition + while (true) { + if (getConstructorName(walkingModule) === parentModuleName) { + return true; + } + walkingModule = walkingModule.parent; + if (!walkingModule) { + return false; + } + } + } + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback(optionsOrCallback, cb) { + return typeof optionsOrCallback === 'function' + ? 
[{}, optionsOrCallback] + : [optionsOrCallback, cb]; + } + _getDefaultHeaders(gcclGcsCmd) { + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (gcclGcsCmd) { + headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`; + } + return headers; + } +} +exports.Util = Util; +/** + * Basic Passthrough Stream that records the number of bytes read + * every time the cursor is moved. + */ +class ProgressStream extends stream_1.Transform { + constructor() { + super(...arguments); + this.bytesRead = 0; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + _transform(chunk, encoding, callback) { + this.bytesRead += chunk.length; + this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); + this.push(chunk); + callback(); + } +} +const util = new Util(); +exports.util = util; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/notification.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/notification.d.ts new file mode 100644 index 0000000..0d3341a --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/notification.d.ts @@ -0,0 +1,106 @@ +import { BaseMetadata, ServiceObject } from './nodejs-common/index.js'; +import { ResponseBody } from './nodejs-common/util.js'; +import { Bucket } from './bucket.js'; +export interface DeleteNotificationOptions { + userProject?: string; +} +export interface GetNotificationMetadataOptions { + userProject?: string; +} +/** + * @typedef {array} GetNotificationMetadataResponse + * @property {object} 0 The notification metadata. + * @property {object} 1 The full API response. + */ +export type GetNotificationMetadataResponse = [ResponseBody, unknown]; +/** + * @callback GetNotificationMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} files The notification metadata. + * @param {object} apiResponse The full API response. + */ +export interface GetNotificationMetadataCallback { + (err: Error | null, metadata?: ResponseBody, apiResponse?: unknown): void; +} +/** + * @typedef {array} GetNotificationResponse + * @property {Notification} 0 The {@link Notification} + * @property {object} 1 The full API response. + */ +export type GetNotificationResponse = [Notification, unknown]; +export interface GetNotificationOptions { + /** + * Automatically create the object if it does not exist. Default: `false`. + */ + autoCreate?: boolean; + /** + * The ID of the project which will be billed for the request. + */ + userProject?: string; +} +/** + * @callback GetNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The {@link Notification}. + * @param {object} apiResponse The full API response. + */ +export interface GetNotificationCallback { + (err: Error | null, notification?: Notification | null, apiResponse?: unknown): void; +} +/** + * @callback DeleteNotificationCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. 
+ */ +export interface DeleteNotificationCallback { + (err: Error | null, apiResponse?: unknown): void; +} +export interface NotificationMetadata extends BaseMetadata { + custom_attributes?: { + [key: string]: string; + }; + event_types?: string[]; + object_name_prefix?: string; + payload_format?: 'JSON_API_V1' | 'NONE'; + topic?: string; +} +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. + * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +declare class Notification extends ServiceObject { + constructor(bucket: Bucket, id: string); +} +/** + * Reference to the {@link Notification} class. + * @name module:@google-cloud/storage.Notification + * @see Notification + */ +export { Notification }; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/notification.js b/node_modules/@google-cloud/storage/build/cjs/src/notification.js new file mode 100644 index 0000000..6bf57a3 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/notification.js @@ -0,0 +1,265 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Notification = void 0; +const index_js_1 = require("./nodejs-common/index.js"); +const promisify_1 = require("@google-cloud/promisify"); +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. + * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. 
+ * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +class Notification extends index_js_1.ServiceObject { + constructor(bucket, id) { + const requestQueryObject = {}; + const methods = { + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * @method Notification#create + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationRequest} [options] Metadata to set for + * the notification. + * @param {CreateNotificationCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.create(function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.create().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: true, + /** + * @typedef {array} DeleteNotificationResponse + * @property {object} 0 The full API response. + */ + /** + * Permanently deletes a notification subscription. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteNotificationCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/deleteNotification.js + * region_tag:storage_delete_bucket_notification + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get a notification and its metadata if it exists. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * See {@link Bucket#createNotification} for create options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false`. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationCallback} [callback] Callback function. 
+ * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.get(function(err, notification, apiResponse) { + * // `notification.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.get().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get the notification's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/getMetadataNotifications.js + * region_tag:storage_print_pubsub_bucket_notification + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} NotificationExistsResponse + * @property {boolean} 0 Whether the notification exists or not. + */ + /** + * @callback NotificationExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the notification exists or not. + */ + /** + * Check if the notification exists. + * + * @method Notification#exists + * @param {NotificationExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: true, + }; + super({ + parent: bucket, + baseUrl: '/notificationConfigs', + id: id.toString(), + createMethod: bucket.createNotification.bind(bucket), + methods, + }); + } +} +exports.Notification = Notification; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. 
+ */ +(0, promisify_1.promisifyAll)(Notification); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/package-json-helper.cjs b/node_modules/@google-cloud/storage/build/cjs/src/package-json-helper.cjs new file mode 100644 index 0000000..794923b --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/package-json-helper.cjs @@ -0,0 +1,21 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* eslint-disable node/no-missing-require */ + +function getPackageJSON() { + return require('../../../package.json'); +} + +exports.getPackageJSON = getPackageJSON; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/resumable-upload.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/resumable-upload.d.ts new file mode 100644 index 0000000..d88fe04 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/resumable-upload.d.ts @@ -0,0 +1,339 @@ +/// +/// +import { GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { GoogleAuthOptions } from 'google-auth-library'; +import { Writable, WritableOptions } from 'stream'; +import { RetryOptions, PreconditionOptions } from './storage.js'; +import { GCCL_GCS_CMD_KEY } from './nodejs-common/util.js'; +import { FileMetadata } from './file.js'; +export declare const PROTOCOL_REGEX: RegExp; +export interface ErrorWithCode extends Error { + code: number; + status?: number | string; +} +export type CreateUriCallback = (err: Error | null, uri?: string) => void; +export interface Encryption { + key: {}; + hash: {}; +} +export type PredefinedAcl = 'authenticatedRead' | 'bucketOwnerFullControl' | 'bucketOwnerRead' | 'private' | 'projectPrivate' | 'publicRead'; +export interface QueryParameters extends PreconditionOptions { + contentEncoding?: string; + kmsKeyName?: string; + predefinedAcl?: PredefinedAcl; + projection?: 'full' | 'noAcl'; + userProject?: string; +} +export interface UploadConfig extends Pick { + /** + * The API endpoint used for the request. + * Defaults to `storage.googleapis.com`. + * + * **Warning**: + * If this value does not match the current GCP universe an emulator context + * will be assumed and authentication will be bypassed. + */ + apiEndpoint?: string; + /** + * The name of the destination bucket. + */ + bucket: string; + /** + * The name of the destination file. + */ + file: string; + /** + * The GoogleAuthOptions passed to google-auth-library + */ + authConfig?: GoogleAuthOptions; + /** + * If you want to re-use an auth client from google-auto-auth, pass an + * instance here. + * Defaults to GoogleAuth and gets automatically overridden if an + * emulator context is detected. + */ + authClient?: { + request: (opts: GaxiosOptions) => Promise> | GaxiosPromise; + }; + /** + * Create a separate request per chunk. + * + * This value is in bytes and should be a multiple of 256 KiB (2^18). + * We recommend using at least 8 MiB for the chunk size. 
+ * + * @link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + chunkSize?: number; + /** + * For each API request we send, you may specify custom request options that + * we'll add onto the request. The request options follow the gaxios API: + * https://github.com/googleapis/gaxios#request-options. + */ + customRequestOptions?: GaxiosOptions; + /** + * This will cause the upload to fail if the current generation of the remote + * object does not match the one provided here. + */ + generation?: number; + /** + * Set to `true` if the upload is only a subset of the overall object to upload. + * This can be used when planning to continue the upload an object in another + * session. + * + * **Must be used with {@link UploadConfig.chunkSize} != `0`**. + * + * If this is a continuation of a previous upload, {@link UploadConfig.offset} + * should be set. + * + * @see {@link checkUploadStatus} for checking the status of an existing upload. + */ + isPartialUpload?: boolean; + /** + * A customer-supplied encryption key. See + * https://cloud.google.com/storage/docs/encryption#customer-supplied. + */ + key?: string | Buffer; + /** + * Resource name of the Cloud KMS key, of the form + * `projects/my-project/locations/global/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overrides the object metadata's + * `kms_key_name` value, if any. + */ + kmsKeyName?: string; + /** + * Any metadata you wish to set on the object. + */ + metadata?: ConfigMetadata; + /** + * The starting byte in relation to the final uploaded object. + * **Must be used with {@link UploadConfig.uri}**. + * + * If resuming an interrupted stream, do not supply this argument unless you + * know the exact number of bytes the service has AND the provided stream's + * first byte is a continuation from that provided offset. If resuming an + * interrupted stream and this option has not been provided, we will treat + * the provided upload stream as the object to upload - where the first byte + * of the upload stream is the first byte of the object to upload; skipping + * any bytes that are already present on the server. + * + * @see {@link checkUploadStatus} for checking the status of an existing upload. + * @see {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload#resume-upload.} + */ + offset?: number; + /** + * Set an Origin header when creating the resumable upload URI. + */ + origin?: string; + /** + * Specify query parameters that go along with the initial upload request. See + * https://cloud.google.com/storage/docs/json_api/v1/objects/insert#parameters + */ + params?: QueryParameters; + /** + * Apply a predefined set of access controls to the created file. + */ + predefinedAcl?: PredefinedAcl; + /** + * Make the uploaded file private. (Alias for config.predefinedAcl = + * 'private') + */ + private?: boolean; + /** + * Make the uploaded file public. (Alias for config.predefinedAcl = + * 'publicRead') + */ + public?: boolean; + /** + * The service domain for a given Cloud universe. + */ + universeDomain?: string; + /** + * If you already have a resumable URI from a previously-created resumable + * upload, just pass it in here and we'll use that. 
+ * + * If resuming an interrupted stream and the {@link UploadConfig.offset} + * option has not been provided, we will treat the provided upload stream as + * the object to upload - where the first byte of the upload stream is the + * first byte of the object to upload; skipping any bytes that are already + * present on the server. + * + * @see {@link checkUploadStatus} for checking the status of an existing upload. + */ + uri?: string; + /** + * If the bucket being accessed has requesterPays functionality enabled, this + * can be set to control which project is billed for the access of this file. + */ + userProject?: string; + /** + * Configuration options for retrying retryable errors. + */ + retryOptions: RetryOptions; + [GCCL_GCS_CMD_KEY]?: string; +} +export interface ConfigMetadata { + [key: string]: any; + /** + * Set the length of the object being uploaded. If uploading a partial + * object, this is the overall size of the finalized object. + */ + contentLength?: number; + /** + * Set the content type of the incoming data. + */ + contentType?: string; +} +export interface GoogleInnerError { + reason?: string; +} +export interface ApiError extends Error { + code?: number; + errors?: GoogleInnerError[]; +} +export interface CheckUploadStatusConfig { + /** + * Set to `false` to disable retries within this method. + * + * @defaultValue `true` + */ + retry?: boolean; +} +export declare class Upload extends Writable { + #private; + bucket: string; + file: string; + apiEndpoint: string; + baseURI: string; + authConfig?: { + scopes?: string[]; + }; + authClient: { + request: (opts: GaxiosOptions) => Promise> | GaxiosPromise; + }; + cacheKey: string; + chunkSize?: number; + customRequestOptions: GaxiosOptions; + generation?: number; + key?: string | Buffer; + kmsKeyName?: string; + metadata: ConfigMetadata; + offset?: number; + origin?: string; + params: QueryParameters; + predefinedAcl?: PredefinedAcl; + private?: boolean; + public?: boolean; + uri?: string; + userProject?: string; + encryption?: Encryption; + uriProvidedManually: boolean; + numBytesWritten: number; + numRetries: number; + contentLength: number | '*'; + retryOptions: RetryOptions; + timeOfFirstRequest: number; + isPartialUpload: boolean; + private currentInvocationId; + /** + * A cache of buffers written to this instance, ready for consuming + */ + private writeBuffers; + private numChunksReadInRequest; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. + * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + private localWriteCache; + private localWriteCacheByteLength; + private upstreamEnded; + constructor(cfg: UploadConfig); + /** + * Prevent 'finish' event until the upload has succeeded. + * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent?: () => void): void; + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk: Buffer | string, encoding: BufferEncoding, readCallback?: () => void): void; + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. + */ + private prependLocalBufferToUpstream; + /** + * Retrieves data from upstream's buffer. 
+ * + * @param limit The maximum amount to return from the buffer. + */ + private pullFromChunkBuffer; + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + private waitForNextChunk; + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. + * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + private upstreamIterator; + createURI(): Promise; + createURI(callback: CreateUriCallback): void; + protected createURIAsync(): Promise; + private continueUploading; + startUploading(): Promise; + private responseHandler; + /** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ + checkUploadStatus(config?: CheckUploadStatusConfig): Promise>; + private getAndSetOffset; + private makeRequest; + private makeRequestStream; + /** + * @return {bool} is the request good? + */ + private onResponse; + /** + * @param resp GaxiosResponse object from previous attempt + */ + private attemptDelayedRetry; + /** + * The amount of time to wait before retrying the request, in milliseconds. + * If negative, do not retry. + * + * @returns the amount of time to wait, in milliseconds. + */ + private getRetryDelay; + private sanitizeEndpoint; + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status: number): boolean; +} +export declare function upload(cfg: UploadConfig): Upload; +export declare function createURI(cfg: UploadConfig): Promise; +export declare function createURI(cfg: UploadConfig, callback: CreateUriCallback): void; +/** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ +export declare function checkUploadStatus(cfg: UploadConfig & Required>): Promise>; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/resumable-upload.js b/node_modules/@google-cloud/storage/build/cjs/src/resumable-upload.js new file mode 100644 index 0000000..c1aa07f --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/resumable-upload.js @@ -0,0 +1,886 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkUploadStatus = exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0; +const abort_controller_1 = __importDefault(require("abort-controller")); +const crypto_1 = require("crypto"); +const gaxios = __importStar(require("gaxios")); +const google_auth_library_1 = require("google-auth-library"); +const stream_1 = require("stream"); +const async_retry_1 = __importDefault(require("async-retry")); +const uuid = __importStar(require("uuid")); +const util_js_1 = require("./util.js"); +const util_js_2 = require("./nodejs-common/util.js"); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = require("./package-json-helper.cjs"); +const NOT_FOUND_STATUS_CODE = 404; +const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +class Upload extends stream_1.Writable { + constructor(cfg) { + var _a; + super(cfg); + _Upload_instances.add(this); + this.numBytesWritten = 0; + this.numRetries = 0; + this.currentInvocationId = { + checkUploadStatus: uuid.v4(), + chunk: uuid.v4(), + uri: uuid.v4(), + }; + /** + * A cache of buffers written to this instance, ready for consuming + */ + this.writeBuffers = []; + this.numChunksReadInRequest = 0; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. + * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; + this.upstreamEnded = false; + _Upload_gcclGcsCmd.set(this, void 0); + cfg = cfg || {}; + if (!cfg.bucket || !cfg.file) { + throw new Error('A bucket and file name are required'); + } + if (cfg.offset && !cfg.uri) { + throw new RangeError('Cannot provide an `offset` without providing a `uri`'); + } + if (cfg.isPartialUpload && !cfg.chunkSize) { + throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`'); + } + cfg.authConfig = cfg.authConfig || {}; + cfg.authConfig.scopes = [ + 'https://www.googleapis.com/auth/devstorage.full_control', + ]; + this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); + const universe = cfg.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + this.apiEndpoint = `https://storage.${universe}`; + if (cfg.apiEndpoint && cfg.apiEndpoint !== this.apiEndpoint) { + this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); + const hostname = new URL(this.apiEndpoint).hostname; + // check if it is a domain of a known universe + const isDomain = hostname === universe; + const isDefaultUniverseDomain = hostname === google_auth_library_1.DEFAULT_UNIVERSE; + // check if it is a subdomain of a known universe + // by checking a last (universe's length + 1) of a hostname + const isSubDomainOfUniverse = hostname.slice(-(universe.length + 1)) === `.${universe}`; + const isSubDomainOfDefaultUniverse = hostname.slice(-(google_auth_library_1.DEFAULT_UNIVERSE.length + 1)) === + `.${google_auth_library_1.DEFAULT_UNIVERSE}`; + if (!isDomain && + !isDefaultUniverseDomain && + !isSubDomainOfUniverse && + !isSubDomainOfDefaultUniverse) { + // a custom, non-universe domain, + // use gaxios + 
this.authClient = gaxios; + } + } + this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; + this.bucket = cfg.bucket; + const cacheKeyElements = [cfg.bucket, cfg.file]; + if (typeof cfg.generation === 'number') { + cacheKeyElements.push(`${cfg.generation}`); + } + this.cacheKey = cacheKeyElements.join('/'); + this.customRequestOptions = cfg.customRequestOptions || {}; + this.file = cfg.file; + this.generation = cfg.generation; + this.kmsKeyName = cfg.kmsKeyName; + this.metadata = cfg.metadata || {}; + this.offset = cfg.offset; + this.origin = cfg.origin; + this.params = cfg.params || {}; + this.userProject = cfg.userProject; + this.chunkSize = cfg.chunkSize; + this.retryOptions = cfg.retryOptions; + this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false; + if (cfg.key) { + const base64Key = Buffer.from(cfg.key).toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + this.predefinedAcl = cfg.predefinedAcl; + if (cfg.private) + this.predefinedAcl = 'private'; + if (cfg.public) + this.predefinedAcl = 'publicRead'; + const autoRetry = cfg.retryOptions.autoRetry; + this.uriProvidedManually = !!cfg.uri; + this.uri = cfg.uri; + if (this.offset) { + // we're resuming an incomplete upload + this.numBytesWritten = this.offset; + } + this.numRetries = 0; // counter for number of retries currently executed + if (!autoRetry) { + cfg.retryOptions.maxRetries = 0; + } + this.timeOfFirstRequest = Date.now(); + const contentLength = cfg.metadata + ? Number(cfg.metadata.contentLength) + : NaN; + this.contentLength = isNaN(contentLength) ? '*' : contentLength; + __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], "f"); + this.once('writing', () => { + if (this.uri) { + this.continueUploading(); + } + else { + this.createURI(err => { + if (err) { + return this.destroy(err); + } + this.startUploading(); + return; + }); + } + }); + } + /** + * Prevent 'finish' event until the upload has succeeded. + * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent = () => { }) { + this.upstreamEnded = true; + this.once('uploadFinished', fireFinishEvent); + process.nextTick(() => { + this.emit('upstreamFinished'); + // it's possible `_write` may not be called - namely for empty object uploads + this.emit('writing'); + }); + } + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk, encoding, readCallback = () => { }) { + // Backwards-compatible event + this.emit('writing'); + this.writeBuffers.push(typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk); + this.once('readFromChunkBuffer', readCallback); + process.nextTick(() => this.emit('wroteToChunkBuffer')); + } + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. 
+ */ + prependLocalBufferToUpstream(keepLastBytes) { + // Typically, the upstream write buffers should be smaller than the local + // cache, so we can save time by setting the local cache as the new + // upstream write buffer array and appending the old array to it + let initialBuffers = []; + if (keepLastBytes) { + // we only want the last X bytes + let bytesKept = 0; + while (keepLastBytes > bytesKept) { + // load backwards because we want the last X bytes + // note: `localWriteCacheByteLength` is reset below + let buf = this.localWriteCache.pop(); + if (!buf) + break; + bytesKept += buf.byteLength; + if (bytesKept > keepLastBytes) { + // we have gone over the amount desired, let's keep the last X bytes + // of this buffer + const diff = bytesKept - keepLastBytes; + buf = buf.subarray(diff); + bytesKept -= diff; + } + initialBuffers.unshift(buf); + } + } + else { + // we're keeping all of the local cache, simply use it as the initial buffer + initialBuffers = this.localWriteCache; + } + // Append the old upstream to the new + const append = this.writeBuffers; + this.writeBuffers = initialBuffers; + for (const buf of append) { + this.writeBuffers.push(buf); + } + // reset last buffers sent + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + */ + *pullFromChunkBuffer(limit) { + while (limit) { + const buf = this.writeBuffers.shift(); + if (!buf) + break; + let bufToYield = buf; + if (buf.byteLength > limit) { + bufToYield = buf.subarray(0, limit); + this.writeBuffers.unshift(buf.subarray(limit)); + limit = 0; + } + else { + limit -= buf.byteLength; + } + yield bufToYield; + // Notify upstream we've read from the buffer and we're able to consume + // more. It can also potentially send more data down as we're currently + // iterating. + this.emit('readFromChunkBuffer'); + } + } + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + async waitForNextChunk() { + const willBeMoreChunks = await new Promise(resolve => { + // There's data available - it should be digested + if (this.writeBuffers.length) { + return resolve(true); + } + // The upstream writable ended, we shouldn't expect any more data. + if (this.upstreamEnded) { + return resolve(false); + } + // Nothing immediate seems to be determined. We need to prepare some + // listeners to determine next steps... + const wroteToChunkBufferCallback = () => { + removeListeners(); + return resolve(true); + }; + const upstreamFinishedCallback = () => { + removeListeners(); + // this should be the last chunk, if there's anything there + if (this.writeBuffers.length) + return resolve(true); + return resolve(false); + }; + // Remove listeners when we're ready to callback. + const removeListeners = () => { + this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); + this.removeListener('upstreamFinished', upstreamFinishedCallback); + }; + // If there's data recently written it should be digested + this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); + // If the upstream finishes let's see if there's anything to grab + this.once('upstreamFinished', upstreamFinishedCallback); + }); + return willBeMoreChunks; + } + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. 
+ * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + async *upstreamIterator(limit = Infinity) { + // read from upstream chunk buffer + while (limit && (await this.waitForNextChunk())) { + // read until end or limit has been reached + for (const chunk of this.pullFromChunkBuffer(limit)) { + limit -= chunk.byteLength; + yield chunk; + } + } + } + createURI(callback) { + if (!callback) { + return this.createURIAsync(); + } + this.createURIAsync().then(r => callback(null, r), callback); + } + async createURIAsync() { + const metadata = { ...this.metadata }; + const headers = {}; + // Delete content length and content type from metadata if they exist. + // These are headers and should not be sent as part of the metadata. + if (metadata.contentLength) { + headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); + delete metadata.contentLength; + } + if (metadata.contentType) { + headers['X-Upload-Content-Type'] = metadata.contentType; + delete metadata.contentType; + } + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + // Check if headers already exist before creating new ones + const reqOpts = { + method: 'POST', + url: [this.baseURI, this.bucket, 'o'].join('/'), + params: Object.assign({ + name: this.file, + uploadType: 'resumable', + }, this.params), + data: metadata, + headers: { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + ...headers, + }, + }; + if (metadata.contentLength) { + reqOpts.headers['X-Upload-Content-Length'] = + metadata.contentLength.toString(); + } + if (metadata.contentType) { + reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; + } + if (typeof this.generation !== 'undefined') { + reqOpts.params.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName) { + reqOpts.params.kmsKeyName = this.kmsKeyName; + } + if (this.predefinedAcl) { + reqOpts.params.predefinedAcl = this.predefinedAcl; + } + if (this.origin) { + reqOpts.headers.Origin = this.origin; + } + const uri = await (0, async_retry_1.default)(async (bail) => { + var _a, _b, _c; + try { + const res = await this.makeRequest(reqOpts); + // We have successfully got a URI we can now create a new invocation id + this.currentInvocationId.uri = uuid.v4(); + return res.headers.location; + } + catch (err) { + const e = err; + const apiError = { + code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, + name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, + message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText, + errors: [ + { + reason: e.code, + }, + ], + }; + if (this.retryOptions.maxRetries > 0 && + this.retryOptions.retryableErrorFn(apiError)) { + throw e; + } + else { + return bail(e); + } + } + }, { + retries: this.retryOptions.maxRetries, + factor: this.retryOptions.retryDelayMultiplier, + maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + this.uri = uri; + this.offset = 0; + // emit the newly generated URI for future reuse, if necessary. 
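// Usage note: a caller that needs to resume after a restart can listen for this
// 'uri' event, persist the value, and later pass it back as `UploadConfig.uri`
// so a new Upload picks up from the bytes the server already holds (see the
// `uri` documentation in resumable-upload.d.ts above).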
+ this.emit('uri', uri); + return uri; + } + async continueUploading() { + var _a; + (_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset()); + return this.startUploading(); + } + async startUploading() { + const multiChunkMode = !!this.chunkSize; + let responseReceived = false; + this.numChunksReadInRequest = 0; + if (!this.offset) { + this.offset = 0; + } + // Check if the offset (server) is too far behind the current stream + if (this.offset < this.numBytesWritten) { + const delta = this.numBytesWritten - this.offset; + const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`; + this.emit('error', new RangeError(message)); + return; + } + // Check if we should 'fast-forward' to the relevant data to upload + if (this.numBytesWritten < this.offset) { + // 'fast-forward' to the byte where we need to upload. + // only push data from the byte after the one we left off on + const fastForwardBytes = this.offset - this.numBytesWritten; + for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { + _chunk; // discard the data up until the point we want + } + this.numBytesWritten = this.offset; + } + let expectedUploadSize = undefined; + // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available + if (typeof this.contentLength === 'number') { + expectedUploadSize = this.contentLength - this.numBytesWritten; + } + // `expectedUploadSize` should be no more than the `chunkSize`. + // It's possible this is the last chunk request for a multiple + // chunk upload, thus smaller than the chunk size. + if (this.chunkSize) { + expectedUploadSize = expectedUploadSize + ? Math.min(this.chunkSize, expectedUploadSize) + : this.chunkSize; + } + // A queue for the upstream data + const upstreamQueue = this.upstreamIterator(expectedUploadSize); + // The primary read stream for this request. This stream retrieves no more + // than the exact requested amount from upstream. 
+ const requestStream = new stream_1.Readable({ + read: async () => { + // Don't attempt to retrieve data upstream if we already have a response + if (responseReceived) + requestStream.push(null); + const result = await upstreamQueue.next(); + if (result.value) { + this.numChunksReadInRequest++; + if (multiChunkMode) { + // save ever buffer used in the request in multi-chunk mode + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + else { + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + this.numBytesWritten += result.value.byteLength; + this.emit('progress', { + bytesWritten: this.numBytesWritten, + contentLength: this.contentLength, + }); + requestStream.push(result.value); + } + if (result.done) { + requestStream.push(null); + } + }, + }); + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }; + // If using multiple chunk upload, set appropriate header + if (multiChunkMode) { + // We need to know how much data is available upstream to set the `Content-Range` header. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + for await (const chunk of this.upstreamIterator(expectedUploadSize)) { + // This will conveniently track and keep the size of the buffers. + // We will reach either the expected upload size or the remainder of the stream. + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk); + } + // This is the sum from the `#addLocalBufferCache` calls + const bytesToUpload = this.localWriteCacheByteLength; + // Important: we want to know if the upstream has ended and the queue is empty before + // unshifting data back into the queue. This way we will know if this is the last request or not. + const isLastChunkOfUpload = !(await this.waitForNextChunk()); + // Important: put the data back in the queue for the actual upload + this.prependLocalBufferToUpstream(); + let totalObjectSize = this.contentLength; + if (typeof this.contentLength !== 'number' && + isLastChunkOfUpload && + !this.isPartialUpload) { + // Let's let the server know this is the last chunk of the object since we didn't set it before. + totalObjectSize = bytesToUpload + this.numBytesWritten; + } + // `- 1` as the ending byte is inclusive in the request. 
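// Worked example (hypothetical numbers): with offset = numBytesWritten = 0 and
// bytesToUpload = 262144 for an object known to be 1000000 bytes long, the code
// below sends `Content-Length: 262144` and `Content-Range: bytes 0-262143/1000000`;
// if the overall size is still unknown and more chunks will follow, the trailing
// total stays `*`.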
+ const endingByte = bytesToUpload + this.numBytesWritten - 1; + // `Content-Length` for multiple chunk uploads is the size of the chunk, + // not the overall object + headers['Content-Length'] = bytesToUpload; + headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; + } + else { + headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; + } + const reqOpts = { + method: 'PUT', + url: this.uri, + headers, + body: requestStream, + }; + try { + const resp = await this.makeRequestStream(reqOpts); + if (resp) { + responseReceived = true; + await this.responseHandler(resp); + } + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + // Process the API response to look for errors that came in + // the response body. + async responseHandler(resp) { + if (resp.data.error) { + this.destroy(resp.data.error); + return; + } + // At this point we can safely create a new id for the chunk + this.currentInvocationId.chunk = uuid.v4(); + const moreDataToUpload = await this.waitForNextChunk(); + const shouldContinueWithNextMultiChunkRequest = this.chunkSize && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + resp.headers.range && + moreDataToUpload; + /** + * This is true when we're expecting to upload more data in a future request, + * yet the upstream for the upload session has been exhausted. + */ + const shouldContinueUploadInAnotherRequest = this.isPartialUpload && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + !moreDataToUpload; + if (shouldContinueWithNextMultiChunkRequest) { + // Use the upper value in this header to determine where to start the next chunk. + // We should not assume that the server received all bytes sent in the request. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const range = resp.headers.range; + this.offset = Number(range.split('-')[1]) + 1; + // We should not assume that the server received all bytes sent in the request. + // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const missingBytes = this.numBytesWritten - this.offset; + if (missingBytes) { + // As multi-chunk uploads send one chunk per request and pulls one + // chunk into the pipeline, prepending the missing bytes back should + // be fine for the next request. + this.prependLocalBufferToUpstream(missingBytes); + this.numBytesWritten -= missingBytes; + } + else { + // No bytes missing - no need to keep the local cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + // continue uploading next chunk + this.continueUploading(); + } + else if (!this.isSuccessfulResponse(resp.status) && + !shouldContinueUploadInAnotherRequest) { + const err = new Error('Upload failed'); + err.code = resp.status; + err.name = 'Upload failed'; + if (resp === null || resp === void 0 ? void 0 : resp.data) { + err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; + } + this.destroy(err); + } + else { + // no need to keep the cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + if (resp && resp.data) { + resp.data.size = Number(resp.data.size); + } + this.emit('metadata', resp.data); + // Allow the object (Upload) to continue naturally so the user's + // "finish" event fires. 
+ this.emit('uploadFinished'); + } + } + /** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ + async checkUploadStatus(config = {}) { + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const opts = { + method: 'PUT', + url: this.uri, + headers: { + 'Content-Length': 0, + 'Content-Range': 'bytes */*', + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }, + }; + try { + const resp = await this.makeRequest(opts); + // Successfully got the offset we can now create a new offset invocation id + this.currentInvocationId.checkUploadStatus = uuid.v4(); + return resp; + } + catch (e) { + if (config.retry === false || + !(e instanceof Error) || + !this.retryOptions.retryableErrorFn(e)) { + throw e; + } + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + throw e; + } + await new Promise(res => setTimeout(res, retryDelay)); + return this.checkUploadStatus(config); + } + } + async getAndSetOffset() { + try { + // we want to handle retries in this method. + const resp = await this.checkUploadStatus({ retry: false }); + if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { + if (typeof resp.headers.range === 'string') { + this.offset = Number(resp.headers.range.split('-')[1]) + 1; + return; + } + } + this.offset = 0; + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + async makeRequest(reqOpts) { + if (this.encryption) { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryption.hash.toString(); + } + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + // Let gaxios know we will handle a 308 error code ourselves. 
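// Note: 308 ("Resume Incomplete") is how the resumable-upload protocol reports a
// session that has accepted some bytes but is not yet finalized; the accompanying
// `Range` response header (consumed by responseHandler and getAndSetOffset above)
// indicates how much the server has persisted, so it should not surface as an error.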
+ reqOpts.validateStatus = (status) => { + return (this.isSuccessfulResponse(status) || + status === RESUMABLE_INCOMPLETE_STATUS_CODE); + }; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + async makeRequestStream(reqOpts) { + const controller = new abort_controller_1.default(); + const errorCallback = () => controller.abort(); + this.once('error', errorCallback); + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + reqOpts.signal = controller.signal; + reqOpts.validateStatus = () => true; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + const successfulRequest = this.onResponse(res); + this.removeListener('error', errorCallback); + return successfulRequest ? res : null; + } + /** + * @return {bool} is the request good? + */ + onResponse(resp) { + if (resp.status !== 200 && + this.retryOptions.retryableErrorFn({ + code: resp.status, + message: resp.statusText, + name: resp.statusText, + })) { + this.attemptDelayedRetry(resp); + return false; + } + this.emit('response', resp); + return true; + } + /** + * @param resp GaxiosResponse object from previous attempt + */ + attemptDelayedRetry(resp) { + if (this.numRetries < this.retryOptions.maxRetries) { + if (resp.status === NOT_FOUND_STATUS_CODE && + this.numChunksReadInRequest === 0) { + this.startUploading(); + } + else { + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`)); + return; + } + // Unshift the local cache back in case it's needed for the next request. + this.numBytesWritten -= this.localWriteCacheByteLength; + this.prependLocalBufferToUpstream(); + // We don't know how much data has been received by the server. + // `continueUploading` will recheck the offset via `getAndSetOffset`. + // If `offset` < `numberBytesReceived` then we will raise a RangeError + // as we've streamed too much data that has been missed - this should + // not be the case for multi-chunk uploads as `lastChunkSent` is the + // body of the entire request. + this.offset = undefined; + setTimeout(this.continueUploading.bind(this), retryDelay); + } + this.numRetries++; + } + else { + this.destroy(new Error('Retry limit exceeded - ' + resp.data)); + } + } + /** + * The amount of time to wait before retrying the request, in milliseconds. + * If negative, do not retry. + * + * @returns the amount of time to wait, in milliseconds. + */ + getRetryDelay() { + const randomMs = Math.round(Math.random() * 1000); + const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * + 1000 + + randomMs; + const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - + (Date.now() - this.timeOfFirstRequest); + const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; + return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); + } + /* + * Prepare user-defined API endpoint for compatibility with our API. 
+ */ + sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status) { + return status >= 200 && status < 300; + } +} +exports.Upload = Upload; +_Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() { + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; +}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) { + this.localWriteCache.push(buf); + this.localWriteCacheByteLength += buf.byteLength; +}; +function upload(cfg) { + return new Upload(cfg); +} +exports.upload = upload; +function createURI(cfg, callback) { + const up = new Upload(cfg); + if (!callback) { + return up.createURI(); + } + up.createURI().then(r => callback(null, r), callback); +} +exports.createURI = createURI; +/** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ +function checkUploadStatus(cfg) { + const up = new Upload(cfg); + return up.checkUploadStatus(); +} +exports.checkUploadStatus = checkUploadStatus; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/signer.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/signer.d.ts new file mode 100644 index 0000000..b15435e --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/signer.d.ts @@ -0,0 +1,148 @@ +/// +/// +import * as http from 'http'; +import { Storage } from './storage.js'; +import { GoogleAuth } from 'google-auth-library'; +type GoogleAuthLike = Pick; +/** + * @deprecated Use {@link GoogleAuth} instead + */ +export interface AuthClient { + sign(blobToSign: string): Promise; + getCredentials(): Promise<{ + client_email?: string; + }>; +} +export interface BucketI { + name: string; +} +export interface FileI { + name: string; +} +export interface Query { + [key: string]: string; +} +export interface GetSignedUrlConfigInternal { + expiration: number; + accessibleAt?: Date; + method: string; + extensionHeaders?: http.OutgoingHttpHeaders; + queryParams?: Query; + cname?: string; + contentMd5?: string; + contentType?: string; + bucket: string; + file?: string; + /** + * The host for the generated signed URL + * + * @example + * 'https://localhost:8080/' + */ + host?: string | URL; + /** + * An endpoint for generating the signed URL + * + * @example + * 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + */ + signingEndpoint?: string | URL; +} +export interface SignerGetSignedUrlConfig { + method: 'GET' | 'PUT' | 'DELETE' | 'POST'; + expires: string | number | Date; + accessibleAt?: string | number | Date; + virtualHostedStyle?: boolean; + version?: 'v2' | 'v4'; + cname?: string; + extensionHeaders?: http.OutgoingHttpHeaders; + queryParams?: Query; + contentMd5?: string; + contentType?: string; + /** + * The host for the generated signed URL + * + * @example + * 'https://localhost:8080/' + */ + host?: string | URL; + /** + * An endpoint for generating the signed URL + * + * @example + * 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + */ + signingEndpoint?: string | URL; +} +export type SignerGetSignedUrlResponse = string; +export type GetSignedUrlResponse = [SignerGetSignedUrlResponse]; +export 
interface GetSignedUrlCallback { + (err: Error | null, url?: string): void; +} +export declare enum SignerExceptionMessages { + ACCESSIBLE_DATE_INVALID = "The accessible at date provided was invalid.", + EXPIRATION_BEFORE_ACCESSIBLE_DATE = "An expiration date cannot be before accessible date.", + X_GOOG_CONTENT_SHA256 = "The header X-Goog-Content-SHA256 must be a hexadecimal string." +} +/** + * @const {string} + * @deprecated - unused + */ +export declare const PATH_STYLED_HOST = "https://storage.googleapis.com"; +export declare class URLSigner { + private auth; + private bucket; + private file?; + /** + * A {@link Storage} object. + * + * @privateRemarks + * + * Technically this is a required field, however it would be a breaking change to + * move it before optional properties. In the next major we should refactor the + * constructor of this class to only accept a config object. + */ + private storage; + constructor(auth: AuthClient | GoogleAuthLike, bucket: BucketI, file?: FileI | undefined, + /** + * A {@link Storage} object. + * + * @privateRemarks + * + * Technically this is a required field, however it would be a breaking change to + * move it before optional properties. In the next major we should refactor the + * constructor of this class to only accept a config object. + */ + storage?: Storage); + getSignedUrl(cfg: SignerGetSignedUrlConfig): Promise; + private getSignedUrlV2; + private getSignedUrlV4; + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers: http.OutgoingHttpHeaders): string; + getCanonicalRequest(method: string, path: string, query: string, headers: string, signedHeaders: string, contentSha256?: string): string; + getCanonicalQueryParams(query: Query): string; + getResourcePath(cname: boolean, bucket: string, file?: string): string; + parseExpires(expires: string | number | Date, current?: Date): number; + parseAccessibleAt(accessibleAt?: string | number | Date): number; +} +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +export declare class SigningError extends Error { + name: string; +} +export {}; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/signer.js b/node_modules/@google-cloud/storage/build/cjs/src/signer.js new file mode 100644 index 0000000..8a8f6fb --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/signer.js @@ -0,0 +1,327 @@ +"use strict"; +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; +const crypto = __importStar(require("crypto")); +const url = __importStar(require("url")); +const storage_js_1 = require("./storage.js"); +const util_js_1 = require("./util.js"); +var SignerExceptionMessages; +(function (SignerExceptionMessages) { + SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; + SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; + SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; +})(SignerExceptionMessages || (exports.SignerExceptionMessages = SignerExceptionMessages = {})); +/* + * Default signing version for getSignedUrl is 'v2'. + */ +const DEFAULT_SIGNING_VERSION = 'v2'; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +/** + * @const {string} + * @deprecated - unused + */ +exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; +class URLSigner { + constructor(auth, bucket, file, + /** + * A {@link Storage} object. + * + * @privateRemarks + * + * Technically this is a required field, however it would be a breaking change to + * move it before optional properties. In the next major we should refactor the + * constructor of this class to only accept a config object. + */ + storage = new storage_js_1.Storage()) { + this.auth = auth; + this.bucket = bucket; + this.file = file; + this.storage = storage; + } + getSignedUrl(cfg) { + const expiresInSeconds = this.parseExpires(cfg.expires); + const method = cfg.method; + const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); + if (expiresInSeconds < accessibleAtInSeconds) { + throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); + } + let customHost; + // Default style is `path`. + const isVirtualHostedStyle = cfg.virtualHostedStyle || false; + if (cfg.cname) { + customHost = cfg.cname; + } + else if (isVirtualHostedStyle) { + customHost = `https://${this.bucket.name}.storage.${this.storage.universeDomain}`; + } + const secondsToMilliseconds = 1000; + const config = Object.assign({}, cfg, { + method, + expiration: expiresInSeconds, + accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), + bucket: this.bucket.name, + file: this.file ? 
(0, util_js_1.encodeURI)(this.file.name, false) : undefined, + }); + if (customHost) { + config.cname = customHost; + } + const version = cfg.version || DEFAULT_SIGNING_VERSION; + let promise; + if (version === 'v2') { + promise = this.getSignedUrlV2(config); + } + else if (version === 'v4') { + promise = this.getSignedUrlV4(config); + } + else { + throw new Error(`Invalid signed URL version: ${version}. Supported versions are 'v2' and 'v4'.`); + } + return promise.then(query => { + var _a; + query = Object.assign(query, cfg.queryParams); + const signedUrl = new url.URL(((_a = cfg.host) === null || _a === void 0 ? void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + signedUrl.search = (0, util_js_1.qsStringify)(query); + return signedUrl.href; + }); + } + getSignedUrlV2(config) { + const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); + const resourcePath = this.getResourcePath(false, config.bucket, config.file); + const blobToSign = [ + config.method, + config.contentMd5 || '', + config.contentType || '', + config.expiration, + canonicalHeadersString + resourcePath, + ].join('\n'); + const sign = async () => { + var _a; + const auth = this.auth; + try { + const signature = await auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const credentials = await auth.getCredentials(); + return { + GoogleAccessId: credentials.client_email, + Expires: config.expiration, + Signature: signature, + }; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + getSignedUrlV4(config) { + var _a; + config.accessibleAt = config.accessibleAt + ? config.accessibleAt + : new Date(); + const millisecondsToSeconds = 1.0 / 1000.0; + const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; + // v4 limit expiration to be 7 days maximum + if (expiresPeriodInSeconds > SEVEN_DAYS) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + const extensionHeaders = Object.assign({}, config.extensionHeaders); + const fqdn = new url.URL(((_a = config.host) === null || _a === void 0 ? 
void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + extensionHeaders.host = fqdn.hostname; + if (config.contentMd5) { + extensionHeaders['content-md5'] = config.contentMd5; + } + if (config.contentType) { + extensionHeaders['content-type'] = config.contentType; + } + let contentSha256; + const sha256Header = extensionHeaders['x-goog-content-sha256']; + if (sha256Header) { + if (typeof sha256Header !== 'string' || + !/[A-Fa-f0-9]{40}/.test(sha256Header)) { + throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); + } + contentSha256 = sha256Header; + } + const signedHeaders = Object.keys(extensionHeaders) + .map(header => header.toLowerCase()) + .sort() + .join(';'); + const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); + const datestamp = (0, util_js_1.formatAsUTCISO)(config.accessibleAt); + const credentialScope = `${datestamp}/auto/storage/goog4_request`; + const sign = async () => { + var _a; + const credentials = await this.auth.getCredentials(); + const credential = `${credentials.client_email}/${credentialScope}`; + const dateISO = (0, util_js_1.formatAsUTCISO)(config.accessibleAt ? config.accessibleAt : new Date(), true); + const queryParams = { + 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', + 'X-Goog-Credential': credential, + 'X-Goog-Date': dateISO, + 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), + 'X-Goog-SignedHeaders': signedHeaders, + ...(config.queryParams || {}), + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); + const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); + const hash = crypto + .createHash('sha256') + .update(canonicalRequest) + .digest('hex'); + const blobToSign = [ + 'GOOG4-RSA-SHA256', + dateISO, + credentialScope, + hash, + ].join('\n'); + try { + const signature = await this.auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const signedQuery = Object.assign({}, queryParams, { + 'X-Goog-Signature': signatureHex, + }); + return signedQuery; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers) { + // Sort headers by their lowercased names + const sortedHeaders = (0, util_js_1.objectEntries)(headers) + // Convert header names to lowercase + .map(([headerName, value]) => [ + headerName.toLowerCase(), + value, + ]) + .sort((a, b) => a[0].localeCompare(b[0])); + return sortedHeaders + .filter(([, value]) => value !== undefined) + .map(([headerName, value]) => { + // - Convert Array (multi-valued header) into string, delimited by + // ',' (no space). + // - Trim leading and trailing spaces. 
+ // - Convert sequential (2+) spaces into a single space + const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); + return `${headerName}:${canonicalValue}\n`; + }) + .join(''); + } + getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { + return [ + method, + path, + query, + headers, + signedHeaders, + contentSha256 || 'UNSIGNED-PAYLOAD', + ].join('\n'); + } + getCanonicalQueryParams(query) { + return (0, util_js_1.objectEntries)(query) + .map(([key, value]) => [(0, util_js_1.encodeURI)(key, true), (0, util_js_1.encodeURI)(value, true)]) + .sort((a, b) => (a[0] < b[0] ? -1 : 1)) + .map(([key, value]) => `${key}=${value}`) + .join('&'); + } + getResourcePath(cname, bucket, file) { + if (cname) { + return '/' + (file || ''); + } + else if (file) { + return `/${bucket}/${file}`; + } + else { + return `/${bucket}`; + } + } + parseExpires(expires, current = new Date()) { + const expiresInMSeconds = new Date(expires).valueOf(); + if (isNaN(expiresInMSeconds)) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expiresInMSeconds < current.valueOf()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + return Math.floor(expiresInMSeconds / 1000); // The API expects seconds. + } + parseAccessibleAt(accessibleAt) { + const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); + if (isNaN(accessibleAtInMSeconds)) { + throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); + } + return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. + } +} +exports.URLSigner = URLSigner; +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +class SigningError extends Error { + constructor() { + super(...arguments); + this.name = 'SigningError'; + } +} +exports.SigningError = SigningError; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/storage.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/storage.d.ts new file mode 100644 index 0000000..1a0ecb5 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/storage.d.ts @@ -0,0 +1,723 @@ +/// +import { ApiError, Service, ServiceOptions } from './nodejs-common/index.js'; +import { Readable } from 'stream'; +import { Bucket } from './bucket.js'; +import { Channel } from './channel.js'; +import { File } from './file.js'; +import { HmacKey, HmacKeyMetadata, HmacKeyOptions } from './hmacKey.js'; +import { CRC32CValidatorGenerator } from './crc32c.js'; +export interface GetServiceAccountOptions { + userProject?: string; +} +export interface ServiceAccount { + emailAddress?: string; +} +export type GetServiceAccountResponse = [ServiceAccount, unknown]; +export interface GetServiceAccountCallback { + (err: Error | null, serviceAccount?: ServiceAccount, apiResponse?: unknown): void; +} +export interface CreateBucketQuery { + project: string; + userProject: string; + enableObjectRetention: boolean; +} +export declare enum IdempotencyStrategy { + RetryAlways = 0, + RetryConditional = 1, + RetryNever = 2 +} +export interface RetryOptions { + retryDelayMultiplier?: number; + totalTimeout?: number; + maxRetryDelay?: number; + autoRetry?: boolean; + maxRetries?: number; + retryableErrorFn?: (err: ApiError) => boolean; + idempotencyStrategy?: IdempotencyStrategy; +} +export interface PreconditionOptions { + ifGenerationMatch?: number | string; + ifGenerationNotMatch?: number | string; + ifMetagenerationMatch?: number | string; + 
ifMetagenerationNotMatch?: number | string; +} +export interface StorageOptions extends ServiceOptions { + /** + * The API endpoint of the service used to make requests. + * Defaults to `storage.googleapis.com`. + */ + apiEndpoint?: string; + crc32cGenerator?: CRC32CValidatorGenerator; + retryOptions?: RetryOptions; +} +export interface BucketOptions { + crc32cGenerator?: CRC32CValidatorGenerator; + kmsKeyName?: string; + preconditionOpts?: PreconditionOptions; + userProject?: string; +} +export interface Cors { + maxAgeSeconds?: number; + method?: string[]; + origin?: string[]; + responseHeader?: string[]; +} +interface Versioning { + enabled: boolean; +} +/** + * Custom placement configuration. + * Initially used for dual-region buckets. + **/ +export interface CustomPlacementConfig { + dataLocations?: string[]; +} +export interface AutoclassConfig { + enabled?: boolean; + terminalStorageClass?: 'NEARLINE' | 'ARCHIVE'; +} +export interface CreateBucketRequest { + archive?: boolean; + autoclass?: AutoclassConfig; + coldline?: boolean; + cors?: Cors[]; + customPlacementConfig?: CustomPlacementConfig; + dra?: boolean; + enableObjectRetention?: boolean; + location?: string; + multiRegional?: boolean; + nearline?: boolean; + regional?: boolean; + requesterPays?: boolean; + retentionPolicy?: object; + rpo?: string; + standard?: boolean; + storageClass?: string; + userProject?: string; + versioning?: Versioning; +} +export type CreateBucketResponse = [Bucket, unknown]; +export interface BucketCallback { + (err: Error | null, bucket?: Bucket | null, apiResponse?: unknown): void; +} +export type GetBucketsResponse = [Bucket[], {}, unknown]; +export interface GetBucketsCallback { + (err: Error | null, buckets: Bucket[], nextQuery?: {}, apiResponse?: unknown): void; +} +export interface GetBucketsRequest { + prefix?: string; + project?: string; + autoPaginate?: boolean; + maxApiCalls?: number; + maxResults?: number; + pageToken?: string; + userProject?: string; +} +export interface HmacKeyResourceResponse { + metadata: HmacKeyMetadata; + secret: string; +} +export type CreateHmacKeyResponse = [HmacKey, string, HmacKeyResourceResponse]; +export interface CreateHmacKeyOptions { + projectId?: string; + userProject?: string; +} +export interface CreateHmacKeyCallback { + (err: Error | null, hmacKey?: HmacKey | null, secret?: string | null, apiResponse?: HmacKeyResourceResponse): void; +} +export interface GetHmacKeysOptions { + projectId?: string; + serviceAccountEmail?: string; + showDeletedKeys?: boolean; + autoPaginate?: boolean; + maxApiCalls?: number; + maxResults?: number; + pageToken?: string; + userProject?: string; +} +export interface GetHmacKeysCallback { + (err: Error | null, hmacKeys: HmacKey[] | null, nextQuery?: {}, apiResponse?: unknown): void; +} +export declare enum ExceptionMessages { + EXPIRATION_DATE_INVALID = "The expiration date provided was invalid.", + EXPIRATION_DATE_PAST = "An expiration date cannot be in the past." +} +export declare enum StorageExceptionMessages { + BUCKET_NAME_REQUIRED = "A bucket name is needed to use Cloud Storage.", + BUCKET_NAME_REQUIRED_CREATE = "A name is required to create a bucket.", + HMAC_SERVICE_ACCOUNT = "The first argument must be a service account email to create an HMAC key.", + HMAC_ACCESS_ID = "An access ID is needed to create an HmacKey object." +} +export type GetHmacKeysResponse = [HmacKey[]]; +export declare const PROTOCOL_REGEX: RegExp; +/** + * Default behavior: Automatically retry retriable server errors. 
+ * + * @const {boolean} + */ +export declare const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +export declare const MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +export declare const RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +export declare const TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +export declare const MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. + * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ +export declare const RETRYABLE_ERR_FN_DEFAULT: (err?: ApiError) => boolean; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. 
+ * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *
+ * ACLs
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +export declare class Storage extends Service { + /** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ + static Bucket: typeof Bucket; + /** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ + static Channel: typeof Channel; + /** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ + static File: typeof File; + /** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ + static HmacKey: typeof HmacKey; + static acl: { + OWNER_ROLE: string; + READER_ROLE: string; + WRITER_ROLE: string; + }; + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + acl: typeof Storage.acl; + crc32cGenerator: CRC32CValidatorGenerator; + getBucketsStream(): Readable; + getHmacKeysStream(): Readable; + retryOptions: RetryOptions; + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. 
If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. 
+ * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. + * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options?: StorageOptions); + private static sanitizeEndpoint; + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name: string, options?: BucketOptions): Bucket; + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. 
+ * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id: string, resourceId: string): Channel; + createBucket(name: string, metadata?: CreateBucketRequest): Promise; + createBucket(name: string, callback: BucketCallback): void; + createBucket(name: string, metadata: CreateBucketRequest, callback: BucketCallback): void; + createBucket(name: string, metadata: CreateBucketRequest, callback: BucketCallback): void; + createHmacKey(serviceAccountEmail: string, options?: CreateHmacKeyOptions): Promise; + createHmacKey(serviceAccountEmail: string, callback: CreateHmacKeyCallback): void; + createHmacKey(serviceAccountEmail: string, options: CreateHmacKeyOptions, callback: CreateHmacKeyCallback): void; + getBuckets(options?: GetBucketsRequest): Promise; + getBuckets(options: GetBucketsRequest, callback: GetBucketsCallback): void; + getBuckets(callback: GetBucketsCallback): void; + /** + * Query object for listing HMAC keys. + * + * @typedef {object} GetHmacKeysOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [serviceAccountEmail] If present, only HMAC keys for the + * given service account are returned. + * @property {boolean} [showDeletedKeys=false] If true, include keys in the DELETE + * state. Default is false. + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} GetHmacKeysResponse + * @property {HmacKey[]} 0 Array of {@link HmacKey} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetHmacKeysCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey[]} hmacKeys Array of {@link HmacKey} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Retrieves a list of HMAC keys matching the criteria. + * + * The authenticated user must have storage.hmacKeys.list permission for the project in which the key exists. + * + * @param {GetHmacKeysOption} options Configuration options. + * @param {GetHmacKeysCallback} callback Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getHmacKeys(function(err, hmacKeys) { + * if (!err) { + * // hmacKeys is an array of HmacKey objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. 
+ * //- + * const callback = function(err, hmacKeys, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getHmacKeys(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * hmacKeys[0].metadata; + * }; + * + * storage.getHmacKeys({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getHmacKeys().then(function(data) { + * const hmacKeys = data[0]; + * }); + * ``` + */ + getHmacKeys(options?: GetHmacKeysOptions): Promise; + getHmacKeys(callback: GetHmacKeysCallback): void; + getHmacKeys(options: GetHmacKeysOptions, callback: GetHmacKeysCallback): void; + getServiceAccount(options?: GetServiceAccountOptions): Promise; + getServiceAccount(options?: GetServiceAccountOptions): Promise; + getServiceAccount(options: GetServiceAccountOptions, callback: GetServiceAccountCallback): void; + getServiceAccount(callback: GetServiceAccountCallback): void; + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. + * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId: string, options?: HmacKeyOptions): HmacKey; +} +export {}; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/storage.js b/node_modules/@google-cloud/storage/build/cjs/src/storage.js new file mode 100644 index 0000000..89c6145 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/storage.js @@ -0,0 +1,1149 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; +const index_js_1 = require("./nodejs-common/index.js"); +const paginator_1 = require("@google-cloud/paginator"); +const promisify_1 = require("@google-cloud/promisify"); +const stream_1 = require("stream"); +const bucket_js_1 = require("./bucket.js"); +const channel_js_1 = require("./channel.js"); +const file_js_1 = require("./file.js"); +const util_js_1 = require("./util.js"); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = require("./package-json-helper.cjs"); +const hmacKey_js_1 = require("./hmacKey.js"); +const crc32c_js_1 = require("./crc32c.js"); +const google_auth_library_1 = require("google-auth-library"); +var IdempotencyStrategy; +(function (IdempotencyStrategy) { + IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; + IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; + IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; +})(IdempotencyStrategy || (exports.IdempotencyStrategy = IdempotencyStrategy = {})); +var ExceptionMessages; +(function (ExceptionMessages) { + ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; + ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; +})(ExceptionMessages || (exports.ExceptionMessages = ExceptionMessages = {})); +var StorageExceptionMessages; +(function (StorageExceptionMessages) { + StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; + StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; + StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; + StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an HmacKey object."; +})(StorageExceptionMessages || (exports.StorageExceptionMessages = StorageExceptionMessages = {})); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +exports.AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +exports.MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +exports.TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +exports.MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. + * + * @const {enum} + * @private + */ +const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. 
+ * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ +const RETRYABLE_ERR_FN_DEFAULT = function (err) { + var _a; + const isConnectionProblem = (reason) => { + return (reason.includes('eai_again') || // DNS lookup error + reason === 'econnreset' || + reason === 'unexpected connection closure' || + reason === 'epipe' || + reason === 'socket connection timeout'); + }; + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (typeof err.code === 'string') { + if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) { + return true; + } + const reason = err.code.toLowerCase(); + if (isConnectionProblem(reason)) { + return true; + } + } + if (err.errors) { + for (const e of err.errors) { + const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); + if (reason && isConnectionProblem(reason)) { + return true; + } + } + } + } + return false; +}; +exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. 
+ * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *
+ * ACLs
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +class Storage extends index_js_1.Service { + getBucketsStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + getHmacKeysStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. 
+ * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; + const universe = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + let apiEndpoint = `https://storage.${universe}`; + let customEndpoint = false; + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; + if (typeof EMULATOR_HOST === 'string') { + apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); + customEndpoint = true; + } + if (options.apiEndpoint) { + apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); + customEndpoint = true; + } + options = Object.assign({}, options, { apiEndpoint }); + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; + const config = { + apiEndpoint: options.apiEndpoint, + retryOptions: { + autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined + ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry + : exports.AUTO_RETRY_DEFAULT, + maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) + ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries + : exports.MAX_RETRY_DEFAULT, + retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) + ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier + : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, + totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? 
void 0 : _g.totalTimeout) + ? (_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout + : exports.TOTAL_TIMEOUT_DEFAULT, + maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) + ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay + : exports.MAX_RETRY_DELAY_DEFAULT, + retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) + ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn + : exports.RETRYABLE_ERR_FN_DEFAULT, + idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined + ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy + : IDEMPOTENCY_STRATEGY_DEFAULT, + }, + baseUrl, + customEndpoint, + useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, + projectIdRequired: false, + scopes: [ + 'https://www.googleapis.com/auth/iam', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/devstorage.full_control', + ], + packageJson: (0, package_json_helper_cjs_1.getPackageJSON)(), + }; + super(config, options); + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + this.acl = Storage.acl; + this.crc32cGenerator = + options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + this.retryOptions = config.retryOptions; + this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); + this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); + } + static sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name, options) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); + } + return new bucket_js_1.Bucket(this, name, options); + } + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id, resourceId) { + return new channel_js_1.Channel(this, id, resourceId); + } + /** + * @typedef {array} CreateBucketResponse + * @property {Bucket} 0 The new {@link Bucket}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateBucketCallback + * @param {?Error} err Request error, if any. 
+ * @param {Bucket} bucket The new {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Metadata to set for the bucket. + * + * @typedef {object} CreateBucketRequest + * @property {boolean} [archive=false] Specify the storage class as Archive. + * @property {object} [autoclass.enabled=false] Specify whether Autoclass is + * enabled for the bucket. + * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if + * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE. + * @property {boolean} [coldline=false] Specify the storage class as Coldline. + * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. + * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [dra=false] Specify the storage class as Durable Reduced + * Availability. + * @property {boolean} [enableObjectRetention=false] Specifiy whether or not object retention should be enabled on this bucket. + * @property {string} [location] Specify the bucket's location. If specifying + * a dual-region, the `customPlacementConfig` property should be set in conjunction. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [multiRegional=false] Specify the storage class as + * Multi-Regional. + * @property {boolean} [nearline=false] Specify the storage class as Nearline. + * @property {boolean} [regional=false] Specify the storage class as Regional. + * @property {boolean} [requesterPays=false] **Early Access Testers Only** + * Force the use of the User Project metadata field to assign operational + * costs when an operation is made on a Bucket and its objects. + * @property {string} [rpo] For dual-region buckets, controls whether turbo + * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). + * @property {boolean} [standard=true] Specify the storage class as Standard. + * @property {string} [storageClass] The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @property {Versioning} [versioning=undefined] Specify the versioning status. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * Create a bucket. + * + * Cloud Storage uses a flat namespace, so you can't create a bucket with + * a name that is already in use. For more information, see + * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} name Name of the bucket to create. + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a name is not provided. 
+ * @see Bucket#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const callback = function(err, bucket, apiResponse) { + * // `bucket` is a Bucket object. + * }; + * + * storage.createBucket('new-bucket', callback); + * + * //- + * // Create a bucket in a specific location and region. See the + * // Official JSON API docs for complete details on the `location` + * option. + * // + * //- + * const metadata = { + * location: 'US-CENTRAL1', + * regional: true + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Create a bucket with a retention policy of 6 months. + * //- + * const metadata = { + * retentionPolicy: { + * retentionPeriod: 15780000 // 6 months in seconds. + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Enable versioning on a new bucket. + * //- + * const metadata = { + * versioning: { + * enabled: true + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createBucket('new-bucket').then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_create_bucket + * Another example: + */ + createBucket(name, metadataOrCallback, callback) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); + } + let metadata; + if (!callback) { + callback = metadataOrCallback; + metadata = {}; + } + else { + metadata = metadataOrCallback; + } + const body = { + ...metadata, + name, + }; + const storageClasses = { + archive: 'ARCHIVE', + coldline: 'COLDLINE', + dra: 'DURABLE_REDUCED_AVAILABILITY', + multiRegional: 'MULTI_REGIONAL', + nearline: 'NEARLINE', + regional: 'REGIONAL', + standard: 'STANDARD', + }; + const storageClassKeys = Object.keys(storageClasses); + for (const storageClass of storageClassKeys) { + if (body[storageClass]) { + if (metadata.storageClass && metadata.storageClass !== storageClass) { + throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); + } + body.storageClass = storageClasses[storageClass]; + delete body[storageClass]; + } + } + if (body.requesterPays) { + body.billing = { + requesterPays: body.requesterPays, + }; + delete body.requesterPays; + } + const query = { + project: this.projectId, + }; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + if (body.enableObjectRetention) { + query.enableObjectRetention = body.enableObjectRetention; + delete body.enableObjectRetention; + } + this.request({ + method: 'POST', + uri: '/b', + qs: query, + json: body, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const bucket = this.bucket(name); + bucket.metadata = resp; + callback(null, bucket, resp); + }); + } + /** + * @typedef {object} CreateHmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {object} HmacKeyMetadata + * @property {string} accessId The access id identifies which HMAC key was + * used to sign a request when authenticating with HMAC. 
+ * @property {string} etag Used to perform a read-modify-write of the key. + * @property {string} id The resource name of the HMAC key. + * @property {string} projectId The project ID. + * @property {string} serviceAccountEmail The service account's email this + * HMAC key is created for. + * @property {string} state The state of this HMAC key. One of "ACTIVE", + * "INACTIVE" or "DELETED". + * @property {string} timeCreated The creation time of the HMAC key in + * RFC 3339 format. + * @property {string} [updated] The time this HMAC key was last updated in + * RFC 3339 format. + */ + /** + * @typedef {array} CreateHmacKeyResponse + * @property {HmacKey} 0 The HmacKey instance created from API response. + * @property {string} 1 The HMAC key's secret used to access the XML API. + * @property {object} 3 The raw API response. + */ + /** + * @callback CreateHmacKeyCallback Callback function. + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey The HmacKey instance created from API response. + * @param {string} secret The HMAC key's secret used to access the XML API. + * @param {object} apiResponse The raw API response. + */ + /** + * Create an HMAC key associated with an service account to authenticate + * requests to the Cloud Storage XML API. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @param {string} serviceAccountEmail The service account's email address + * with which the HMAC key is created for. + * @param {CreateHmacKeyCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('google-cloud/storage'); + * const storage = new Storage(); + * + * // Replace with your service account's email address + * const serviceAccountEmail = + * 'my-service-account@appspot.gserviceaccount.com'; + * + * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { + * if (!err) { + * // Securely store the secret for use with the XML API. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createHmacKey(serviceAccountEmail) + * .then((response) => { + * const hmacKey = response[0]; + * const secret = response[1]; + * // Securely store the secret for use with the XML API. + * }); + * ``` + */ + createHmacKey(serviceAccountEmail, optionsOrCb, cb) { + if (typeof serviceAccountEmail !== 'string') { + throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); + } + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options, { serviceAccountEmail }); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + method: 'POST', + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const metadata = resp.metadata; + const hmacKey = this.hmacKey(metadata.accessId, { + projectId: metadata.projectId, + }); + hmacKey.metadata = resp.metadata; + callback(null, hmacKey, resp.secret, resp); + }); + } + /** + * Query object for listing buckets. + * + * @typedef {object} GetBucketsRequest + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. 
+ * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * @typedef {array} GetBucketsResponse + * @property {Bucket[]} 0 Array of {@link Bucket} instances. + * @property {object} 1 nextQuery A query object to receive more results. + * @property {object} 2 The full API response. + */ + /** + * @callback GetBucketsCallback + * @param {?Error} err Request error, if any. + * @param {Bucket[]} buckets Array of {@link Bucket} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Get Bucket objects for all of the buckets in your project. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} + * + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @param {GetBucketsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getBuckets(function(err, buckets) { + * if (!err) { + * // buckets is an array of Bucket objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, buckets, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getBuckets(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * buckets[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); + * }; + * + * storage.getBuckets({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getBuckets().then(function(data) { + * const buckets = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_list_buckets + * Another example: + */ + getBuckets(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + options.project = options.project || this.projectId; + this.request({ + uri: '/b', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const buckets = itemsArray.map((bucket) => { + const bucketInstance = this.bucket(bucket.id); + bucketInstance.metadata = bucket; + return bucketInstance; + }); + const nextQuery = resp.nextPageToken + ? 
Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, buckets, nextQuery, resp); + }); + } + getHmacKeys(optionsOrCb, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const hmacKeys = itemsArray.map((hmacKey) => { + const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { + projectId: hmacKey.projectId, + }); + hmacKeyInstance.metadata = hmacKey; + return hmacKeyInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, hmacKeys, nextQuery, resp); + }); + } + /** + * @typedef {array} GetServiceAccountResponse + * @property {object} 0 The service account resource. + * @property {object} 1 The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * @callback GetServiceAccountCallback + * @param {?Error} err Request error, if any. + * @param {object} serviceAccount The serviceAccount resource. + * @param {string} serviceAccount.emailAddress The service account email + * address. + * @param {object} apiResponse The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * Get the email address of this project's Google Cloud Storage service + * account. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} + * + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project to be billed for this + * request. + * @param {GetServiceAccountCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { + * if (!err) { + * const serviceAccountEmail = serviceAccount.emailAddress; + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getServiceAccount().then(function(data) { + * const serviceAccountEmail = data[0].emailAddress; + * const apiResponse = data[1]; + * }); + * ``` + */ + getServiceAccount(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + this.request({ + uri: `/projects/${this.projectId}/serviceAccount`, + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const camelCaseResponse = {}; + for (const prop in resp) { + // eslint-disable-next-line no-prototype-builtins + if (resp.hasOwnProperty(prop)) { + const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); + camelCaseResponse[camelCaseProp] = resp[prop]; + } + } + callback(null, camelCaseResponse, resp); + }); + } + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. 
+ * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId, options) { + if (!accessId) { + throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); + } + return new hmacKey_js_1.HmacKey(this, accessId, options); + } +} +exports.Storage = Storage; +/** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ +Storage.Bucket = bucket_js_1.Bucket; +/** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ +Storage.Channel = channel_js_1.Channel; +/** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ +Storage.File = file_js_1.File; +/** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ +Storage.HmacKey = hmacKey_js_1.HmacKey; +Storage.acl = { + OWNER_ROLE: 'OWNER', + READER_ROLE: 'READER', + WRITER_ROLE: 'WRITER', +}; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Storage, { + exclude: ['bucket', 'channel', 'hmacKey'], +}); diff --git a/node_modules/@google-cloud/storage/build/cjs/src/transfer-manager.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/transfer-manager.d.ts new file mode 100644 index 0000000..9bf8fd7 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/transfer-manager.d.ts @@ -0,0 +1,246 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/// +import { Bucket, UploadOptions, UploadResponse } from './bucket.js'; +import { DownloadOptions, DownloadResponse, File } from './file.js'; +import { GaxiosResponse } from 'gaxios'; +export interface UploadManyFilesOptions { + concurrencyLimit?: number; + skipIfExists?: boolean; + prefix?: string; + passthroughOptions?: Omit; +} +export interface DownloadManyFilesOptions { + concurrencyLimit?: number; + prefix?: string; + stripPrefix?: string; + passthroughOptions?: DownloadOptions; +} +export interface DownloadFileInChunksOptions { + concurrencyLimit?: number; + chunkSizeBytes?: number; + destination?: string; + validation?: 'crc32c' | false; + noReturnData?: boolean; +} +export interface UploadFileInChunksOptions { + concurrencyLimit?: number; + chunkSizeBytes?: number; + uploadName?: string; + maxQueueSize?: number; + uploadId?: string; + autoAbortFailure?: boolean; + partsMap?: Map; + validation?: 'md5' | false; + headers?: { + [key: string]: string; + }; +} +export interface MultiPartUploadHelper { + bucket: Bucket; + fileName: string; + uploadId?: string; + partsMap?: Map; + initiateUpload(headers?: { + [key: string]: string; + }): Promise; + uploadPart(partNumber: number, chunk: Buffer, validation?: 'md5' | false): Promise; + completeUpload(): Promise; + abortUpload(): Promise; +} +export type MultiPartHelperGenerator = (bucket: Bucket, fileName: string, uploadId?: string, partsMap?: Map) => MultiPartUploadHelper; +export declare class MultiPartUploadError extends Error { + private uploadId; + private partsMap; + constructor(message: string, uploadId: string, partsMap: Map); +} +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * + */ +export declare class TransferManager { + bucket: Bucket; + constructor(bucket: Bucket); + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. 
+ * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * + */ + uploadManyFiles(filePathsOrDirectory: string[] | string, options?: UploadManyFilesOptions): Promise; + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. + * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. + * ``` + * + */ + downloadManyFiles(filesOrFolder: File[] | string[] | string, options?: DownloadManyFilesOptions): Promise; + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. + * + */ + /** + * Download a large file in chunks utilizing parallel download operations. This is a convenience method + * that utilizes {@link File#download} to perform the download. 
+ * + * @param {File | string} fileOrName {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * + */ + downloadFileInChunks(fileOrName: File | string, options?: DownloadFileInChunksOptions): Promise; + /** + * @typedef {object} UploadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded. + * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path. + * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified + * defaults to the specified concurrency limit. + * @property {string} [uploadId] If specified attempts to resume a previous upload. + * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk + * specified in partsMap + * @property {object} [headers] headers to be sent when initiating the multipart upload. + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload} + * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set, + * failures will be automatically aborted. + * + */ + /** + * Upload a large file in chunks utilizing parallel upload opertions. If the upload fails, an uploadId and + * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to + * resume the upload. + * + * @param {string} [filePath] The path of the file to be uploaded + * @param {UploadFileInChunksOptions} [options] Configuration options. + * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. + * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload a large file in chunks utilizing parallel operations. 
+ * //- + * const response = await transferManager.uploadFileInChunks('large-file.txt'); + * // Your bucket now contains: + * // - "large-file.txt" + * ``` + * + * + */ + uploadFileInChunks(filePath: string, options?: UploadFileInChunksOptions, generator?: MultiPartHelperGenerator): Promise; + private getPathsFromDirectory; +} diff --git a/node_modules/@google-cloud/storage/build/cjs/src/transfer-manager.js b/node_modules/@google-cloud/storage/build/cjs/src/transfer-manager.js new file mode 100644 index 0000000..6d5c77c --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/transfer-manager.js @@ -0,0 +1,664 @@ +"use strict"; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TransferManager = exports.MultiPartUploadError = void 0; +const file_js_1 = require("./file.js"); +const p_limit_1 = __importDefault(require("p-limit")); +const path = __importStar(require("path")); +const fs_1 = require("fs"); +const crc32c_js_1 = require("./crc32c.js"); +const google_auth_library_1 = require("google-auth-library"); +const fast_xml_parser_1 = require("fast-xml-parser"); +const async_retry_1 = __importDefault(require("async-retry")); +const crypto_1 = require("crypto"); +const util_js_1 = require("./nodejs-common/util.js"); +const util_js_2 = require("./util.js"); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = require("./package-json-helper.cjs"); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * Default number of concurrently executing promises to use when calling uploadManyFiles. + * + */ +const DEFAULT_PARALLEL_UPLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadManyFiles. + * + */ +const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5; +/** + * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling uploadFileInChunks. + * + */ +const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * Default number of concurrently executing promises to use when calling uploadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5; +const EMPTY_REGEX = '(?:)'; +/** + * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header. + * Example: `gccl-gcs-cmd/tm.upload_many` + * + * @see {@link GCCL_GCS_CMD}. + * @see {@link GCCL_GCS_CMD_KEY}. + */ +const GCCL_GCS_CMD_FEATURE = { + UPLOAD_MANY: 'tm.upload_many', + DOWNLOAD_MANY: 'tm.download_many', + UPLOAD_SHARDED: 'tm.upload_sharded', + DOWNLOAD_SHARDED: 'tm.download_sharded', +}; +const defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => { + return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap); +}; +class MultiPartUploadError extends Error { + constructor(message, uploadId, partsMap) { + super(message); + this.uploadId = uploadId; + this.partsMap = partsMap; + } +} +exports.MultiPartUploadError = MultiPartUploadError; +/** + * Class representing an implementation of MPU in the XML API. This class is not meant for public usage. 
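 * Roughly, uploadFileInChunks (later in this file) drives an instance of this
 * class through the MultiPartUploadHelper interface as sketched below (the
 * object name and chunk variables are illustrative, not the exact call sites):
 *
 *   const helper = new XMLMultiPartUploadHelper(bucket, 'my-object');
 *   await helper.initiateUpload();           // POST ?uploads, stores the uploadId
 *   await helper.uploadPart(1, firstChunk);  // PUT ?partNumber=1&uploadId=..., records the ETag
 *   await helper.uploadPart(2, secondChunk);
 *   await helper.completeUpload();           // POST the sorted parts list as XML
 *   // on an unrecoverable failure: await helper.abortUpload();  // DELETE ?uploadId=...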
+ * + * @private + * + */ +class XMLMultiPartUploadHelper { + constructor(bucket, fileName, uploadId, partsMap) { + _XMLMultiPartUploadHelper_instances.add(this); + this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth(); + this.uploadId = uploadId || ''; + this.bucket = bucket; + this.fileName = fileName; + this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`; + this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' }); + this.xmlParser = new fast_xml_parser_1.XMLParser(); + this.partsMap = partsMap || new Map(); + this.retryOptions = { + retries: this.bucket.storage.retryOptions.maxRetries, + factor: this.bucket.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000, + }; + } + /** + * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id. + * + * @returns {Promise} + */ + async initiateUpload(headers = {}) { + const url = `${this.baseUrl}?uploads`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers), + method: 'POST', + url, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + const parsedXML = this.xmlParser.parse(res.data); + this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Uploads the provided chunk of data to the XML API using the previously created upload id. + * + * @param {number} partNumber the sequence number of this chunk. + * @param {Buffer} chunk the chunk of data to be uploaded. + * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server + * to validate the chunk was not corrupted. + * @returns {Promise} + */ + async uploadPart(partNumber, chunk, validation) { + const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`; + let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this); + if (validation === 'md5') { + const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64'); + headers = { + 'Content-MD5': hash, + }; + } + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'PUT', + body: chunk, + headers, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + this.partsMap.set(partNumber, res.headers['etag']); + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Sends the final request of the MPU to tell GCS the upload is now complete. 
+ * + * @returns {Promise} + */ + async completeUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0])); + const parts = []; + for (const entry of sortedMap.entries()) { + parts.push({ PartNumber: entry[0], ETag: entry[1] }); + } + const body = `${this.xmlBuilder.build(parts)}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this), + url, + method: 'POST', + body, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } + /** + * Aborts an multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail, + * and future requests using the upload ID fail. + * + * @returns {Promise} + */ + async abortUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'DELETE', + }); + if (res.data && res.data.error) { + throw res.data.error; + } + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } +} +_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) { + let headerFound = false; + let userAgentFound = false; + for (const [key, value] of Object.entries(headers)) { + if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') { + headerFound = true; + // Prepend command feature to value, if not already there + if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) { + headers[key] = `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + } + else if (key.toLocaleLowerCase().trim() === 'user-agent') { + userAgentFound = true; + } + } + // If the header isn't present, add it + if (!headerFound) { + headers['x-goog-api-client'] = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + // If the User-Agent isn't present, add it + if (!userAgentFound) { + headers['User-Agent'] = (0, util_js_2.getUserAgentString)(); + } + return headers; +}, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) { + if (this.bucket.storage.retryOptions.autoRetry && + this.bucket.storage.retryOptions.retryableErrorFn(err)) { + throw err; + } + else { + bail(err); + } +}; +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * + */ +class TransferManager { + constructor(bucket) { + this.bucket = bucket; + } + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. 
+ * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. + * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * + */ + async uploadManyFiles(filePathsOrDirectory, options = {}) { + var _a; + if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { + options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; + } + else if (options.skipIfExists && + options.passthroughOptions === undefined) { + options.passthroughOptions = { + preconditionOpts: { + ifGenerationMatch: 0, + }, + }; + } + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); + const promises = []; + let allPaths = []; + if (!Array.isArray(filePathsOrDirectory)) { + for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { + allPaths.push(curPath); + } + } + else { + allPaths = filePathsOrDirectory; + } + for (const filePath of allPaths) { + const stat = await fs_1.promises.lstat(filePath); + if (stat.isDirectory()) { + continue; + } + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY, + }; + passThroughOptionsCopy.destination = filePath + .split(path.sep) + .join(path.posix.sep); + if (options.prefix) { + passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination); + } + promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. 
+ * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. + * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. + * ``` + * + */ + async downloadManyFiles(filesOrFolder, options = {}) { + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); + const promises = []; + let files = []; + if (!Array.isArray(filesOrFolder)) { + const directoryFiles = await this.bucket.getFiles({ + prefix: filesOrFolder, + }); + files = directoryFiles[0]; + } + else { + files = filesOrFolder.map(curFile => { + if (typeof curFile === 'string') { + return this.bucket.file(curFile); + } + return curFile; + }); + } + const stripRegexString = options.stripPrefix + ? `^${options.stripPrefix}` + : EMPTY_REGEX; + const regex = new RegExp(stripRegexString, 'g'); + for (const file of files) { + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY, + }; + if (options.prefix) { + passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); + } + if (options.stripPrefix) { + passThroughOptionsCopy.destination = file.name.replace(regex, ''); + } + promises.push(limit(() => file.download(passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. + * + */ + /** + * Download a large file in chunks utilizing parallel download operations. 
This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {File | string} fileOrName {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * + */ + async downloadFileInChunks(fileOrName, options = {}) { + let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + let limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); + const noReturnData = Boolean(options.noReturnData); + const promises = []; + const file = typeof fileOrName === 'string' + ? this.bucket.file(fileOrName) + : fileOrName; + const fileInfo = await file.get(); + const size = parseInt(fileInfo[0].metadata.size.toString()); + // If the file size does not meet the threshold download it as a single chunk. + if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { + limit = (0, p_limit_1.default)(1); + chunkSize = size; + } + let start = 0; + const filePath = options.destination || path.basename(file.name); + const fileToWrite = await fs_1.promises.open(filePath, 'w'); + while (start < size) { + const chunkStart = start; + let chunkEnd = start + chunkSize - 1; + chunkEnd = chunkEnd > size ? size : chunkEnd; + promises.push(limit(async () => { + const resp = await file.download({ + start: chunkStart, + end: chunkEnd, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED, + }); + const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); + if (noReturnData) + return; + return result.buffer; + })); + start += chunkSize; + } + let chunks; + try { + chunks = await Promise.all(promises); + } + finally { + await fileToWrite.close(); + } + if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) { + const downloadedCrc32C = await crc32c_js_1.CRC32C.fromFile(filePath); + if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + throw mismatchError; + } + } + if (noReturnData) + return; + return [Buffer.concat(chunks, size)]; + } + /** + * @typedef {object} UploadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded. + * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path. + * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified + * defaults to the specified concurrency limit. + * @property {string} [uploadId] If specified attempts to resume a previous upload. 
+ * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk + * specified in partsMap + * @property {object} [headers] headers to be sent when initiating the multipart upload. + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload} + * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set, + * failures will be automatically aborted. + * + */ + /** + * Upload a large file in chunks utilizing parallel upload opertions. If the upload fails, an uploadId and + * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to + * resume the upload. + * + * @param {string} [filePath] The path of the file to be uploaded + * @param {UploadFileInChunksOptions} [options] Configuration options. + * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. + * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.uploadFileInChunks('large-file.txt'); + * // Your bucket now contains: + * // - "large-file.txt" + * ``` + * + * + */ + async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) { + const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT); + const maxQueueSize = options.maxQueueSize || + options.concurrencyLimit || + DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT; + const fileName = options.uploadName || path.basename(filePath); + const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap); + let partNumber = 1; + let promises = []; + try { + if (options.uploadId === undefined) { + await mpuHelper.initiateUpload(options.headers); + } + const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize; + const readStream = (0, fs_1.createReadStream)(filePath, { + highWaterMark: chunkSize, + start: startOrResumptionByte, + }); + // p-limit only limits the number of running promises. We do not want to hold an entire + // large file in memory at once so promises acts a queue that will hold only maxQueueSize in memory. 
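        // Concretely: the read stream above was opened with highWaterMark = chunkSize
        // and start = partsMap.size * chunkSize, so each iteration below yields one
        // chunk of up to chunkSize bytes, beginning after any parts already recorded
        // by a resumed upload. Whenever `promises` reaches maxQueueSize the whole
        // batch is awaited before more chunks are queued, bounding memory use to
        // roughly maxQueueSize * chunkSize bytes.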
+ for await (const curChunk of readStream) { + if (promises.length >= maxQueueSize) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation))); + } + await Promise.all(promises); + return await mpuHelper.completeUpload(); + } + catch (e) { + if ((options.autoAbortFailure === undefined || options.autoAbortFailure) && + mpuHelper.uploadId) { + try { + await mpuHelper.abortUpload(); + return; + } + catch (e) { + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + async *getPathsFromDirectory(directory) { + const filesAndSubdirectories = await fs_1.promises.readdir(directory, { + withFileTypes: true, + }); + for (const curFileOrDirectory of filesAndSubdirectories) { + const fullPath = path.join(directory, curFileOrDirectory.name); + curFileOrDirectory.isDirectory() + ? yield* this.getPathsFromDirectory(fullPath) + : yield fullPath; + } + } +} +exports.TransferManager = TransferManager; diff --git a/node_modules/@google-cloud/storage/build/cjs/src/util.d.ts b/node_modules/@google-cloud/storage/build/cjs/src/util.d.ts new file mode 100644 index 0000000..6f3a18b --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/util.d.ts @@ -0,0 +1,88 @@ +/// +/// +/// +import * as querystring from 'querystring'; +import { PassThrough } from 'stream'; +export declare function normalize(optionsOrCallback?: T | U, cb?: U): { + options: T; + callback: U; +}; +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +export declare function objectEntries(obj: { + [key: string]: T; +}): Array<[string, T]>; +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +export declare function fixedEncodeURIComponent(str: string): string; +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. + */ +export declare function encodeURI(uri: string, encodeSlash: boolean): string; +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. + */ +export declare function qsStringify(qs: querystring.ParsedUrlQueryInput): string; +export declare function objectKeyToLowercase(object: { + [key: string]: T; +}): { + [key: string]: T; +}; +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +export declare function unicodeJSONStringify(obj: object): string; +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. + */ +export declare function convertObjKeysToSnakeCase(obj: object): object; +/** + * Formats the provided date object as a UTC ISO string. 
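 * For example, with the default empty delimiters and then with explicit ones
 * (matching the implementation in util.js below):
 *
 *   formatAsUTCISO(new Date(Date.UTC(2024, 0, 15, 7, 5, 9)))
 *   // => '20240115'
 *   formatAsUTCISO(new Date(Date.UTC(2024, 0, 15, 7, 5, 9)), true, '-', ':')
 *   // => '2024-01-15T07:05:09Z'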
+ * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. + */ +export declare function formatAsUTCISO(dateTimeToFormat: Date, includeTime?: boolean, dateDelimiter?: string, timeDelimiter?: string): string; +/** + * Examines the runtime environment and returns the appropriate tracking string. + * @returns {string} metrics tracking string based on the current runtime environment. + */ +export declare function getRuntimeTrackingString(): string; +/** + * Looks at package.json and creates the user-agent string to be applied to request headers. + * @returns {string} user agent string. + */ +export declare function getUserAgentString(): string; +export declare function getDirName(): string; +export declare function getModuleFormat(): "ESM" | "CJS"; +export declare class PassThroughShim extends PassThrough { + private shouldEmitReading; + private shouldEmitWriting; + _read(size: number): void; + _write(chunk: never, encoding: BufferEncoding, callback: (error?: Error | null | undefined) => void): void; + _final(callback: (error?: Error | null | undefined) => void): void; +} diff --git a/node_modules/@google-cloud/storage/build/cjs/src/util.js b/node_modules/@google-cloud/storage/build/cjs/src/util.js new file mode 100644 index 0000000..f0645a7 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/cjs/src/util.js @@ -0,0 +1,270 @@ +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { + enumerable: true, + get: function () { + return m[k]; + } + }; + } + Object.defineProperty(o, k2, desc); +} : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); +var __setModuleDefault = this && this.__setModuleDefault || (Object.create ? 
function (o, v) { + Object.defineProperty(o, "default", { + enumerable: true, + value: v + }); +} : function (o, v) { + o["default"] = v; +}); +var __importStar = this && this.__importStar || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.PassThroughShim = exports.getModuleFormat = exports.getDirName = exports.getUserAgentString = exports.getRuntimeTrackingString = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0; +const path = __importStar(require("path")); +const querystring = __importStar(require("querystring")); +const stream_1 = require("stream"); +const url = __importStar(require("url")); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = require("./package-json-helper.cjs"); +// Done to avoid a problem with mangling of identifiers when using esModuleInterop +const fileURLToPath = url.fileURLToPath; +const isEsm = false; +function normalize(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + return { + options, + callback + }; +} +exports.normalize = normalize; +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +function objectEntries(obj) { + return Object.keys(obj).map(key => [key, obj[key]]); +} +exports.objectEntries = objectEntries; +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +function fixedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); +} +exports.fixedEncodeURIComponent = fixedEncodeURIComponent; +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. + */ +function encodeURI(uri, encodeSlash) { + // Split the string by `/`, and conditionally rejoin them with either + // %2F if encodeSlash is `true`, or '/' if `false`. + return uri.split('/').map(fixedEncodeURIComponent).join(encodeSlash ? '%2F' : '/'); +} +exports.encodeURI = encodeURI; +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. 
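 * For example, keys and values both pass through encodeURI(component, true),
 * so any '/' in a value is emitted as %2F:
 *
 *   qsStringify({prefix: 'logs/2024', delimiter: '/'})
 *   // => 'prefix=logs%2F2024&delimiter=%2F'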
+ */ +function qsStringify(qs) { + return querystring.stringify(qs, '&', '=', { + encodeURIComponent: component => encodeURI(component, true) + }); +} +exports.qsStringify = qsStringify; +function objectKeyToLowercase(object) { + const newObj = {}; + for (let key of Object.keys(object)) { + const value = object[key]; + key = key.toLowerCase(); + newObj[key] = value; + } + return newObj; +} +exports.objectKeyToLowercase = objectKeyToLowercase; +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +function unicodeJSONStringify(obj) { + return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, char => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); +} +exports.unicodeJSONStringify = unicodeJSONStringify; +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. + */ +function convertObjKeysToSnakeCase(obj) { + if (obj instanceof Date || obj instanceof RegExp) { + return obj; + } + if (Array.isArray(obj)) { + return obj.map(convertObjKeysToSnakeCase); + } + if (obj instanceof Object) { + return Object.keys(obj).reduce((acc, cur) => { + const s = cur[0].toLocaleLowerCase() + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { + return `_${p1.toLowerCase()}`; + }); + acc[s] = convertObjKeysToSnakeCase(obj[cur]); + return acc; + }, Object()); + } + return obj; +} +exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; +/** + * Formats the provided date object as a UTC ISO string. + * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. + */ +function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { + const year = dateTimeToFormat.getUTCFullYear(); + const month = dateTimeToFormat.getUTCMonth() + 1; + const day = dateTimeToFormat.getUTCDate(); + const hour = dateTimeToFormat.getUTCHours(); + const minute = dateTimeToFormat.getUTCMinutes(); + const second = dateTimeToFormat.getUTCSeconds(); + let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month.toString().padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; + if (includeTime) { + resultString = `${resultString}T${hour.toString().padStart(2, '0')}${timeDelimiter}${minute.toString().padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; + } + return resultString; +} +exports.formatAsUTCISO = formatAsUTCISO; +/** + * Examines the runtime environment and returns the appropriate tracking string. + * @returns {string} metrics tracking string based on the current runtime environment. 
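 * For example (version numbers illustrative): 'gl-node/20.11.1' under Node.js,
 * or 'gl-deno/1.40.0' when globalThis.Deno reports a Deno runtime.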
+ */ +function getRuntimeTrackingString() { + if ( + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version.deno) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return `gl-deno/${globalThis.Deno.version.deno}`; + } else { + return `gl-node/${process.versions.node}`; + } +} +exports.getRuntimeTrackingString = getRuntimeTrackingString; +/** + * Looks at package.json and creates the user-agent string to be applied to request headers. + * @returns {string} user agent string. + */ +function getUserAgentString() { + const pkg = (0, package_json_helper_cjs_1.getPackageJSON)(); + const hyphenatedPackageName = pkg.name.replace('@google-cloud', 'gcloud-node') // For legacy purposes. + .replace('/', '-'); // For UA spec-compliance purposes. + return hyphenatedPackageName + '/' + pkg.version; +} +exports.getUserAgentString = getUserAgentString; +function getDirName() { + let dirToUse = ''; + try { + dirToUse = __dirname; + } catch (e) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + dirToUse = __dirname; + } + return dirToUse; +} +exports.getDirName = getDirName; +function getModuleFormat() { + return isEsm ? 'ESM' : 'CJS'; +} +exports.getModuleFormat = getModuleFormat; +class PassThroughShim extends stream_1.PassThrough { + constructor() { + super(...arguments); + this.shouldEmitReading = true; + this.shouldEmitWriting = true; + } + _read(size) { + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + super._read(size); + } + _write(chunk, encoding, callback) { + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + // Per the nodejs documention, callback must be invoked on the next tick + process.nextTick(() => { + super._write(chunk, encoding, callback); + }); + } + _final(callback) { + // If the stream is empty (i.e. empty file) final will be invoked before _read / _write + // and we should still emit the proper events. 
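        // (e.g. piping a zero-byte file performs no _write calls, so the 'writing'
        // event would otherwise never be emitted)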
+ if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + callback(null); + } +} +exports.PassThroughShim = PassThroughShim; diff --git a/node_modules/@google-cloud/storage/build/esm/src/acl.d.ts b/node_modules/@google-cloud/storage/build/esm/src/acl.d.ts new file mode 100644 index 0000000..d8f31a2 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/acl.d.ts @@ -0,0 +1,165 @@ +import { BodyResponseCallback, DecorateRequestOptions, BaseMetadata } from './nodejs-common/index.js'; +export interface AclOptions { + pathPrefix: string; + request: (reqOpts: DecorateRequestOptions, callback: BodyResponseCallback) => void; +} +export type GetAclResponse = [ + AccessControlObject | AccessControlObject[], + AclMetadata +]; +export interface GetAclCallback { + (err: Error | null, acl?: AccessControlObject | AccessControlObject[] | null, apiResponse?: AclMetadata): void; +} +export interface GetAclOptions { + entity: string; + generation?: number; + userProject?: string; +} +export interface UpdateAclOptions { + entity: string; + role: string; + generation?: number; + userProject?: string; +} +export type UpdateAclResponse = [AccessControlObject, AclMetadata]; +export interface UpdateAclCallback { + (err: Error | null, acl?: AccessControlObject | null, apiResponse?: AclMetadata): void; +} +export interface AddAclOptions { + entity: string; + role: string; + generation?: number; + userProject?: string; +} +export type AddAclResponse = [AccessControlObject, AclMetadata]; +export interface AddAclCallback { + (err: Error | null, acl?: AccessControlObject | null, apiResponse?: AclMetadata): void; +} +export type RemoveAclResponse = [AclMetadata]; +export interface RemoveAclCallback { + (err: Error | null, apiResponse?: AclMetadata): void; +} +export interface RemoveAclOptions { + entity: string; + generation?: number; + userProject?: string; +} +export interface AccessControlObject { + entity: string; + role: string; + projectTeam: string; +} +export interface AclMetadata extends BaseMetadata { + bucket?: string; + domain?: string; + entity?: string; + entityId?: string; + generation?: string; + object?: string; + projectTeam?: { + projectNumber?: string; + team?: 'editors' | 'owners' | 'viewers'; + }; + role?: 'OWNER' | 'READER' | 'WRITER' | 'FULL_CONTROL'; + [key: string]: unknown; +} +/** + * Attach functionality to a {@link Storage.acl} instance. This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +declare class AclRoleAccessorMethods { + private static accessMethods; + private static entities; + private static roles; + owners: {}; + readers: {}; + writers: {}; + constructor(); + _assignAccessMethods(role: string): void; +} +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. 
Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +declare class Acl extends AclRoleAccessorMethods { + default: Acl; + pathPrefix: string; + request_: (reqOpts: DecorateRequestOptions, callback: BodyResponseCallback) => void; + constructor(options: AclOptions); + add(options: AddAclOptions): Promise; + add(options: AddAclOptions, callback: AddAclCallback): void; + delete(options: RemoveAclOptions): Promise; + delete(options: RemoveAclOptions, callback: RemoveAclCallback): void; + get(options?: GetAclOptions): Promise; + get(options: GetAclOptions, callback: GetAclCallback): void; + get(callback: GetAclCallback): void; + update(options: UpdateAclOptions): Promise; + update(options: UpdateAclOptions, callback: UpdateAclCallback): void; + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject: AccessControlObject): AccessControlObject; + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; +} +export { Acl, AclRoleAccessorMethods }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/acl.js b/node_modules/@google-cloud/storage/build/esm/src/acl.js new file mode 100644 index 0000000..d546a8d --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/acl.js @@ -0,0 +1,714 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +import { promisifyAll } from '@google-cloud/promisify'; +/** + * Attach functionality to a {@link Storage.acl} instance. 
This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +class AclRoleAccessorMethods { + constructor() { + this.owners = {}; + this.readers = {}; + this.writers = {}; + /** + * An object of convenience methods to add or delete owner ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.owners.addAllAuthenticatedUsers` + * - `myFile.acl.owners.deleteAllAuthenticatedUsers` + * - `myFile.acl.owners.addAllUsers` + * - `myFile.acl.owners.deleteAllUsers` + * - `myFile.acl.owners.addDomain` + * - `myFile.acl.owners.deleteDomain` + * - `myFile.acl.owners.addGroup` + * - `myFile.acl.owners.deleteGroup` + * - `myFile.acl.owners.addProject` + * - `myFile.acl.owners.deleteProject` + * - `myFile.acl.owners.addUser` + * - `myFile.acl.owners.deleteUser` + * + * @name Acl#owners + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as an owner of a file. + * //- + * const myBucket = gcs.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.owners.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.owners = {}; + /** + * An object of convenience methods to add or delete reader ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.readers.addAllAuthenticatedUsers` + * - `myFile.acl.readers.deleteAllAuthenticatedUsers` + * - `myFile.acl.readers.addAllUsers` + * - `myFile.acl.readers.deleteAllUsers` + * - `myFile.acl.readers.addDomain` + * - `myFile.acl.readers.deleteDomain` + * - `myFile.acl.readers.addGroup` + * - `myFile.acl.readers.deleteGroup` + * - `myFile.acl.readers.addProject` + * - `myFile.acl.readers.deleteProject` + * - `myFile.acl.readers.addUser` + * - `myFile.acl.readers.deleteUser` + * + * @name Acl#readers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a reader of a file. + * //- + * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.READER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
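A minimal promise-form sketch of the generated role accessor methods (owners/readers/writers) described in this block, assuming the placeholder bucket and file names used in the surrounding examples; not part of the library source.

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const myFile = storage.bucket('my-bucket').file('my-file');

// Grant READER access to all users through the generated accessor method.
async function makeFilePublicReadable() {
  const [aclObject, apiResponse] = await myFile.acl.readers.addAllUsers();
  console.log('New ACL entry:', aclObject);
  return apiResponse;
}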
+ * //- + * myFile.acl.readers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.readers = {}; + /** + * An object of convenience methods to add or delete writer ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.writers.addAllAuthenticatedUsers` + * - `myFile.acl.writers.deleteAllAuthenticatedUsers` + * - `myFile.acl.writers.addAllUsers` + * - `myFile.acl.writers.deleteAllUsers` + * - `myFile.acl.writers.addDomain` + * - `myFile.acl.writers.deleteDomain` + * - `myFile.acl.writers.addGroup` + * - `myFile.acl.writers.deleteGroup` + * - `myFile.acl.writers.addProject` + * - `myFile.acl.writers.deleteProject` + * - `myFile.acl.writers.addUser` + * - `myFile.acl.writers.deleteUser` + * + * @name Acl#writers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a writer of a file. + * //- + * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.WRITER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.writers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.writers = {}; + AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); + } + _assignAccessMethods(role) { + const accessMethods = AclRoleAccessorMethods.accessMethods; + const entities = AclRoleAccessorMethods.entities; + const roleGroup = role.toLowerCase() + 's'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[roleGroup] = entities.reduce((acc, entity) => { + const isPrefix = entity.charAt(entity.length - 1) === '-'; + accessMethods.forEach(accessMethod => { + let method = accessMethod + entity[0].toUpperCase() + entity.substring(1); + if (isPrefix) { + method = method.replace('-', ''); + } + // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the + // more complex API of specifying an `entity` and `role`. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + acc[method] = (entityId, options, callback) => { + let apiEntity; + if (typeof options === 'function') { + callback = options; + options = {}; + } + if (isPrefix) { + apiEntity = entity + entityId; + } + else { + // If the entity is not a prefix, it is a special entity group + // that does not require further details. The accessor methods + // only accept a callback. + apiEntity = entity; + callback = entityId; + } + options = Object.assign({ + entity: apiEntity, + role, + }, options); + const args = [options]; + if (typeof callback === 'function') { + args.push(callback); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return this[accessMethod].apply(this, args); + }; + }); + return acc; + }, {}); + } +} +AclRoleAccessorMethods.accessMethods = ['add', 'delete']; +AclRoleAccessorMethods.entities = [ + // Special entity groups that do not require further specification. + 'allAuthenticatedUsers', + 'allUsers', + // Entity groups that require specification, e.g. `user-email@example.com`. 
+ 'domain-', + 'group-', + 'project-', + 'user-', +]; +AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +class Acl extends AclRoleAccessorMethods { + constructor(options) { + super(); + this.pathPrefix = options.pathPrefix; + this.request_ = options.request; + } + /** + * @typedef {array} AddAclResponse + * @property {object} 0 The Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback AddAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Add access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be added. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link https://cloud.google.com/storage/docs/access-control Access + * Control}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {AddAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. 
+ * // Here is how you would grant ownership permissions to a user on a + * specific + * // revision of a file. + * //- + * myFile.acl.add({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_add_file_owner + * Example of adding an owner to a file: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + */ + add(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'POST', + uri: '', + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + json: { + entity: options.entity, + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * @typedef {array} RemoveAclResponse + * @property {object} 0 The full API response. + */ + /** + * @callback RemoveAclCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} + * + * @param {object} options Configuration object. + * @param {string} options.entity Whose permissions will be revoked. + * @param {int} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {RemoveAclCallback} callback The callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.delete({ + * entity: 'user-useremail@example.com' + * }, function(err, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.delete({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
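A sketch of the add() call documented above using async/await instead of callbacks; the bucket name, email, and role value are illustrative placeholders.

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function grantOwner(email) {
  const myBucket = storage.bucket('my-bucket');
  // Adds an OWNER entry for the given user to the bucket's ACL.
  const [aclObject, apiResponse] = await myBucket.acl.add({
    entity: `user-${email}`,
    role: 'OWNER',
  });
  console.log('Added:', aclObject.entity, aclObject.role);
  return apiResponse;
}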
+ * //- + * myFile.acl.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_file_owner + * Example of removing an owner from a bucket: + */ + delete(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'DELETE', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + }, (err, resp) => { + callback(err, resp); + }); + } + /** + * @typedef {array} GetAclResponse + * @property {object|object[]} 0 Single or array of Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback GetAclCallback + * @param {?Error} err Request error, if any. + * @param {object|object[]} acl Single or array of Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Get access controls on a {@link Bucket} or {@link File}. If + * an entity is omitted, you will receive an array of all applicable access + * controls. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} + * + * @param {object|function} [options] Configuration options. If you want to + * receive a list of all access controls, pass the callback function as + * the only argument. + * @param {string} options.entity Whose permissions will be fetched. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.get({ + * entity: 'user-useremail@example.com' + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // Get all access controls. + * //- + * myBucket.acl.get(function(err, aclObjects, apiResponse) { + * // aclObjects = [ + * // { + * // entity: 'user-useremail@example.com', + * // role: 'owner' + * // } + * // ] + * }); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.get({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
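A short sketch of revoking an entry with the delete() method shown above, in its promise form; names are placeholders.

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function revokeUser(email) {
  const myBucket = storage.bucket('my-bucket');
  // delete() resolves with only the API response once the entry is removed.
  const [apiResponse] = await myBucket.acl.delete({entity: `user-${email}`});
  return apiResponse;
}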
+ * //- + * myBucket.acl.get().then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_file_acl + * Example of printing a file's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_file_acl_for_user + * Example of printing a file's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + */ + get(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + let path = ''; + const query = {}; + if (options) { + path = '/' + encodeURIComponent(options.entity); + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + } + this.request({ + uri: path, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + let results; + if (resp.items) { + results = resp.items.map(this.makeAclObject_); + } + else { + results = this.makeAclObject_(resp); + } + callback(null, results, resp); + }); + } + /** + * @typedef {array} UpdateAclResponse + * @property {object} 0 The updated Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback UpdateAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The updated Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Update access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be updated. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link Storage.acl}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {UpdateAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE + * }; + * + * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.update({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
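A sketch of listing every ACL entry on a file with get(), per the documentation above (no entity given, so an array is returned); bucket and file names are placeholders.

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function printFileAcl() {
  const file = storage.bucket('my-bucket').file('my-file');
  // With no entity specified, get() resolves with every ACL entry on the file.
  const [aclObjects] = await file.acl.get();
  for (const {entity, role} of aclObjects) {
    console.log(`${entity}: ${role}`);
  }
}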
+ * //- + * myFile.acl.update(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + update(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'PUT', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + json: { + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject) { + const obj = { + entity: accessControlObject.entity, + role: accessControlObject.role, + }; + if (accessControlObject.projectTeam) { + obj.projectTeam = accessControlObject.projectTeam; + } + return obj; + } + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts, callback) { + reqOpts.uri = this.pathPrefix + reqOpts.uri; + this.request_(reqOpts, callback); + } +} +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(Acl, { + exclude: ['request'], +}); +export { Acl, AclRoleAccessorMethods }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/bucket.d.ts b/node_modules/@google-cloud/storage/build/esm/src/bucket.d.ts new file mode 100644 index 0000000..1595d39 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/bucket.d.ts @@ -0,0 +1,801 @@ +/// +/// +/// +/// +import { ApiError, BodyResponseCallback, DecorateRequestOptions, DeleteCallback, ExistsCallback, GetConfig, MetadataCallback, ServiceObject, SetMetadataResponse } from './nodejs-common/index.js'; +import { RequestResponse } from './nodejs-common/service-object.js'; +import * as http from 'http'; +import { Acl, AclMetadata } from './acl.js'; +import { Channel } from './channel.js'; +import { File, FileOptions, CreateResumableUploadOptions, CreateWriteStreamOptions } from './file.js'; +import { Iam } from './iam.js'; +import { Notification } from './notification.js'; +import { Storage, Cors, PreconditionOptions, BucketOptions } from './storage.js'; +import { GetSignedUrlResponse, GetSignedUrlCallback, SignerGetSignedUrlConfig, URLSigner, Query } from './signer.js'; +import { Readable } from 'stream'; +import { CRC32CValidatorGenerator } from './crc32c.js'; +import { URL } from 'url'; +import { BaseMetadata, SetMetadataOptions } from './nodejs-common/service-object.js'; +export type GetFilesResponse = [File[], {}, unknown]; +export interface GetFilesCallback { + (err: Error | null, files?: File[], nextQuery?: {}, apiResponse?: unknown): void; +} +interface WatchAllOptions { + delimiter?: string; + maxResults?: number; + pageToken?: string; + prefix?: string; + projection?: string; + userProject?: string; + versions?: boolean; +} +export interface AddLifecycleRuleOptions extends PreconditionOptions { + append?: boolean; +} +export interface LifecycleAction { + type: 'Delete' | 'SetStorageClass' | 'AbortIncompleteMultipartUpload'; + storageClass?: string; +} +export interface LifecycleCondition { + age?: 
number; + createdBefore?: Date | string; + customTimeBefore?: Date | string; + daysSinceCustomTime?: number; + daysSinceNoncurrentTime?: number; + isLive?: boolean; + matchesPrefix?: string[]; + matchesSuffix?: string[]; + matchesStorageClass?: string[]; + noncurrentTimeBefore?: Date | string; + numNewerVersions?: number; +} +export interface LifecycleRule { + action: LifecycleAction; + condition: LifecycleCondition; +} +export interface LifecycleCondition { + age?: number; + createdBefore?: Date | string; + customTimeBefore?: Date | string; + daysSinceCustomTime?: number; + daysSinceNoncurrentTime?: number; + isLive?: boolean; + matchesPrefix?: string[]; + matchesSuffix?: string[]; + matchesStorageClass?: string[]; + noncurrentTimeBefore?: Date | string; + numNewerVersions?: number; +} +export interface LifecycleRule { + action: LifecycleAction; + condition: LifecycleCondition; +} +export interface EnableLoggingOptions extends PreconditionOptions { + bucket?: string | Bucket; + prefix: string; +} +export interface GetFilesOptions { + autoPaginate?: boolean; + delimiter?: string; + endOffset?: string; + includeFoldersAsPrefixes?: boolean; + includeTrailingDelimiter?: boolean; + prefix?: string; + matchGlob?: string; + maxApiCalls?: number; + maxResults?: number; + pageToken?: string; + softDeleted?: boolean; + startOffset?: string; + userProject?: string; + versions?: boolean; +} +export interface CombineOptions extends PreconditionOptions { + kmsKeyName?: string; + userProject?: string; +} +export interface CombineCallback { + (err: Error | null, newFile: File | null, apiResponse: unknown): void; +} +export type CombineResponse = [File, unknown]; +export interface CreateChannelConfig extends WatchAllOptions { + address: string; +} +export interface CreateChannelOptions { + userProject?: string; +} +export type CreateChannelResponse = [Channel, unknown]; +export interface CreateChannelCallback { + (err: Error | null, channel: Channel | null, apiResponse: unknown): void; +} +export interface CreateNotificationOptions { + customAttributes?: { + [key: string]: string; + }; + eventTypes?: string[]; + objectNamePrefix?: string; + payloadFormat?: string; + userProject?: string; +} +export interface CreateNotificationCallback { + (err: Error | null, notification: Notification | null, apiResponse: unknown): void; +} +export type CreateNotificationResponse = [Notification, unknown]; +export interface DeleteBucketOptions { + ignoreNotFound?: boolean; + userProject?: string; +} +export type DeleteBucketResponse = [unknown]; +export interface DeleteBucketCallback extends DeleteCallback { + (err: Error | null, apiResponse: unknown): void; +} +export interface DeleteFilesOptions extends GetFilesOptions, PreconditionOptions { + force?: boolean; +} +export interface DeleteFilesCallback { + (err: Error | Error[] | null, apiResponse?: object): void; +} +export type DeleteLabelsResponse = [unknown]; +export type DeleteLabelsCallback = SetLabelsCallback; +export type DeleteLabelsOptions = PreconditionOptions; +export type DisableRequesterPaysOptions = PreconditionOptions; +export type DisableRequesterPaysResponse = [unknown]; +export interface DisableRequesterPaysCallback { + (err?: Error | null, apiResponse?: object): void; +} +export type EnableRequesterPaysResponse = [unknown]; +export interface EnableRequesterPaysCallback { + (err?: Error | null, apiResponse?: unknown): void; +} +export type EnableRequesterPaysOptions = PreconditionOptions; +export interface BucketExistsOptions extends GetConfig { + 
userProject?: string; +} +export type BucketExistsResponse = [boolean]; +export type BucketExistsCallback = ExistsCallback; +export interface GetBucketOptions extends GetConfig { + userProject?: string; +} +export type GetBucketResponse = [Bucket, unknown]; +export interface GetBucketCallback { + (err: ApiError | null, bucket: Bucket | null, apiResponse: unknown): void; +} +export interface GetLabelsOptions { + userProject?: string; +} +export type GetLabelsResponse = [unknown]; +export interface GetLabelsCallback { + (err: Error | null, labels: object | null): void; +} +export interface BucketMetadata extends BaseMetadata { + acl?: AclMetadata[] | null; + autoclass?: { + enabled?: boolean; + toggleTime?: string; + terminalStorageClass?: string; + terminalStorageClassUpdateTime?: string; + }; + billing?: { + requesterPays?: boolean; + }; + cors?: Cors[]; + customPlacementConfig?: { + dataLocations?: string[]; + }; + defaultEventBasedHold?: boolean; + defaultObjectAcl?: AclMetadata[]; + encryption?: { + defaultKmsKeyName?: string; + } | null; + iamConfiguration?: { + publicAccessPrevention?: string; + uniformBucketLevelAccess?: { + enabled?: boolean; + lockedTime?: string; + }; + }; + labels?: { + [key: string]: string | null; + }; + lifecycle?: { + rule?: LifecycleRule[]; + } | null; + location?: string; + locationType?: string; + logging?: { + logBucket?: string; + logObjectPrefix?: string; + }; + metageneration?: string; + name?: string; + objectRetention?: { + mode?: string; + }; + owner?: { + entity?: string; + entityId?: string; + }; + projectNumber?: string | number; + retentionPolicy?: { + effectiveTime?: string; + isLocked?: boolean; + retentionPeriod?: string | number; + } | null; + rpo?: string; + softDeletePolicy?: { + retentionDurationSeconds?: string | number; + readonly effectiveTime?: string; + }; + storageClass?: string; + timeCreated?: string; + updated?: string; + versioning?: { + enabled?: boolean; + }; + website?: { + mainPageSuffix?: string; + notFoundPage?: string; + }; +} +export type GetBucketMetadataResponse = [BucketMetadata, unknown]; +export interface GetBucketMetadataCallback { + (err: ApiError | null, metadata: BucketMetadata | null, apiResponse: unknown): void; +} +export interface GetBucketMetadataOptions { + userProject?: string; +} +export interface GetBucketSignedUrlConfig extends Pick { + action: 'list'; + version?: 'v2' | 'v4'; + cname?: string; + virtualHostedStyle?: boolean; + expires: string | number | Date; + extensionHeaders?: http.OutgoingHttpHeaders; + queryParams?: Query; +} +export declare enum BucketActionToHTTPMethod { + list = "GET" +} +export declare enum AvailableServiceObjectMethods { + setMetadata = 0, + delete = 1 +} +export interface GetNotificationsOptions { + userProject?: string; +} +export interface GetNotificationsCallback { + (err: Error | null, notifications: Notification[] | null, apiResponse: unknown): void; +} +export type GetNotificationsResponse = [Notification[], unknown]; +export interface MakeBucketPrivateOptions { + includeFiles?: boolean; + force?: boolean; + metadata?: BucketMetadata; + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type MakeBucketPrivateResponse = [File[]]; +export interface MakeBucketPrivateCallback { + (err?: Error | null, files?: File[]): void; +} +export interface MakeBucketPublicOptions { + includeFiles?: boolean; + force?: boolean; +} +export interface MakeBucketPublicCallback { + (err?: Error | null, files?: File[]): void; +} +export type MakeBucketPublicResponse = 
[File[]]; +export interface SetBucketMetadataOptions extends PreconditionOptions { + userProject?: string; +} +export type SetBucketMetadataResponse = [BucketMetadata]; +export interface SetBucketMetadataCallback { + (err?: Error | null, metadata?: BucketMetadata): void; +} +export interface BucketLockCallback { + (err?: Error | null, apiResponse?: unknown): void; +} +export type BucketLockResponse = [unknown]; +export interface Labels { + [key: string]: string; +} +export interface SetLabelsOptions extends PreconditionOptions { + userProject?: string; +} +export type SetLabelsResponse = [unknown]; +export interface SetLabelsCallback { + (err?: Error | null, metadata?: unknown): void; +} +export interface SetBucketStorageClassOptions extends PreconditionOptions { + userProject?: string; +} +export interface SetBucketStorageClassCallback { + (err?: Error | null): void; +} +export type UploadResponse = [File, unknown]; +export interface UploadCallback { + (err: Error | null, file?: File | null, apiResponse?: unknown): void; +} +export interface UploadOptions extends CreateResumableUploadOptions, CreateWriteStreamOptions { + destination?: string | File; + encryptionKey?: string | Buffer; + kmsKeyName?: string; + onUploadProgress?: (progressEvent: any) => void; +} +export interface MakeAllFilesPublicPrivateOptions { + force?: boolean; + private?: boolean; + public?: boolean; + userProject?: string; +} +interface MakeAllFilesPublicPrivateCallback { + (err?: Error | Error[] | null, files?: File[]): void; +} +type MakeAllFilesPublicPrivateResponse = [File[]]; +export declare enum BucketExceptionMessages { + PROVIDE_SOURCE_FILE = "You must provide at least one source file.", + DESTINATION_FILE_NOT_SPECIFIED = "A destination file must be specified.", + CHANNEL_ID_REQUIRED = "An ID is required to create a channel.", + TOPIC_NAME_REQUIRED = "A valid topic name is required.", + CONFIGURATION_OBJECT_PREFIX_REQUIRED = "A configuration object with a prefix is required.", + SPECIFY_FILE_NAME = "A file name must be specified.", + METAGENERATION_NOT_PROVIDED = "A metageneration must be provided.", + SUPPLY_NOTIFICATION_ID = "You must supply a notification ID." +} +/** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. 
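A small sketch of the CRC32C validator callbacks described here, assuming the CRC32C class exported by @google-cloud/storage; the expected strings are taken from the surrounding examples.

const {CRC32C} = require('@google-cloud/storage');

const crc32c = new CRC32C();
crc32c.update(Buffer.from('data'));
console.log(crc32c.toString());           // 'rth90Q=='
console.log(crc32c.validate('rth90Q==')); // true
console.log(crc32c.validate('DkjKuA==')); // false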
+ * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. 
You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. + * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. 
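A brief async/await sketch of listing a bucket's objects with getFiles(), complementing the getFilesStream() example above; the bucket name and prefix are illustrative.

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function listAlbumFiles() {
  const bucket = storage.bucket('albums');
  // Resolves with File objects; autoPaginate is on by default, so every
  // object under the prefix is returned.
  const [files] = await bucket.getFiles({prefix: 'photos/'});
  files.forEach(file => console.log(file.name));
}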
+ * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +declare class Bucket extends ServiceObject { + name: string; + /** + * A reference to the {@link Storage} associated with this {@link Bucket} + * instance. + * @name Bucket#storage + * @type {Storage} + */ + storage: Storage; + /** + * A user project to apply to each request from this bucket. + * @name Bucket#userProject + * @type {string} + */ + userProject?: string; + acl: Acl; + iam: Iam; + crc32cGenerator: CRC32CValidatorGenerator; + getFilesStream(query?: GetFilesOptions): Readable; + signer?: URLSigner; + private instanceRetryValue?; + instancePreconditionOpts?: PreconditionOptions; + constructor(storage: Storage, name: string, options?: BucketOptions); + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI(): URL; + addLifecycleRule(rule: LifecycleRule | LifecycleRule[], options?: AddLifecycleRuleOptions): Promise; + addLifecycleRule(rule: LifecycleRule | LifecycleRule[], options: AddLifecycleRuleOptions, callback: SetBucketMetadataCallback): void; + addLifecycleRule(rule: LifecycleRule | LifecycleRule[], callback: SetBucketMetadataCallback): void; + combine(sources: string[] | File[], destination: string | File, options?: CombineOptions): Promise; + combine(sources: string[] | File[], destination: string | File, options: CombineOptions, callback: CombineCallback): void; + combine(sources: string[] | File[], destination: string | File, callback: CombineCallback): void; + createChannel(id: string, config: CreateChannelConfig, options?: CreateChannelOptions): Promise; + createChannel(id: string, config: CreateChannelConfig, callback: CreateChannelCallback): void; + createChannel(id: string, config: CreateChannelConfig, options: CreateChannelOptions, callback: CreateChannelCallback): void; + createNotification(topic: string, options?: CreateNotificationOptions): Promise; + createNotification(topic: string, options: CreateNotificationOptions, callback: CreateNotificationCallback): void; + createNotification(topic: string, callback: CreateNotificationCallback): void; + deleteFiles(query?: DeleteFilesOptions): Promise; + deleteFiles(callback: DeleteFilesCallback): void; + deleteFiles(query: DeleteFilesOptions, callback: DeleteFilesCallback): void; + deleteLabels(labels?: string | string[]): Promise; + deleteLabels(options: DeleteLabelsOptions): Promise; + deleteLabels(callback: DeleteLabelsCallback): void; + deleteLabels(labels: string | string[], options: DeleteLabelsOptions): Promise; + deleteLabels(labels: string | string[], callback: DeleteLabelsCallback): void; + deleteLabels(labels: string | string[], options: DeleteLabelsOptions, callback: DeleteLabelsCallback): void; + disableRequesterPays(options?: DisableRequesterPaysOptions): Promise; + disableRequesterPays(callback: DisableRequesterPaysCallback): void; + disableRequesterPays(options: DisableRequesterPaysOptions, callback: 
DisableRequesterPaysCallback): void; + enableLogging(config: EnableLoggingOptions): Promise; + enableLogging(config: EnableLoggingOptions, callback: SetBucketMetadataCallback): void; + enableRequesterPays(options?: EnableRequesterPaysOptions): Promise; + enableRequesterPays(callback: EnableRequesterPaysCallback): void; + enableRequesterPays(options: EnableRequesterPaysOptions, callback: EnableRequesterPaysCallback): void; + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. + * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name: string, options?: FileOptions): File; + getFiles(query?: GetFilesOptions): Promise; + getFiles(query: GetFilesOptions, callback: GetFilesCallback): void; + getFiles(callback: GetFilesCallback): void; + getLabels(options?: GetLabelsOptions): Promise; + getLabels(callback: GetLabelsCallback): void; + getLabels(options: GetLabelsOptions, callback: GetLabelsCallback): void; + getNotifications(options?: GetNotificationsOptions): Promise; + getNotifications(callback: GetNotificationsCallback): void; + getNotifications(options: GetNotificationsOptions, callback: GetNotificationsCallback): void; + getSignedUrl(cfg: GetBucketSignedUrlConfig): Promise; + getSignedUrl(cfg: GetBucketSignedUrlConfig, callback: GetSignedUrlCallback): void; + lock(metageneration: number | string): Promise; + lock(metageneration: number | string, callback: BucketLockCallback): void; + makePrivate(options?: MakeBucketPrivateOptions): Promise; + makePrivate(callback: MakeBucketPrivateCallback): void; + makePrivate(options: MakeBucketPrivateOptions, callback: MakeBucketPrivateCallback): void; + makePublic(options?: MakeBucketPublicOptions): Promise; + makePublic(callback: MakeBucketPublicCallback): void; + makePublic(options: MakeBucketPublicOptions, callback: MakeBucketPublicCallback): void; + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. 
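A sketch of the bucket-level getSignedUrl() declared above; per GetBucketSignedUrlConfig the supported action is 'list', and the bucket name and expiry are illustrative.

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function signedListUrl() {
  const [url] = await storage.bucket('albums').getSignedUrl({
    version: 'v4',
    action: 'list',                        // listing is the bucket-level action
    expires: Date.now() + 15 * 60 * 1000,  // 15 minutes from now
  });
  console.log(url);
}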
+ * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id: string): Notification; + removeRetentionPeriod(options?: SetBucketMetadataOptions): Promise; + removeRetentionPeriod(callback: SetBucketMetadataCallback): void; + removeRetentionPeriod(options: SetBucketMetadataOptions, callback: SetBucketMetadataCallback): void; + request(reqOpts: DecorateRequestOptions): Promise; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + setLabels(labels: Labels, options?: SetLabelsOptions): Promise; + setLabels(labels: Labels, callback: SetLabelsCallback): void; + setLabels(labels: Labels, options: SetLabelsOptions, callback: SetLabelsCallback): void; + setMetadata(metadata: BucketMetadata, options?: SetMetadataOptions): Promise>; + setMetadata(metadata: BucketMetadata, callback: MetadataCallback): void; + setMetadata(metadata: BucketMetadata, options: SetMetadataOptions, callback: MetadataCallback): void; + setRetentionPeriod(duration: number, options?: SetBucketMetadataOptions): Promise; + setRetentionPeriod(duration: number, callback: SetBucketMetadataCallback): void; + setRetentionPeriod(duration: number, options: SetBucketMetadataOptions, callback: SetBucketMetadataCallback): void; + setCorsConfiguration(corsConfiguration: Cors[], options?: SetBucketMetadataOptions): Promise; + setCorsConfiguration(corsConfiguration: Cors[], callback: SetBucketMetadataCallback): void; + setCorsConfiguration(corsConfiguration: Cors[], options: SetBucketMetadataOptions, callback: SetBucketMetadataCallback): void; + setStorageClass(storageClass: string, options?: SetBucketStorageClassOptions): Promise; + setStorageClass(storageClass: string, callback: SetBucketStorageClassCallback): void; + setStorageClass(storageClass: string, options: SetBucketStorageClassOptions, callback: SetBucketStorageClassCallback): void; + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject: string): void; + upload(pathString: string, options?: UploadOptions): Promise; + upload(pathString: string, options: UploadOptions, callback: UploadCallback): void; + upload(pathString: string, callback: UploadCallback): void; + makeAllFilesPublicPrivate_(options?: MakeAllFilesPublicPrivateOptions): Promise; + makeAllFilesPublicPrivate_(callback: MakeAllFilesPublicPrivateCallback): void; + makeAllFilesPublicPrivate_(options: MakeAllFilesPublicPrivateOptions, callback: MakeAllFilesPublicPrivateCallback): void; + getId(): string; + disableAutoRetryConditionallyIdempotent_(coreOpts: any, methodType: AvailableServiceObjectMethods, localPreconditionOptions?: PreconditionOptions): void; +} +/** + * Reference to the {@link Bucket} class. 
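A minimal sketch of Bucket#upload from the signatures above; the local path and destination object name are hypothetical.

const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function uploadLocalFile() {
  const [file] = await storage.bucket('albums').upload('./local-file.txt', {
    destination: 'uploads/local-file.txt', // object name inside the bucket
  });
  console.log(`Uploaded as ${file.name}`);
}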
+ * @name module:@google-cloud/storage.Bucket + * @see Bucket + */ +export { Bucket }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/bucket.js b/node_modules/@google-cloud/storage/build/esm/src/bucket.js new file mode 100644 index 0000000..d165c75 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/bucket.js @@ -0,0 +1,3499 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +import { ServiceObject, util, } from './nodejs-common/index.js'; +import { paginator } from '@google-cloud/paginator'; +import { promisifyAll } from '@google-cloud/promisify'; +import * as fs from 'fs'; +import * as mime from 'mime-types'; +import * as path from 'path'; +import pLimit from 'p-limit'; +import { promisify } from 'util'; +import AsyncRetry from 'async-retry'; +import { convertObjKeysToSnakeCase } from './util.js'; +import { Acl } from './acl.js'; +import { File, } from './file.js'; +import { Iam } from './iam.js'; +import { Notification } from './notification.js'; +import { IdempotencyStrategy, } from './storage.js'; +import { URLSigner, } from './signer.js'; +import { Readable } from 'stream'; +import { URL } from 'url'; +export var BucketActionToHTTPMethod; +(function (BucketActionToHTTPMethod) { + BucketActionToHTTPMethod["list"] = "GET"; +})(BucketActionToHTTPMethod || (BucketActionToHTTPMethod = {})); +export var AvailableServiceObjectMethods; +(function (AvailableServiceObjectMethods) { + AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; + AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; +})(AvailableServiceObjectMethods || (AvailableServiceObjectMethods = {})); +export var BucketExceptionMessages; +(function (BucketExceptionMessages) { + BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; + BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination file must be specified."; + BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; + BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; + BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; + BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; + BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; + BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; +})(BucketExceptionMessages || (BucketExceptionMessages = {})); +/** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. 
+ * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). 
+ * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. 
+ * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. + * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +class Bucket extends ServiceObject { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getFilesStream(query) { + // placeholder body, overwritten in constructor + return new Readable(); + } + constructor(storage, name, options) { + var _a, _b, _c, _d; + options = options || {}; + // Allow for "gs://"-style input, and strip any trailing slashes. + name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); + const requestQueryObject = {}; + if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + requestQueryObject.ifGenerationMatch = + options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + requestQueryObject.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + requestQueryObject.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + requestQueryObject.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + const userProject = options.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * Create a bucket. + * + * @method Bucket#create + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.create(function(err, bucket, apiResponse) { + * if (!err) { + * // The bucket was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.create().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * IamDeleteBucketOptions Configuration options. + * @property {boolean} [ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} DeleteBucketResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteBucketCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} + * + * @method Bucket#delete + * @param {DeleteBucketOptions} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_delete_bucket + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} BucketExistsResponse + * @property {boolean} 0 Whether the {@link Bucket} exists. + */ + /** + * @callback BucketExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link Bucket} exists. + */ + /** + * Check if the bucket exists. + * + * @method Bucket#exists + * @param {BucketExistsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {BucketExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() + * @property {boolean} [autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetBucketResponse + * @property {Bucket} 0 The {@link Bucket}. + * @property {object} 1 The full API response. 
+ */ + /** + * @callback GetBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a bucket if it exists. + * + * You may optionally use this to "get or create" an object by providing + * an object with `autoCreate` set to `true`. Any extra configuration that + * is normally required for the `create` method must be contained within + * this object as well. + * + * @method Bucket#get + * @param {GetBucketOptions} [options] Configuration options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.get(function(err, bucket, apiResponse) { + * // `bucket.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.get().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetBucketMetadataResponse + * @property {object} 0 The bucket metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Get the bucket's metadata. + * + * To set metadata, see {@link Bucket#setMetadata}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} + * + * @method Bucket#getMetadata + * @param {GetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_get_requester_pays_status + * Example of retrieving the requester pays status of a bucket: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} SetBucketMetadataResponse + * @property {object} apiResponse The full API response. 
+ */ + /** + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * Set the bucket's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @method Bucket#setMetadata + * @param {object} metadata The metadata you wish to set. + * @param {SetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Set website metadata field on the bucket. + * //- + * const metadata = { + * website: { + * mainPageSuffix: 'http://example.com', + * notFoundPage: 'http://example.com/404.html' + * } + * }; + * + * bucket.setMetadata(metadata, function(err, apiResponse) {}); + * + * //- + * // Enable versioning for your bucket. + * //- + * bucket.setMetadata({ + * versioning: { + * enabled: true + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Enable KMS encryption for objects within this bucket. + * //- + * bucket.setMetadata({ + * encryption: { + * defaultKmsKeyName: 'projects/grape-spaceship-123/...' + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Set the default event-based hold value for new objects in this + * // bucket. + * //- + * bucket.setMetadata({ + * defaultEventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Remove object lifecycle rules. + * //- + * bucket.setMetadata({ + * lifecycle: null + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: storage, + baseUrl: '/b', + id: name, + createMethod: storage.createBucket.bind(storage), + methods, + }); + this.name = name; + this.storage = storage; + this.userProject = options.userProject; + this.acl = new Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.acl.default = new Acl({ + request: this.request.bind(this), + pathPrefix: '/defaultObjectAcl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.storage.crc32cGenerator; + this.iam = new Iam(this); + this.getFilesStream = paginator.streamify('getFiles'); + this.instanceRetryValue = storage.retryOptions.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = new URL('gs://'); + uri.host = this.name; + return uri; + } + /** + * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). + * @property {boolean} [append=true] The new rules will be appended to any + * pre-existing rules. 
+ */ + /** + * + * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects + * in this bucket. + * @property {string|object} action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @property {object} condition Condition a bucket must meet before the + * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @property {string} [storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. Please see + * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. + */ + /** + * Add an object lifecycle management rule to the bucket. + * + * By default, an Object Lifecycle Management rule provided to this method + * will be included to the existing policy. To replace all existing rules, + * supply the `options` argument, setting `append` to `false`. + * + * To add multiple rules, pass a list to the `rule` parameter. Calling this + * function multiple times asynchronously does not guarantee that all rules + * are added correctly. + * + * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects + * in this bucket. + * @param {string|object} rule.action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @param {object} rule.condition Condition a bucket must meet before the + * action occurson the bucket. Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @param {string} [rule.storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. + * @param {AddLifecycleRuleOptions} [options] Configuration object. + * @param {boolean} [options.append=true] Append the new rule to the existing + * policy. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Automatically have an object deleted from this bucket once it is 3 years + * // of age. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * const lifecycleRules = bucket.metadata.lifecycle.rule; + * + * // Iterate over the Object Lifecycle Management rules on this bucket. 
+ * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // By default, the rule you provide will be added to the existing policy. + * // Optionally, you can disable this behavior to replace all of the + * // pre-existing rules. + * //- + * const options = { + * append: false + * }; + * + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, options, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // All rules have been replaced with the new "delete" rule. + * + * // Iterate over the Object Lifecycle Management rules on this bucket. + * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // For objects created before 2018, "downgrade" the storage class. + * //- + * bucket.addLifecycleRule({ + * action: 'setStorageClass', + * storageClass: 'COLDLINE', + * condition: { + * createdBefore: new Date('2018') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete objects created before 2016 which have the Coldline storage + * // class. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * matchesStorageClass: [ + * 'COLDLINE' + * ], + * createdBefore: new Date('2016') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceNoncurrentTime: 100 + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * noncurrentTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a customTime that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceCustomTime: 100 + * } + * }, function(err, apiResponse) ()); + * + * //- + * // Delete object that has a customTime before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * customTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * ``` + */ + addLifecycleRule(rule, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + options = options || {}; + const rules = Array.isArray(rule) ? rule : [rule]; + for (const curRule of rules) { + if (curRule.condition.createdBefore instanceof Date) { + curRule.condition.createdBefore = curRule.condition.createdBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.customTimeBefore instanceof Date) { + curRule.condition.customTimeBefore = curRule.condition.customTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.noncurrentTimeBefore instanceof Date) { + curRule.condition.noncurrentTimeBefore = + curRule.condition.noncurrentTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + } + if (options.append === false) { + this.setMetadata({ lifecycle: { rule: rules } }, options, callback); + return; + } + // The default behavior appends the previously-defined lifecycle rules with + // the new ones just passed in by the user. 
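+ // getMetadata() is called first so the bucket's existing lifecycle rules
+ // can be merged with the newly supplied ones before the bucket is patched.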
+ this.getMetadata((err, metadata) => { + var _a, _b; + if (err) { + callback(err); + return; + } + const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule) + ? (_b = metadata.lifecycle) === null || _b === void 0 ? void 0 : _b.rule + : []; + this.setMetadata({ + lifecycle: { rule: currentLifecycleRules.concat(rules) }, + }, options, callback); + }); + } + /** + * @typedef {object} CombineOptions + * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CombineCallback + * @param {?Error} err Request error, if any. + * @param {File} newFile The new {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CombineResponse + * @property {File} 0 The new {@link File}. + * @property {object} 1 The full API response. + */ + /** + * Combine multiple files into one new file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} + * + * @throws {Error} if a non-array is provided as sources argument. + * @throws {Error} if no sources are provided. + * @throws {Error} if no destination is provided. + * + * @param {string[]|File[]} sources The source files that will be + * combined. + * @param {string|File} destination The file you would like the + * source files combined into. + * @param {CombineOptions} [options] Configuration options. + * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {CombineCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const logBucket = storage.bucket('log-bucket'); + * + * const sources = [ + * logBucket.file('2013-logs.txt'), + * logBucket.file('2014-logs.txt') + * ]; + * + * const allLogs = logBucket.file('all-logs.txt'); + * + * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { + * // newFile === allLogs + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * logBucket.combine(sources, allLogs).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + combine(sources, destination, optionsOrCallback, callback) { + var _a; + if (!Array.isArray(sources) || sources.length === 0) { + throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); + } + if (!destination) { + throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required + AvailableServiceObjectMethods.setMetadata, // Same as above + options); + const convertToFile = (file) => { + if (file instanceof File) { + return file; + } + return this.file(file); + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sources = sources.map(convertToFile); + const destinationFile = convertToFile(destination); + callback = callback || util.noop; + if (!destinationFile.metadata.contentType) { + const destinationContentType = mime.contentType(destinationFile.name); + if (destinationContentType) { + destinationFile.metadata.contentType = destinationContentType; + } + } + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === + undefined && + options.ifGenerationMatch === undefined && + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + if (options.ifGenerationMatch === undefined) { + Object.assign(options, destinationFile.instancePreconditionOpts, options); + } + // Make the request from the destination File object. + destinationFile.request({ + method: 'POST', + uri: '/compose', + maxRetries, + json: { + destination: { + contentType: destinationFile.metadata.contentType, + }, + sourceObjects: sources.map(source => { + const sourceObject = { + name: source.name, + }; + if (source.metadata && source.metadata.generation) { + sourceObject.generation = parseInt(source.metadata.generation.toString()); + } + return sourceObject; + }), + }, + qs: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, destinationFile, resp); + }); + } + /** + * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. + * + * @typedef {object} CreateChannelConfig + * @property {string} address The address where notifications are + * delivered for this channel. + * @property {string} [delimiter] Returns results in a directory-like mode. + * @property {number} [maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [projection=noAcl] Set of properties to return. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. 
+ * @property {boolean} [versions=false] If `true`, lists all versions of an object + * as distinct results. + */ + /** + * @typedef {object} CreateChannelOptions + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} CreateChannelResponse + * @property {Channel} 0 The new {@link Channel}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateChannelCallback + * @param {?Error} err Request error, if any. + * @param {Channel} channel The new {@link Channel}. + * @param {object} apiResponse The full API response. + */ + /** + * Create a channel that will be notified when objects in this bucket changes. + * + * @throws {Error} If an ID is not provided. + * @throws {Error} If an address is not provided. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} + * + * @param {string} id The ID of the channel to create. + * @param {CreateChannelConfig} config Configuration for creating channel. + * @param {string} config.address The address where notifications are + * delivered for this channel. + * @param {string} [config.delimiter] Returns results in a directory-like mode. + * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @param {string} [config.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {string} [config.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {string} [config.projection=noAcl] Set of properties to return. + * @param {string} [config.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [config.versions=false] If `true`, lists all versions of an object + * as distinct results. + * @param {CreateChannelOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateChannelCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const id = 'new-channel-id'; + * + * const config = { + * address: 'https://...' + * }; + * + * bucket.createChannel(id, config, function(err, channel, apiResponse) { + * if (!err) { + * // Channel created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.createChannel(id, config).then(function(data) { + * const channel = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + createChannel(id, config, optionsOrCallback, callback) { + if (typeof id !== 'string') { + throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + method: 'POST', + uri: '/o/watch', + json: Object.assign({ + id, + type: 'web_hook', + }, config), + qs: options, + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const resourceId = apiResponse.resourceId; + const channel = this.storage.channel(id, resourceId); + channel.metadata = apiResponse; + callback(null, channel, apiResponse); + }); + } + /** + * Metadata to set for the Notification. + * + * @typedef {object} CreateNotificationOptions + * @property {object} [customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @property {string[]} [eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @property {string} [objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @property {string} [payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CreateNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The new {@link Notification}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CreateNotificationResponse + * @property {Notification} 0 The new {@link Notification}. + * @property {object} 1 The full API response. + */ + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationOptions} [options] Metadata to set for the + * notification. + * @param {object} [options.customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @param {string[]} [options.eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @param {string} [options.objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @param {string} [options.payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateNotificationCallback} [callback] Callback function. 
+ * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * @see Notification#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const callback = function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }; + * + * myBucket.createNotification('my-topic', callback); + * + * //- + * // Configure the nofiication by providing Notification metadata. + * //- + * const metadata = { + * objectNamePrefix: 'prefix-' + * }; + * + * myBucket.createNotification('my-topic', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.createNotification('my-topic').then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/createNotification.js + * region_tag:storage_create_bucket_notifications + * Another example: + */ + createNotification(topic, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const topicIsObject = topic !== null && typeof topic === 'object'; + if (topicIsObject && util.isCustomType(topic, 'pubsub/topic')) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + topic = topic.name; + } + if (typeof topic !== 'string') { + throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); + } + const body = Object.assign({ topic }, options); + if (body.topic.indexOf('projects') !== 0) { + body.topic = 'projects/{{projectId}}/topics/' + body.topic; + } + body.topic = `//pubsub.${this.storage.universeDomain}/` + body.topic; + if (!body.payloadFormat) { + body.payloadFormat = 'JSON_API_V1'; + } + const query = {}; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + this.request({ + method: 'POST', + uri: '/notificationConfigs', + json: convertObjKeysToSnakeCase(body), + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const notification = this.notification(apiResponse.id); + notification.metadata = apiResponse; + callback(null, notification, apiResponse); + }); + } + /** + * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} + * for all of the supported properties. + * @property {boolean} [force] Suppress errors until all files have been + * processed. + */ + /** + * @callback DeleteFilesCallback + * @param {?Error|?Error[]} err Request error, if any, or array of errors from + * files that were not able to be deleted. + * @param {object} [apiResponse] The full API response. + */ + /** + * Iterate over the bucket's files, calling `file.delete()` on each. + * + * This is not an atomic request. A delete attempt will be + * made for each file individually. Any one can fail, in which case only a + * portion of the files you intended to be deleted would have. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors until all files have had a chance + * to be processed. + * + * File preconditions cannot be passed to this function. 
It will not retry unless + * the idempotency strategy is set to retry always. + * + * The `query` object passed as the first argument will also be passed to + * {@link Bucket#getFiles}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} + * @param {boolean} [query.force] Suppress errors until all files have been + * processed. + * @param {DeleteFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the files in the bucket. + * //- + * bucket.deleteFiles(function(err) {}); + * + * //- + * // By default, if a file cannot be deleted, this method will stop deleting + * // files from your bucket. You can override this setting with `force: + * // true`. + * //- + * bucket.deleteFiles({ + * force: true + * }, function(errors) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * }); + * + * //- + * // The first argument to this method acts as a query to + * // {@link Bucket#getFiles}. As an example, you can delete files + * // which match a prefix. + * //- + * bucket.deleteFiles({ + * prefix: 'images/' + * }, function(err) { + * if (!err) { + * // All files in the `images` directory have been deleted. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteFiles().then(function() {}); + * ``` + */ + deleteFiles(queryOrCallback, callback) { + let query = {}; + if (typeof queryOrCallback === 'function') { + callback = queryOrCallback; + } + else if (queryOrCallback) { + query = queryOrCallback; + } + const MAX_PARALLEL_LIMIT = 10; + const MAX_QUEUE_SIZE = 1000; + const errors = []; + const deleteFile = (file) => { + return file.delete(query).catch(err => { + if (!query.force) { + throw err; + } + errors.push(err); + }); + }; + (async () => { + try { + let promises = []; + const limit = pLimit(MAX_PARALLEL_LIMIT); + const filesStream = this.getFilesStream(query); + for await (const curFile of filesStream) { + if (promises.length >= MAX_QUEUE_SIZE) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => deleteFile(curFile)).catch(e => { + filesStream.destroy(); + throw e; + })); + } + await Promise.all(promises); + callback(errors.length > 0 ? errors : null); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @deprecated + * @typedef {array} DeleteLabelsResponse + * @property {object} 0 The full API response. + */ + /** + * @deprecated + * @callback DeleteLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata Bucket's metadata. + */ + /** + * @deprecated Use setMetadata directly + * Delete one or more labels from this bucket. + * + * @param {string|string[]} [labels] The labels to delete. If no labels are + * provided, all of the labels are removed. + * @param {DeleteLabelsCallback} [callback] Callback function. + * @param {DeleteLabelsOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the labels from this bucket. 
+ * //- + * bucket.deleteLabels(function(err, apiResponse) {}); + * + * //- + * // Delete a single label. + * //- + * bucket.deleteLabels('labelone', function(err, apiResponse) {}); + * + * //- + * // Delete a specific set of labels. + * //- + * bucket.deleteLabels([ + * 'labelone', + * 'labeltwo' + * ], function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteLabels().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { + let labels = new Array(); + let options = {}; + if (typeof labelsOrCallbackOrOptions === 'function') { + callback = labelsOrCallbackOrOptions; + } + else if (typeof labelsOrCallbackOrOptions === 'string') { + labels = [labelsOrCallbackOrOptions]; + } + else if (Array.isArray(labelsOrCallbackOrOptions)) { + labels = labelsOrCallbackOrOptions; + } + else if (labelsOrCallbackOrOptions) { + options = labelsOrCallbackOrOptions; + } + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const deleteLabels = (labels) => { + const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { + nullLabelMap[labelKey] = null; + return nullLabelMap; + }, {}); + if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { + this.setLabels(nullLabelMap, options, callback); + } + else { + this.setLabels(nullLabelMap, callback); + } + }; + if (labels.length === 0) { + this.getLabels((err, labels) => { + if (err) { + callback(err); + return; + } + deleteLabels(Object.keys(labels)); + }); + } + else { + deleteLabels(labels); + } + } + /** + * @typedef {array} DisableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DisableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ * + * Disable `requesterPays` functionality from this bucket. + * + * @param {DisableRequesterPaysCallback} [callback] Callback function. + * @param {DisableRequesterPaysOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.disableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality disabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.disableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_disable_requester_pays + * Example of disabling requester pays: + */ + disableRequesterPays(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: false, + }, + }, options, callback); + } + /** + * Configuration object for enabling logging. + * + * @typedef {object} EnableLoggingOptions + * @property {string|Bucket} [bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @property {string} prefix A unique prefix for log object names. + */ + /** + * Enable logging functionality for this bucket. This will make two API + * requests, first to grant Cloud Storage WRITE permission to the bucket, then + * to set the appropriate configuration on the Bucket's metadata. + * + * @param {EnableLoggingOptions} config Configuration options. + * @param {string|Bucket} [config.bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @param {string} config.prefix A unique prefix for log object names. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const config = { + * prefix: 'log' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) { + * if (!err) { + * // Logging functionality enabled successfully. + * } + * }); + * + * ``` + * @example + * Optionally, provide a destination bucket. + * ``` + * const config = { + * prefix: 'log', + * bucket: 'destination-bucket' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) {}); + * ``` + * + * @example + * If the callback is omitted, we'll return a Promise. + * ``` + * bucket.enableLogging(config).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + enableLogging(config, callback) { + if (!config || + typeof config === 'function' || + typeof config.prefix === 'undefined') { + throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); + } + let logBucket = this.id; + if (config.bucket && config.bucket instanceof Bucket) { + logBucket = config.bucket.id; + } + else if (config.bucket && typeof config.bucket === 'string') { + logBucket = config.bucket; + } + const options = {}; + if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { + options.ifMetagenerationMatch = config.ifMetagenerationMatch; + } + if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { + options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; + } + (async () => { + try { + const [policy] = await this.iam.getPolicy(); + policy.bindings.push({ + members: ['group:cloud-storage-analytics@google.com'], + role: 'roles/storage.objectCreator', + }); + await this.iam.setPolicy(policy); + this.setMetadata({ + logging: { + logBucket, + logObjectPrefix: config.prefix, + }, + }, options, callback); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @typedef {array} EnableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback EnableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ * + * Enable `requesterPays` functionality for this bucket. This enables you, the + * bucket owner, to have the requesting user assume the charges for the access + * to your bucket and its contents. + * + * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] + * Callback function or precondition options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.enableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality enabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.enableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_enable_requester_pays + * Example of enabling requester pays: + */ + enableRequesterPays(optionsOrCallback, cb) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: true, + }, + }, options, cb); + } + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. + * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name, options) { + if (!name) { + throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); + } + return new File(this, name, options); + } + /** + * @typedef {array} GetFilesResponse + * @property {File[]} 0 Array of {@link File} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetFilesCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Array of {@link File} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Query object for listing files. + * + * @typedef {object} GetFilesOptions + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {string} [delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. 
Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @property {string} [endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {boolean} [includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. + * See: https://cloud.google.com/storage/docs/managed-folders + * @property {boolean} [includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [matchGlob] A glob pattern used to filter results, + * for example foo*bar + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {boolean} [softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @property {string} [startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions] If true, returns File objects scoped to + * their versions. + */ + /** + * Get {@link File} objects for the files currently in the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} + * + * @param {GetFilesOptions} [query] Query object for listing files. + * @param {boolean} [query.autoPaginate=true] Have pagination handled + * automatically. + * @param {string} [query.delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @param {string} [query.endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {boolean} [query.includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. 
+ * See: https://cloud.google.com/storage/docs/managed-folders + * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @param {string} [query.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {number} [query.maxApiCalls] Maximum number of API calls to make. + * @param {number} [query.maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @param {string} [query.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {boolean} [query.softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @param {string} [query.startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {string} [query.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [query.versions] If true, returns File objects scoped to + * their versions. + * @param {GetFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFiles(function(err, files) { + * if (!err) { + * // files is an array of File objects. + * } + * }); + * + * //- + * // If your bucket has versioning enabled, you can get all of your files + * // scoped to their generation. + * //- + * bucket.getFiles({ + * versions: true + * }, function(err, files) { + * // Each file is scoped to its generation. + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, files, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * bucket.getFiles(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * files[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * files[0].getMetadata(function(err, metadata) {}); + * }; + * + * bucket.getFiles({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getFiles().then(function(data) { + * const files = data[0]; + * }); + * + * ``` + * @example + *
+ * Simulating a File System
+ *
+ * With `autoPaginate: false`, it's possible to iterate over files which
+ * incorporate a common structure using a delimiter.
+ *
+ * Consider the following remote objects:
+ *
+ *   1. "a"
+ *   2. "a/b/c/d"
+ *   3. "b/d/e"
+ *
+ * Using a delimiter of `/` will return a single file, "a".
+ *
+ * `apiResponse.prefixes` will return the "sub-directories" that were found:
+ *
+ *   1. "a/"
+ *   2. "b/"
+ * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, function(err, files, nextQuery, apiResponse) { + * // files = [ + * // {File} // File object for file "a" + * // ] + * + * // apiResponse.prefixes = [ + * // 'a/', + * // 'b/' + * // ] + * }); + * ``` + * + * @example + * Using prefixes, it's now possible to simulate a file system with follow-up requests. + * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/', + * prefix: 'a/' + * }, function(err, files, nextQuery, apiResponse) { + * // No files found within "directory" a. + * // files = [] + * + * // However, a "sub-directory" was found. + * // This prefix can be used to continue traversing the "file system". + * // apiResponse.prefixes = [ + * // 'a/b/' + * // ] + * }); + * ``` + * + * @example include:samples/files.js + * region_tag:storage_list_files + * Another example: + * + * @example include:samples/files.js + * region_tag:storage_list_files_with_prefix + * Example of listing files, filtered by a prefix: + */ + getFiles(queryOrCallback, callback) { + let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; + if (!callback) { + callback = queryOrCallback; + } + query = Object.assign({}, query); + this.request({ + uri: '/o', + qs: query, + }, (err, resp) => { + if (err) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const files = itemsArray.map((file) => { + const options = {}; + if (query.versions) { + options.generation = file.generation; + } + if (file.kmsKeyName) { + options.kmsKeyName = file.kmsKeyName; + } + const fileInstance = this.file(file.name, options); + fileInstance.metadata = file; + return fileInstance; + }); + let nextQuery = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, query, { + pageToken: resp.nextPageToken, + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(null, files, nextQuery, resp); + }); + } + /** + * @deprecated + * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated + * @typedef {array} GetLabelsResponse + * @property {object} 0 Object of labels currently set on this bucket. + */ + /** + * @deprecated + * @callback GetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} labels Object of labels currently set on this bucket. + */ + /** + * @deprecated Use getMetadata directly. + * Get the labels currently set on this bucket. + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getLabels(function(err, labels) { + * if (err) { + * // Error handling omitted. + * } + * + * // labels = { + * // label: 'labelValue', + * // ... + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.getLabels().then(function(data) { + * const labels = data[0]; + * }); + * ``` + */ + getLabels(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.getMetadata(options, (err, metadata) => { + if (err) { + callback(err, null); + return; + } + callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {}); + }); + } + /** + * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback GetNotificationsCallback + * @param {?Error} err Request error, if any. + * @param {Notification[]} notifications Array of {@link Notification} + * instances. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetNotificationsResponse + * @property {Notification[]} 0 Array of {@link Notification} instances. + * @property {object} 1 The full API response. + */ + /** + * Retrieves a list of notification subscriptions for a given bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} + * + * @param {GetNotificationsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.getNotifications(function(err, notifications, apiResponse) { + * if (!err) { + * // notifications is an array of Notification objects. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getNotifications().then(function(data) { + * const notifications = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/listNotifications.js + * region_tag:storage_list_bucket_notifications + * Another example: + */ + getNotifications(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + uri: '/notificationConfigs', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const notifications = itemsArray.map((notification) => { + const notificationInstance = this.notification(notification.id); + notificationInstance.metadata = notification; + return notificationInstance; + }); + callback(null, notifications, resp); + }); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * @typedef {object} GetBucketSignedUrlConfig + * @property {string} action Only listing objects within a bucket (HTTP: GET) is supported for bucket-level signed URLs. + * @property {*} expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. 
+ * @property {string} [version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @property {string} [cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @property {object} [extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [queryParams] Additional query parameters to include + * in the signed URL. + */ + /** + * Get a signed URL to allow limited time access to a bucket. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {GetBucketSignedUrlConfig} config Configuration object. + * @param {string} config.action Currently only supports "list" (HTTP: GET). + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". 
+ * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * //- + * // Generate a URL that allows temporary access to list files in a bucket. + * //- + * const request = require('request'); + * + * const config = { + * action: 'list', + * expires: '03-17-2025' + * }; + * + * bucket.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The bucket is now available to be listed from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * ``` + */ + getSignedUrl(cfg, callback) { + const method = BucketActionToHTTPMethod[cfg.action]; + const signConfig = { + method, + expires: cfg.expires, + version: cfg.version, + cname: cfg.cname, + extensionHeaders: cfg.extensionHeaders || {}, + queryParams: cfg.queryParams || {}, + host: cfg.host, + signingEndpoint: cfg.signingEndpoint, + }; + if (!this.signer) { + this.signer = new URLSigner(this.storage.authClient, this, undefined, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback BucketLockCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Lock a previously-defined retention policy. This will prevent changes to + * the policy. + * + * @throws {Error} if a metageneration is not provided. + * + * @param {number|string} metageneration The bucket's metageneration. This is + * accesssible from calling {@link File#getMetadata}. + * @param {BucketLockCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const metageneration = 2; + * + * bucket.lock(metageneration, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.lock(metageneration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + lock(metageneration, callback) { + const metatype = typeof metageneration; + if (metatype !== 'number' && metatype !== 'string') { + throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); + } + this.request({ + method: 'POST', + uri: '/lockRetentionPolicy', + qs: { + ifMetagenerationMatch: metageneration, + }, + }, callback); + } + /** + * @typedef {array} MakeBucketPrivateResponse + * @property {File[]} 0 List of files made private. + */ + /** + * @callback MakeBucketPrivateCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made private. + */ + /** + * @typedef {object} MakeBucketPrivateOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Make the bucket listing private. + * + * You may also choose to make the contents of the bucket private by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePrivate} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPrivateOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {Metadata} [options.metadata] Define custom metadata properties to define + * along with the operation. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {MakeBucketPrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket private. + * //- + * bucket.makePrivate(function(err) {}); + * + * //- + * // Make the bucket and its contents private. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePrivate(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // Make the bucket and its contents private, using force to suppress errors + * // until all files have been processed. 
+ * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePrivate(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePrivate(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b, _c, _d; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options.private = true; + const query = { + predefinedAcl: 'projectPrivate', + }; + if (options.userProject) { + query.userProject = options.userProject; + } + if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + query.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + query.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + query.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + // You aren't allowed to set both predefinedAcl & acl properties on a bucket + // so acl must explicitly be nullified. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, (err) => { + if (err) { + callback(err); + } + const internalCall = () => { + if (options.includeFiles) { + return promisify(this.makeAllFilesPublicPrivate_).call(this, options); + } + return Promise.resolve([]); + }; + internalCall() + .then(files => callback(null, files)) + .catch(callback); + }); + } + /** + * @typedef {object} MakeBucketPublicOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + */ + /** + * @callback MakeBucketPublicCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made public. + */ + /** + * @typedef {array} MakeBucketPublicResponse + * @property {File[]} 0 List of files made public. + */ + /** + * Make the bucket publicly readable. + * + * You may also choose to make the contents of the bucket publicly readable by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePublic} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. 
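A short promise-style sketch of the `includeFiles`/`force` behaviour described above; the bucket name is a placeholder. With `force: true`, per-file errors are queued and surfaced together rather than aborting on the first failure.

```js
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('my-example-bucket'); // placeholder name

bucket
  .makePublic({includeFiles: true, force: true})
  .then(([files]) => {
    console.log(`${files.length} files made public.`);
  })
  .catch(errors => {
    // With force: true, this may be an array of per-file errors.
    console.error('Some files could not be made public:', errors);
  });
```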
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPublicOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {MakeBucketPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket publicly readable. + * //- + * bucket.makePublic(function(err) {}); + * + * //- + * // Make the bucket and its contents publicly readable. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePublic(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // Make the bucket and its contents publicly readable, using force to + * // suppress errors until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePublic(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePublic(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePublic(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const req = { public: true, ...options }; + this.acl + .add({ + entity: 'allUsers', + role: 'READER', + }) + .then(() => { + return this.acl.default.add({ + entity: 'allUsers', + role: 'READER', + }); + }) + .then(() => { + if (req.includeFiles) { + return promisify(this.makeAllFilesPublicPrivate_).call(this, req); + } + return []; + }) + .then(files => callback(null, files), callback); + } + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. + * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id) { + if (!id) { + throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); + } + return new Notification(this, id); + } + /** + * Remove an already-existing retention policy from this bucket, if it is not + * locked. + * + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * bucket.removeRetentionPeriod(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.removeRetentionPeriod().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + removeRetentionPeriod(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: null, + }, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { + reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject }; + } + return super.request(reqOpts, callback); + } + /** + * @deprecated + * @typedef {array} SetLabelsResponse + * @property {object} 0 The bucket metadata. + */ + /** + * @deprecated + * @callback SetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * @deprecated + * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated Use setMetadata directly. + * Set labels on the bucket. + * + * This makes an underlying call to {@link Bucket#setMetadata}, which + * is a PATCH request. This means an individual label can be overwritten, but + * unmentioned labels will not be touched. + * + * @param {object} labels Labels to set on the bucket. + * @param {SetLabelsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const labels = { + * labelone: 'labelonevalue', + * labeltwo: 'labeltwovalue' + * }; + * + * bucket.setLabels(labels, function(err, metadata) { + * if (!err) { + * // Labels set successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setLabels(labels).then(function(data) { + * const metadata = data[0]; + * }); + * ``` + */ + setLabels(labels, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || util.noop; + this.setMetadata({ labels }, options, callback); + } + setMetadata(metadata, optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * Lock all objects contained in the bucket, based on their creation time. Any + * attempt to overwrite or delete objects younger than the retention period + * will result in a `PERMISSION_DENIED` error. 
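To connect this with `lock()` (documented earlier), here is a hedged async sketch that applies a retention policy and then locks it. The bucket name is a placeholder, and reading the current metageneration via `getMetadata()` is simply one convenient way to supply `lock()`'s required argument.

```js
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const bucket = storage.bucket('my-example-bucket'); // placeholder name

async function setAndLockRetention() {
  // Roughly 6 months, in seconds (same figure as the example below).
  await bucket.setRetentionPeriod(15780000);

  // lock() requires the bucket's current metageneration.
  const [metadata] = await bucket.getMetadata();
  await bucket.lock(metadata.metageneration);
}

setAndLockRetention().catch(console.error);
```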
+ * + * An unlocked retention policy can be modified or removed from the bucket via + * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A + * locked retention policy cannot be removed or shortened in duration for the + * lifetime of the bucket. Attempting to remove or decrease period of a locked + * retention policy will result in a `PERMISSION_DENIED` error. You can still + * increase the policy. + * + * @param {*} duration In seconds, the minimum retention time for all objects + * contained in this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataCallback} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const DURATION_SECONDS = 15780000; // 6 months. + * + * //- + * // Lock the objects in this bucket for 6 months. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setRetentionPeriod(duration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: { + retentionPeriod: duration.toString(), + }, + }, options, callback); + } + /** + * + * @typedef {object} Cors + * @property {number} [maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @property {string[]} [method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + */ + /** + * This can be used to set the CORS configuration on the bucket. + * + * The configuration will be overwritten with the value passed into this. + * + * @param {Cors[]} corsConfiguration The new CORS configuration to set + * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataOptions} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour + * bucket.setCorsConfiguration(corsConfiguration); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + cors: corsConfiguration, + }, options, callback); + } + /** + * @typedef {object} SetBucketStorageClassOptions + * @property {string} [userProject] - The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetBucketStorageClassCallback + * @param {?Error} err Request error, if any. + */ + /** + * Set the default storage class for new files in this bucket. + * + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] - The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass = storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(); + this.setMetadata({ storageClass }, options, callback); + } + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.userProject = userProject; + const methods = [ + 'create', + 'delete', + 'exists', + 'get', + 'getMetadata', + 'setMetadata', + ]; + methods.forEach(method => { + const methodConfig = this.methods[method]; + if (typeof methodConfig === 'object') { + if (typeof methodConfig.reqOpts === 'object') { + Object.assign(methodConfig.reqOpts.qs, { userProject }); + } + else { + methodConfig.reqOpts = { + qs: { userProject }, + }; + } + } + }); + } + /** + * @typedef {object} UploadOptions Configuration options for Bucket#upload(). + * @property {string|File} [destination] The place to save + * your file. 
If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @property {string} [encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @property {boolean} [gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @property {string} [kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @property {object} [metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @property {string} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + */ + /** + * @typedef {array} UploadResponse + * @property {object} 0 The uploaded {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback UploadCallback + * @param {?Error} err Request error, if any. + * @param {object} file The uploaded {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Upload a file to the bucket. This is a convenience method that wraps + * {@link File#createWriteStream}. 
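A minimal async sketch of `upload()` using the `destination`, `gzip`, and `metadata` options described below; the local path and bucket name are placeholders.

```js
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function uploadReport(bucketName) {
  const [file] = await storage.bucket(bucketName).upload('./report.html', {
    destination: 'reports/report.html', // object name in the bucket
    gzip: true,                         // sets contentEncoding: gzip
    metadata: {cacheControl: 'no-cache'},
  });
  console.log(`Uploaded as ${file.name}.`);
}

// Both the bucket name and the local path are hypothetical.
uploadReport('my-example-bucket').catch(console.error);
```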
+ * + * Resumable uploads are enabled by default + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {string} pathString The fully qualified path to the file you + * wish to upload to your bucket. + * @param {UploadOptions} [options] Configuration options. + * @param {string|File} [options.destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {boolean} [options.gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @param {object} [options.metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @param {string} [options.offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @param {string} [options.predefinedAcl] Apply a predefined set of access + * controls to this object. + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @param {boolean} [options.private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @param {boolean} [options.public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @param {boolean} [options.resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @param {number} [options.timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @param {string} [options.uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {string|boolean} [options.validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. 
CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + * @param {UploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Upload a file from a local path. + * //- + * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { + * // Your bucket now contains: + * // - "image.png" (with the contents of `/local/path/image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * + * //- + * // It's not always that easy. You will likely want to specify the filename + * // used when your new file lands in your bucket. + * // + * // You may also want to set metadata or customize other options. + * //- + * const options = { + * destination: 'new-image.png', + * validation: 'crc32c', + * metadata: { + * metadata: { + * event: 'Fall trip to the zoo' + * } + * } + * }; + * + * bucket.upload('local-image.png', options, function(err, file) { + * // Your bucket now contains: + * // - "new-image.png" (with the contents of `local-image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * //- + * // You can also have a file gzip'd on the fly. + * //- + * bucket.upload('index.html', { gzip: true }, function(err, file) { + * // Your bucket now contains: + * // - "index.html" (automatically compressed with gzip) + * + * // Downloading the file with `file.download` will automatically decode + * the + * // file. + * }); + * + * //- + * // You may also re-use a File object, {File}, that references + * // the file you wish to create or overwrite. + * //- + * const options = { + * destination: bucket.file('existing-file.png'), + * resumable: false + * }; + * + * bucket.upload('local-img.png', options, function(err, newFile) { + * // Your bucket now contains: + * // - "existing-file.png" (with the contents of `local-img.png') + * + * // Note: + * // The `newFile` parameter is equal to `file`. + * }); + * + * //- + * // To use + * // + * // Customer-supplied Encryption Keys, provide the `encryptionKey` + * option. + * //- + * const crypto = require('crypto'); + * const encryptionKey = crypto.randomBytes(32); + * + * bucket.upload('img.png', { + * encryptionKey: encryptionKey + * }, function(err, newFile) { + * // `img.png` was uploaded with your custom encryption key. + * + * // `newFile` is already configured to use the encryption key when making + * // operations on the remote object. + * + * // However, to use your encryption key later, you must create a `File` + * // instance with the `key` supplied: + * const file = bucket.file('img.png', { + * encryptionKey: encryptionKey + * }); + * + * // Or with `file#setEncryptionKey`: + * const file = bucket.file('img.png'); + * file.setEncryptionKey(encryptionKey); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.upload('local-image.png').then(function(data) { + * const file = data[0]; + * }); + * + * To upload a file from a URL, use {@link File#createWriteStream}. 
+ * + * ``` + * @example include:samples/files.js + * region_tag:storage_upload_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + */ + upload(pathString, optionsOrCallback, callback) { + var _a, _b; + const upload = (numberOfRetries) => { + const returnValue = AsyncRetry(async (bail) => { + await new Promise((resolve, reject) => { + var _a, _b; + if (numberOfRetries === 0 && + ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { + newFile.storage.retryOptions.autoRetry = false; + } + const writable = newFile.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + fs.createReadStream(pathString) + .on('error', bail) + .pipe(writable) + .on('error', err => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + else { + return bail(err); + } + }) + .on('finish', () => { + return resolve(); + }); + }); + }, { + retries: numberOfRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(null, newFile, newFile.metadata); + } + }) + .catch(callback); + } + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (global['GCLOUD_SANDBOX_ENV']) { + return; + } + let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options = Object.assign({ + metadata: {}, + }, options); + // Do not retry if precondition option ifGenerationMatch is not set + // because this is a file operation + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + let newFile; + if (options.destination instanceof File) { + newFile = options.destination; + } + else if (options.destination !== null && + typeof options.destination === 'string') { + // Use the string as the name of the file. + newFile = this.file(options.destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + else { + // Resort to using the name of the incoming file. + const destination = path.basename(pathString); + newFile = this.file(destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + upload(maxRetries); + } + /** + * @private + * + * @typedef {object} MakeAllFilesPublicPrivateOptions + * @property {boolean} [force] Suppress errors until all files have been + * processed. 
+ * @property {boolean} [private] Make files private. + * @property {boolean} [public] Make files public. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @private + * + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Files that were updated. + */ + /** + * @typedef {array} MakeAllFilesPublicPrivateResponse + * @property {File[]} 0 List of files affected. + */ + /** + * Iterate over all of a bucket's files, calling `file.makePublic()` (public) + * or `file.makePrivate()` (private) on each. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop, and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors. + * + * @private + * + * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. + * @param {boolean} [options.force] Suppress errors until all files have been + * processed. + * @param {boolean} [options.private] Make files private. + * @param {boolean} [options.public] Make files public. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. + * + * @return {Promise} + */ + makeAllFilesPublicPrivate_(optionsOrCallback, callback) { + const MAX_PARALLEL_LIMIT = 10; + const errors = []; + const updatedFiles = []; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const processFile = async (file) => { + try { + await (options.public ? file.makePublic() : file.makePrivate(options)); + updatedFiles.push(file); + } + catch (e) { + if (!options.force) { + throw e; + } + errors.push(e); + } + }; + this.getFiles(options) + .then(([files]) => { + const limit = pLimit(MAX_PARALLEL_LIMIT); + const promises = files.map(file => { + return limit(() => processFile(file)); + }); + return Promise.all(promises); + }) + .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles)); + } + getId() { + return this.id; + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + methodType, localPreconditionOptions) { + var _a, _b; + if (typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + (methodType === AvailableServiceObjectMethods.setMetadata || + methodType === AvailableServiceObjectMethods.delete) && + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryConditional) { + this.storage.retryOptions.autoRetry = false; + } + else if (this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } +} +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator.extend(Bucket, 'getFiles'); +/*! 
Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(Bucket, { + exclude: ['cloudStorageURI', 'request', 'file', 'notification'], +}); +/** + * Reference to the {@link Bucket} class. + * @name module:@google-cloud/storage.Bucket + * @see Bucket + */ +export { Bucket }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/channel.d.ts b/node_modules/@google-cloud/storage/build/esm/src/channel.d.ts new file mode 100644 index 0000000..6bb52b9 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/channel.d.ts @@ -0,0 +1,33 @@ +import { BaseMetadata, ServiceObject } from './nodejs-common/index.js'; +import { Storage } from './storage.js'; +export interface StopCallback { + (err: Error | null, apiResponse?: unknown): void; +} +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +declare class Channel extends ServiceObject { + constructor(storage: Storage, id: string, resourceId: string); + stop(): Promise; + stop(callback: StopCallback): void; +} +/** + * Reference to the {@link Channel} class. + * @name module:@google-cloud/storage.Channel + * @see Channel + */ +export { Channel }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/channel.js b/node_modules/@google-cloud/storage/build/esm/src/channel.js new file mode 100644 index 0000000..f6328cf --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/channel.js @@ -0,0 +1,106 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +import { ServiceObject, util } from './nodejs-common/index.js'; +import { promisifyAll } from '@google-cloud/promisify'; +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +class Channel extends ServiceObject { + constructor(storage, id, resourceId) { + const config = { + parent: storage, + baseUrl: '/channels', + // An ID shouldn't be included in the API requests. + // RE: + // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 + id: '', + methods: { + // Only need `request`. 
+ }, + }; + super(config); + this.metadata.id = id; + this.metadata.resourceId = resourceId; + } + /** + * @typedef {array} StopResponse + * @property {object} 0 The full API response. + */ + /** + * @callback StopCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Stop this channel. + * + * @param {StopCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * channel.stop(function(err, apiResponse) { + * if (!err) { + * // Channel stopped successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * channel.stop().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + stop(callback) { + callback = callback || util.noop; + this.request({ + method: 'POST', + uri: '/stop', + json: this.metadata, + }, (err, apiResponse) => { + callback(err, apiResponse); + }); + } +} +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(Channel); +/** + * Reference to the {@link Channel} class. + * @name module:@google-cloud/storage.Channel + * @see Channel + */ +export { Channel }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/crc32c.d.ts b/node_modules/@google-cloud/storage/build/esm/src/crc32c.d.ts new file mode 100644 index 0000000..a15bd96 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/crc32c.d.ts @@ -0,0 +1,145 @@ +/// +/// +import { PathLike } from 'fs'; +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +declare const CRC32C_EXTENSIONS: readonly [0, 4067132163, 3778769143, 324072436, 3348797215, 904991772, 648144872, 3570033899, 2329499855, 2024987596, 1809983544, 2575936315, 1296289744, 3207089363, 2893594407, 1578318884, 274646895, 3795141740, 4049975192, 51262619, 3619967088, 632279923, 922689671, 3298075524, 2592579488, 1760304291, 2075979607, 2312596564, 1562183871, 2943781820, 3156637768, 1313733451, 549293790, 3537243613, 3246849577, 871202090, 3878099393, 357341890, 102525238, 4101499445, 2858735121, 1477399826, 1264559846, 3107202533, 1845379342, 2677391885, 2361733625, 2125378298, 820201905, 3263744690, 3520608582, 598981189, 4151959214, 85089709, 373468761, 3827903834, 3124367742, 1213305469, 1526817161, 2842354314, 2107672161, 2412447074, 2627466902, 1861252501, 1098587580, 3004210879, 2688576843, 1378610760, 2262928035, 1955203488, 1742404180, 2511436119, 3416409459, 969524848, 714683780, 3639785095, 205050476, 4266873199, 3976438427, 526918040, 1361435347, 2739821008, 2954799652, 1114974503, 2529119692, 1691668175, 2005155131, 2247081528, 3690758684, 697762079, 986182379, 3366744552, 476452099, 3993867776, 4250756596, 255256311, 1640403810, 2477592673, 2164122517, 1922457750, 2791048317, 1412925310, 1197962378, 3037525897, 3944729517, 427051182, 170179418, 4165941337, 746937522, 3740196785, 3451792453, 1070968646, 1905808397, 2213795598, 2426610938, 1657317369, 3053634322, 1147748369, 1463399397, 2773627110, 4215344322, 153784257, 444234805, 3893493558, 1021025245, 3467647198, 3722505002, 797665321, 2197175160, 1889384571, 1674398607, 2443626636, 1164749927, 3070701412, 2757221520, 1446797203, 137323447, 
4198817972, 3910406976, 461344835, 3484808360, 1037989803, 781091935, 3705997148, 2460548119, 1623424788, 1939049696, 2180517859, 1429367560, 2807687179, 3020495871, 1180866812, 410100952, 3927582683, 4182430767, 186734380, 3756733383, 763408580, 1053836080, 3434856499, 2722870694, 1344288421, 1131464017, 2971354706, 1708204729, 2545590714, 2229949006, 1988219213, 680717673, 3673779818, 3383336350, 1002577565, 4010310262, 493091189, 238226049, 4233660802, 2987750089, 1082061258, 1395524158, 2705686845, 1972364758, 2279892693, 2494862625, 1725896226, 952904198, 3399985413, 3656866545, 731699698, 4283874585, 222117402, 510512622, 3959836397, 3280807620, 837199303, 582374963, 3504198960, 68661723, 4135334616, 3844915500, 390545967, 1230274059, 3141532936, 2825850620, 1510247935, 2395924756, 2091215383, 1878366691, 2644384480, 3553878443, 565732008, 854102364, 3229815391, 340358836, 3861050807, 4117890627, 119113024, 1493875044, 2875275879, 3090270611, 1247431312, 2660249211, 1828433272, 2141937292, 2378227087, 3811616794, 291187481, 34330861, 4032846830, 615137029, 3603020806, 3314634738, 939183345, 1776939221, 2609017814, 2295496738, 2058945313, 2926798794, 1545135305, 1330124605, 3173225534, 4084100981, 17165430, 307568514, 3762199681, 888469610, 3332340585, 3587147933, 665062302, 2042050490, 2346497209, 2559330125, 1793573966, 3190661285, 1279665062, 1595330642, 2910671697]; +declare const CRC32C_EXTENSION_TABLE: Int32Array; +/** An interface for CRC32C hashing and validation */ +interface CRC32CValidator { + /** + * A method returning the CRC32C as a base64-encoded string. + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + toString: () => string; + /** + * A method validating a base64-encoded CRC32C string. + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + */ + validate: (value: string) => boolean; + /** + * A method for passing `Buffer`s for CRC32C generation. + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + */ + update: (data: Buffer) => void; +} +/** A function that generates a CRC32C Validator */ +interface CRC32CValidatorGenerator { + /** Should return a new, ready-to-use `CRC32CValidator` */ + (): CRC32CValidator; +} +declare const CRC32C_DEFAULT_VALIDATOR_GENERATOR: CRC32CValidatorGenerator; +declare const CRC32C_EXCEPTION_MESSAGES: { + readonly INVALID_INIT_BASE64_RANGE: (l: number) => string; + readonly INVALID_INIT_BUFFER_LENGTH: (l: number) => string; + readonly INVALID_INIT_INTEGER: (l: number) => string; +}; +declare class CRC32C implements CRC32CValidator { + #private; + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue?: number); + /** + * Calculates a CRC32C from a provided buffer. 
+ * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data: Buffer): void; + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input: Buffer | CRC32CValidator | string | number): boolean; + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer(): Buffer; + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON(): string; + /** + * Returns a base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString(): string; + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf(): number; + static readonly CRC32C_EXTENSIONS: readonly [0, 4067132163, 3778769143, 324072436, 3348797215, 904991772, 648144872, 3570033899, 2329499855, 2024987596, 1809983544, 2575936315, 1296289744, 3207089363, 2893594407, 1578318884, 274646895, 3795141740, 4049975192, 51262619, 3619967088, 632279923, 922689671, 3298075524, 2592579488, 1760304291, 2075979607, 2312596564, 1562183871, 2943781820, 3156637768, 1313733451, 549293790, 3537243613, 3246849577, 871202090, 3878099393, 357341890, 102525238, 4101499445, 2858735121, 1477399826, 1264559846, 3107202533, 1845379342, 2677391885, 2361733625, 2125378298, 820201905, 3263744690, 3520608582, 598981189, 4151959214, 85089709, 373468761, 3827903834, 3124367742, 1213305469, 1526817161, 2842354314, 2107672161, 2412447074, 2627466902, 1861252501, 1098587580, 3004210879, 2688576843, 1378610760, 2262928035, 1955203488, 1742404180, 2511436119, 3416409459, 969524848, 714683780, 3639785095, 205050476, 4266873199, 3976438427, 526918040, 1361435347, 2739821008, 2954799652, 1114974503, 2529119692, 1691668175, 2005155131, 2247081528, 3690758684, 697762079, 986182379, 3366744552, 476452099, 3993867776, 4250756596, 255256311, 1640403810, 2477592673, 2164122517, 1922457750, 2791048317, 1412925310, 1197962378, 3037525897, 3944729517, 427051182, 170179418, 4165941337, 746937522, 3740196785, 3451792453, 1070968646, 1905808397, 2213795598, 2426610938, 1657317369, 3053634322, 1147748369, 1463399397, 2773627110, 4215344322, 153784257, 444234805, 3893493558, 1021025245, 3467647198, 3722505002, 797665321, 2197175160, 1889384571, 1674398607, 2443626636, 1164749927, 3070701412, 2757221520, 1446797203, 137323447, 4198817972, 3910406976, 461344835, 3484808360, 1037989803, 781091935, 3705997148, 2460548119, 1623424788, 1939049696, 2180517859, 1429367560, 2807687179, 3020495871, 1180866812, 410100952, 3927582683, 4182430767, 186734380, 3756733383, 763408580, 1053836080, 3434856499, 
2722870694, 1344288421, 1131464017, 2971354706, 1708204729, 2545590714, 2229949006, 1988219213, 680717673, 3673779818, 3383336350, 1002577565, 4010310262, 493091189, 238226049, 4233660802, 2987750089, 1082061258, 1395524158, 2705686845, 1972364758, 2279892693, 2494862625, 1725896226, 952904198, 3399985413, 3656866545, 731699698, 4283874585, 222117402, 510512622, 3959836397, 3280807620, 837199303, 582374963, 3504198960, 68661723, 4135334616, 3844915500, 390545967, 1230274059, 3141532936, 2825850620, 1510247935, 2395924756, 2091215383, 1878366691, 2644384480, 3553878443, 565732008, 854102364, 3229815391, 340358836, 3861050807, 4117890627, 119113024, 1493875044, 2875275879, 3090270611, 1247431312, 2660249211, 1828433272, 2141937292, 2378227087, 3811616794, 291187481, 34330861, 4032846830, 615137029, 3603020806, 3314634738, 939183345, 1776939221, 2609017814, 2295496738, 2058945313, 2926798794, 1545135305, 1330124605, 3173225534, 4084100981, 17165430, 307568514, 3762199681, 888469610, 3332340585, 3587147933, 665062302, 2042050490, 2346497209, 2559330125, 1793573966, 3190661285, 1279665062, 1595330642, 2910671697]; + static readonly CRC32C_EXTENSION_TABLE: Int32Array; + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + private static fromBuffer; + static fromFile(file: PathLike): Promise; + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + private static fromString; + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + private static fromNumber; + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. + * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value: ArrayBuffer | ArrayBufferView | CRC32CValidator | string | number): CRC32C; +} +export { CRC32C, CRC32C_DEFAULT_VALIDATOR_GENERATOR, CRC32C_EXCEPTION_MESSAGES, CRC32C_EXTENSIONS, CRC32C_EXTENSION_TABLE, CRC32CValidator, CRC32CValidatorGenerator, }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/crc32c.js b/node_modules/@google-cloud/storage/build/esm/src/crc32c.js new file mode 100644 index 0000000..642f1e3 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/crc32c.js @@ -0,0 +1,247 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _CRC32C_crc32c; +import { createReadStream } from 'fs'; +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +const CRC32C_EXTENSIONS = [ + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 0x7ca56a17, 
0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 0xc69f7b69, + 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, +]; +const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); +const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); +const CRC32C_EXCEPTION_MESSAGES = { + INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, + INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, + INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, +}; +class CRC32C { + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue = 0) { + /** Current CRC32C value */ + _CRC32C_crc32c.set(this, 0); + __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); + } + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data) { + let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; + for (const d of data) { + const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; + current = tablePoly ^ (current >>> 8); + } + __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); + } + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input) { + if (typeof input === 'number') { + return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + else if (typeof input === 'string') { + return input === this.toString(); + } + else if (Buffer.isBuffer(input)) { + return Buffer.compare(input, this.toBuffer()) === 0; + } + else { + // `CRC32C`-like object + return input.toString() === this.toString(); + } + } + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer() { + const buffer = Buffer.alloc(4); + buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); + return buffer; + } + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON() { + return this.toString(); + } + /** + * Returns a base64-encoded representation of the CRC32C value. 
+ * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString() { + return this.toBuffer().toString('base64'); + } + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf() { + return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + static fromBuffer(value) { + let buffer; + if (Buffer.isBuffer(value)) { + buffer = value; + } + else if ('buffer' in value) { + // `ArrayBufferView` + buffer = Buffer.from(value.buffer); + } + else { + // `ArrayBuffer` + buffer = Buffer.from(value); + } + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); + } + return new CRC32C(buffer.readInt32BE()); + } + static async fromFile(file) { + const crc32c = new CRC32C(); + await new Promise((resolve, reject) => { + createReadStream(file) + .on('data', (d) => crc32c.update(d)) + .on('end', resolve) + .on('error', reject); + }); + return crc32c; + } + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + static fromString(value) { + const buffer = Buffer.from(value, 'base64'); + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); + } + return this.fromBuffer(buffer); + } + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + static fromNumber(value) { + if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); + } + return new CRC32C(value); + } + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. 
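Editorial note, not part of the diff: a minimal usage sketch of the CRC32C helper documented above. It assumes the class is re-exported from the package entry point (as in current releases); the expected base64 values are the ones quoted in the comments above.
```js
// Minimal CRC32C sketch; assumes `CRC32C` is exported from the package root.
const {CRC32C} = require('@google-cloud/storage');

const crc32c = new CRC32C();
crc32c.update(Buffer.from('some '));
crc32c.update(Buffer.from('text\n'));
console.log(crc32c.toString());           // 'DkjKuA==' (base64, per the docs above)
console.log(crc32c.validate('DkjKuA==')); // true when the value matches

// CRC32C.from accepts a number, a 4-byte buffer, or 4-byte base64 data;
// plain strings are treated as base64 input, not as file paths.
const copy = CRC32C.from(crc32c.toString());
console.log(copy.valueOf() === crc32c.valueOf()); // true
```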
+ * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value) { + if (typeof value === 'number') { + return this.fromNumber(value); + } + else if (typeof value === 'string') { + return this.fromString(value); + } + else if ('byteLength' in value) { + // `ArrayBuffer` | `Buffer` | `ArrayBufferView` + return this.fromBuffer(value); + } + else { + // `CRC32CValidator`/`CRC32C`-like + return this.fromString(value.toString()); + } + } +} +_CRC32C_crc32c = new WeakMap(); +CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; +export { CRC32C, CRC32C_DEFAULT_VALIDATOR_GENERATOR, CRC32C_EXCEPTION_MESSAGES, CRC32C_EXTENSIONS, CRC32C_EXTENSION_TABLE, }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/file.d.ts b/node_modules/@google-cloud/storage/build/esm/src/file.d.ts new file mode 100644 index 0000000..2b315a8 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/file.d.ts @@ -0,0 +1,961 @@ +/// +/// +/// +/// +import { BodyResponseCallback, DecorateRequestOptions, GetConfig, MetadataCallback, ServiceObject, SetMetadataResponse } from './nodejs-common/index.js'; +import * as resumableUpload from './resumable-upload.js'; +import { Writable, Readable, PipelineSource } from 'stream'; +import * as http from 'http'; +import { PreconditionOptions, Storage } from './storage.js'; +import { AvailableServiceObjectMethods, Bucket } from './bucket.js'; +import { Acl, AclMetadata } from './acl.js'; +import { GetSignedUrlResponse, GetSignedUrlCallback, URLSigner, SignerGetSignedUrlConfig, Query } from './signer.js'; +import { Duplexify, GCCL_GCS_CMD_KEY } from './nodejs-common/util.js'; +import { CRC32C, CRC32CValidatorGenerator } from './crc32c.js'; +import { URL } from 'url'; +import { BaseMetadata, DeleteCallback, DeleteOptions, GetResponse, InstanceResponseCallback, RequestResponse, SetMetadataOptions } from './nodejs-common/service-object.js'; +import * as r from 'teeny-request'; +export type GetExpirationDateResponse = [Date]; +export interface GetExpirationDateCallback { + (err: Error | null, expirationDate?: Date | null, apiResponse?: unknown): void; +} +export interface PolicyDocument { + string: string; + base64: string; + signature: string; +} +export type SaveData = string | Buffer | PipelineSource; +export type GenerateSignedPostPolicyV2Response = [PolicyDocument]; +export interface GenerateSignedPostPolicyV2Callback { + (err: Error | null, policy?: PolicyDocument): void; +} +export interface GenerateSignedPostPolicyV2Options { + equals?: string[] | string[][]; + expires: string | number | Date; + startsWith?: string[] | string[][]; + acl?: string; + successRedirect?: string; + successStatus?: string; + contentLengthRange?: { + min?: number; + max?: number; + }; + /** + * @example + * 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + */ + signingEndpoint?: string; +} +export interface PolicyFields { + [key: string]: string; +} +export interface GenerateSignedPostPolicyV4Options { + expires: string | number | Date; + bucketBoundHostname?: string; + virtualHostedStyle?: boolean; + conditions?: object[]; + fields?: PolicyFields; + /** + * @example + * 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + */ + signingEndpoint?: string; +} +export interface GenerateSignedPostPolicyV4Callback { + (err: Error | null, output?: SignedPostPolicyV4Output): void; +} +export type 
GenerateSignedPostPolicyV4Response = [SignedPostPolicyV4Output]; +export interface SignedPostPolicyV4Output { + url: string; + fields: PolicyFields; +} +export interface GetSignedUrlConfig extends Pick { + action: 'read' | 'write' | 'delete' | 'resumable'; + version?: 'v2' | 'v4'; + virtualHostedStyle?: boolean; + cname?: string; + contentMd5?: string; + contentType?: string; + expires: string | number | Date; + accessibleAt?: string | number | Date; + extensionHeaders?: http.OutgoingHttpHeaders; + promptSaveAs?: string; + responseDisposition?: string; + responseType?: string; + queryParams?: Query; +} +export interface GetFileMetadataOptions { + userProject?: string; +} +export type GetFileMetadataResponse = [FileMetadata, unknown]; +export interface GetFileMetadataCallback { + (err: Error | null, metadata?: FileMetadata, apiResponse?: unknown): void; +} +export interface GetFileOptions extends GetConfig { + userProject?: string; + generation?: number; + softDeleted?: boolean; +} +export type GetFileResponse = [File, unknown]; +export interface GetFileCallback { + (err: Error | null, file?: File, apiResponse?: unknown): void; +} +export interface FileExistsOptions { + userProject?: string; +} +export type FileExistsResponse = [boolean]; +export interface FileExistsCallback { + (err: Error | null, exists?: boolean): void; +} +export interface DeleteFileOptions { + ignoreNotFound?: boolean; + userProject?: string; +} +export type DeleteFileResponse = [unknown]; +export interface DeleteFileCallback { + (err: Error | null, apiResponse?: unknown): void; +} +export type PredefinedAcl = 'authenticatedRead' | 'bucketOwnerFullControl' | 'bucketOwnerRead' | 'private' | 'projectPrivate' | 'publicRead'; +type PublicResumableUploadOptions = 'chunkSize' | 'highWaterMark' | 'isPartialUpload' | 'metadata' | 'origin' | 'offset' | 'predefinedAcl' | 'private' | 'public' | 'uri' | 'userProject'; +export interface CreateResumableUploadOptions extends Pick { + /** + * A CRC32C to resume from when continuing a previous upload. It is recommended + * to capture the `crc32c` event from previous upload sessions to provide in + * subsequent requests in order to accurately track the upload. This is **required** + * when validating a final portion of the uploaded object. + * + * @see {@link CRC32C.from} for possible values. 
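As a side note on the resumable-upload options typed above, this is a hedged sketch (bucket, object, and local path are placeholders, not from this diff) of creating a session URI up front and handing it back through the `uri` option so the upload can be resumed later.
```js
// Sketch only: pre-create a resumable session, then stream into it.
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function uploadResumable() {
  const file = storage.bucket('my-bucket').file('big-file.bin');
  // createResumableUpload resolves with [uri] per the declarations above.
  const [uri] = await file.createResumableUpload();
  await new Promise((resolve, reject) => {
    fs.createReadStream('./big-file.bin')
      .pipe(file.createWriteStream({uri})) // reuse the session URI
      .on('error', reject)
      .on('finish', resolve);
  });
}
```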
+ */ + resumeCRC32C?: Parameters<(typeof CRC32C)['from']>[0]; + preconditionOpts?: PreconditionOptions; + [GCCL_GCS_CMD_KEY]?: resumableUpload.UploadConfig[typeof GCCL_GCS_CMD_KEY]; +} +export type CreateResumableUploadResponse = [string]; +export interface CreateResumableUploadCallback { + (err: Error | null, uri?: string): void; +} +export interface CreateWriteStreamOptions extends CreateResumableUploadOptions { + contentType?: string; + gzip?: string | boolean; + resumable?: boolean; + timeout?: number; + validation?: string | boolean; +} +export interface MakeFilePrivateOptions { + metadata?: FileMetadata; + strict?: boolean; + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type MakeFilePrivateResponse = [unknown]; +export type MakeFilePrivateCallback = SetFileMetadataCallback; +export interface IsPublicCallback { + (err: Error | null, resp?: boolean): void; +} +export type IsPublicResponse = [boolean]; +export type MakeFilePublicResponse = [unknown]; +export interface MakeFilePublicCallback { + (err?: Error | null, apiResponse?: unknown): void; +} +export type MoveResponse = [unknown]; +export interface MoveCallback { + (err: Error | null, destinationFile?: File | null, apiResponse?: unknown): void; +} +export interface MoveOptions { + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type RenameOptions = MoveOptions; +export type RenameResponse = MoveResponse; +export type RenameCallback = MoveCallback; +export type RotateEncryptionKeyOptions = string | Buffer | EncryptionKeyOptions; +export interface EncryptionKeyOptions { + encryptionKey?: string | Buffer; + kmsKeyName?: string; + preconditionOpts?: PreconditionOptions; +} +export type RotateEncryptionKeyCallback = CopyCallback; +export type RotateEncryptionKeyResponse = CopyResponse; +export declare enum ActionToHTTPMethod { + read = "GET", + write = "PUT", + delete = "DELETE", + resumable = "POST" +} +/** + * @deprecated - no longer used + */ +export declare const STORAGE_POST_POLICY_BASE_URL = "https://storage.googleapis.com"; +export interface FileOptions { + crc32cGenerator?: CRC32CValidatorGenerator; + encryptionKey?: string | Buffer; + generation?: number | string; + kmsKeyName?: string; + preconditionOpts?: PreconditionOptions; + userProject?: string; +} +export interface CopyOptions { + cacheControl?: string; + contentEncoding?: string; + contentType?: string; + contentDisposition?: string; + destinationKmsKeyName?: string; + metadata?: FileMetadata; + predefinedAcl?: string; + token?: string; + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export type CopyResponse = [File, unknown]; +export interface CopyCallback { + (err: Error | null, file?: File | null, apiResponse?: unknown): void; +} +export type DownloadResponse = [Buffer]; +export type DownloadCallback = (err: RequestError | null, contents: Buffer) => void; +export interface DownloadOptions extends CreateReadStreamOptions { + destination?: string; +} +export interface CreateReadStreamOptions { + userProject?: string; + validation?: 'md5' | 'crc32c' | false | true; + start?: number; + end?: number; + decompress?: boolean; + [GCCL_GCS_CMD_KEY]?: string; +} +export interface SaveOptions extends CreateWriteStreamOptions { + onUploadProgress?: (progressEvent: any) => void; +} +export interface SaveCallback { + (err?: Error | null): void; +} +export interface SetFileMetadataOptions { + userProject?: string; +} +export interface SetFileMetadataCallback { + (err?: Error | null, apiResponse?: 
unknown): void; +} +export type SetFileMetadataResponse = [unknown]; +export type SetStorageClassResponse = [unknown]; +export interface SetStorageClassOptions { + userProject?: string; + preconditionOpts?: PreconditionOptions; +} +export interface SetStorageClassCallback { + (err?: Error | null, apiResponse?: unknown): void; +} +export interface RestoreOptions extends PreconditionOptions { + generation: number; + projection?: 'full' | 'noAcl'; +} +export interface FileMetadata extends BaseMetadata { + acl?: AclMetadata[] | null; + bucket?: string; + cacheControl?: string; + componentCount?: number; + contentDisposition?: string; + contentEncoding?: string; + contentLanguage?: string; + contentType?: string; + crc32c?: string; + customerEncryption?: { + encryptionAlgorithm?: string; + keySha256?: string; + }; + customTime?: string; + eventBasedHold?: boolean | null; + readonly eventBasedHoldReleaseTime?: string; + generation?: string | number; + hardDeleteTime?: string; + kmsKeyName?: string; + md5Hash?: string; + mediaLink?: string; + metadata?: { + [key: string]: string; + }; + metageneration?: string | number; + name?: string; + owner?: { + entity?: string; + entityId?: string; + }; + retention?: { + retainUntilTime?: string; + mode?: string; + } | null; + retentionExpirationTime?: string; + size?: string | number; + softDeleteTime?: string; + storageClass?: string; + temporaryHold?: boolean | null; + timeCreated?: string; + timeDeleted?: string; + timeStorageClassUpdated?: string; + updated?: string; +} +export declare class RequestError extends Error { + code?: string; + errors?: Error[]; +} +export declare enum FileExceptionMessages { + EXPIRATION_TIME_NA = "An expiration time is not available.", + DESTINATION_NO_NAME = "Destination file should have a name.", + INVALID_VALIDATION_FILE_RANGE = "Cannot use validation with file ranges (start/end).", + MD5_NOT_AVAILABLE = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects.", + EQUALS_CONDITION_TWO_ELEMENTS = "Equals condition must be an array of 2 elements.", + STARTS_WITH_TWO_ELEMENTS = "StartsWith condition must be an array of 2 elements.", + CONTENT_LENGTH_RANGE_MIN_MAX = "ContentLengthRange must have numeric min & max fields.", + DOWNLOAD_MISMATCH = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again.", + UPLOAD_MISMATCH_DELETE_FAIL = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n ", + UPLOAD_MISMATCH = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again.", + MD5_RESUMED_UPLOAD = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value", + MISSING_RESUME_CRC32C_FINAL_UPLOAD = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`." +} +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. 
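Because `preconditionOpts` appears throughout these option types, here is a hypothetical sketch of a conditional write using `ifGenerationMatch`; the bucket and file names are placeholders, and the generation is read back from `getMetadata()`.
```js
// Sketch: only overwrite the object if its generation has not changed.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
const file = storage.bucket('my-bucket').file('my-file');

async function saveIfUnchanged(contents) {
  const [metadata] = await file.getMetadata();
  await file.save(contents, {
    // The write fails with 412 Precondition Failed if another writer
    // updated the object in the meantime.
    preconditionOpts: {ifGenerationMatch: metadata.generation},
  });
}
```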
+ * + * @class + */ +declare class File extends ServiceObject { + #private; + acl: Acl; + crc32cGenerator: CRC32CValidatorGenerator; + bucket: Bucket; + storage: Storage; + kmsKeyName?: string; + userProject?: string; + signer?: URLSigner; + name: string; + generation?: number; + parent: Bucket; + private encryptionKey?; + private encryptionKeyBase64?; + private encryptionKeyHash?; + private encryptionKeyInterceptor?; + private instanceRetryValue?; + instancePreconditionOpts?: PreconditionOptions; + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. 
+ * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. + * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket: Bucket, name: string, options?: FileOptions); + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI(): URL; + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + private shouldRetryBasedOnPreconditionAndIdempotencyStrat; + copy(destination: string | Bucket | File, options?: CopyOptions): Promise; + copy(destination: string | Bucket | File, callback: CopyCallback): void; + copy(destination: string | Bucket | File, options: CopyOptions, callback: CopyCallback): void; + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. 
+ * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //
Downloading a File
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options?: CreateReadStreamOptions): Readable; + createResumableUpload(options?: CreateResumableUploadOptions): Promise; + createResumableUpload(options: CreateResumableUploadOptions, callback: CreateResumableUploadCallback): void; + createResumableUpload(callback: CreateResumableUploadCallback): void; + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. + * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. 
(Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *
+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //
Uploading a File
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //
Uploading a File with gzip compression
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //
Uploading a File with Metadata
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + * + * //- + * //
Continuing a Resumable Upload
+ * // + * // One can capture a `uri` from a resumable upload to reuse later. + * // Additionally, for validation, one can also capture and pass `crc32c`. + * //- + * let uri: string | undefined = undefined; + * let resumeCRC32C: string | undefined = undefined; + * + * fs.createWriteStream() + * .on('uri', link => {uri = link}) + * .on('crc32', crc32c => {resumeCRC32C = crc32c}); + * + * // later... + * fs.createWriteStream({uri, resumeCRC32C}); + */ + createWriteStream(options?: CreateWriteStreamOptions): Writable; + /** + * Delete the object. + * + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + delete(options?: DeleteOptions): Promise<[r.Response]>; + delete(options: DeleteOptions, callback: DeleteCallback): void; + delete(callback: DeleteCallback): void; + download(options?: DownloadOptions): Promise; + download(options: DownloadOptions, callback: DownloadCallback): void; + download(callback: DownloadCallback): void; + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. + * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey: string | Buffer): this; + get(options?: GetFileOptions): Promise>; + get(callback: InstanceResponseCallback): void; + get(options: GetFileOptions, callback: InstanceResponseCallback): void; + getExpirationDate(): Promise; + getExpirationDate(callback: GetExpirationDateCallback): void; + generateSignedPostPolicyV2(options: GenerateSignedPostPolicyV2Options): Promise; + generateSignedPostPolicyV2(options: GenerateSignedPostPolicyV2Options, callback: GenerateSignedPostPolicyV2Callback): void; + generateSignedPostPolicyV2(callback: GenerateSignedPostPolicyV2Callback): void; + generateSignedPostPolicyV4(options: GenerateSignedPostPolicyV4Options): Promise; + generateSignedPostPolicyV4(options: GenerateSignedPostPolicyV4Options, callback: GenerateSignedPostPolicyV4Callback): void; + generateSignedPostPolicyV4(callback: GenerateSignedPostPolicyV4Callback): void; + getSignedUrl(cfg: 
GetSignedUrlConfig): Promise; + getSignedUrl(cfg: GetSignedUrlConfig, callback: GetSignedUrlCallback): void; + isPublic(): Promise; + isPublic(callback: IsPublicCallback): void; + makePrivate(options?: MakeFilePrivateOptions): Promise; + makePrivate(callback: MakeFilePrivateCallback): void; + makePrivate(options: MakeFilePrivateOptions, callback: MakeFilePrivateCallback): void; + makePublic(): Promise; + makePublic(callback: MakeFilePublicCallback): void; + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl(): string; + move(destination: string | Bucket | File, options?: MoveOptions): Promise; + move(destination: string | Bucket | File, callback: MoveCallback): void; + move(destination: string | Bucket | File, options: MoveOptions, callback: MoveCallback): void; + rename(destinationFile: string | File, options?: RenameOptions): Promise; + rename(destinationFile: string | File, callback: RenameCallback): void; + rename(destinationFile: string | File, options: RenameOptions, callback: RenameCallback): void; + /** + * @typedef {object} RestoreOptions Options for File#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + * @param {string | number} [ifGenerationMatch] Request proceeds if the generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifGenerationNotMatch] Request proceeds if the generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + * @param {string | number} [ifMetagenerationMatch] Request proceeds if the meta-generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifMetagenerationNotMatch] Request proceeds if the meta-generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + */ + /** + * Restores a soft-deleted file + * @param {RestoreOptions} options Restore options. 
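To illustrate the `GetSignedUrlConfig` shape declared above, a short sketch (names are placeholders) that generates a V4 read URL expiring in 15 minutes.
```js
// Sketch: V4 signed read URL per GetSignedUrlConfig above.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function generateReadUrl() {
  const [url] = await storage
    .bucket('my-bucket')
    .file('my-file')
    .getSignedUrl({
      version: 'v4',
      action: 'read',
      expires: Date.now() + 15 * 60 * 1000, // string | number | Date accepted
    });
  return url;
}
```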
+ * @returns {Promise} + */ + restore(options: RestoreOptions): Promise; + request(reqOpts: DecorateRequestOptions): Promise; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + rotateEncryptionKey(options?: RotateEncryptionKeyOptions): Promise; + rotateEncryptionKey(callback: RotateEncryptionKeyCallback): void; + rotateEncryptionKey(options: RotateEncryptionKeyOptions, callback: RotateEncryptionKeyCallback): void; + save(data: SaveData, options?: SaveOptions): Promise; + save(data: SaveData, callback: SaveCallback): void; + save(data: SaveData, options: SaveOptions, callback: SaveCallback): void; + setMetadata(metadata: FileMetadata, options?: SetMetadataOptions): Promise>; + setMetadata(metadata: FileMetadata, callback: MetadataCallback): void; + setMetadata(metadata: FileMetadata, options: SetMetadataOptions, callback: MetadataCallback): void; + setStorageClass(storageClass: string, options?: SetStorageClassOptions): Promise; + setStorageClass(storageClass: string, options: SetStorageClassOptions, callback: SetStorageClassCallback): void; + setStorageClass(storageClass: string, callback?: SetStorageClassCallback): void; + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject: string): void; + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup: Duplexify, options?: CreateResumableUploadOptions): void; + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup: Duplexify, options?: CreateWriteStreamOptions): void; + disableAutoRetryConditionallyIdempotent_(coreOpts: any, methodType: AvailableServiceObjectMethods, localPreconditionOptions?: PreconditionOptions): void; + private getBufferFromReadable; +} +/** + * Reference to the {@link File} class. + * @name module:@google-cloud/storage.File + * @see File + */ +export { File }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/file.js b/node_modules/@google-cloud/storage/build/esm/src/file.js new file mode 100644 index 0000000..f0705ef --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/file.js @@ -0,0 +1,3323 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _File_instances, _File_validateIntegrity; +import { ServiceObject, util, } from './nodejs-common/index.js'; +import { promisifyAll } from '@google-cloud/promisify'; +import compressible from 'compressible'; +import * as crypto from 'crypto'; +import * as fs from 'fs'; +import mime from 'mime'; +import * as resumableUpload from './resumable-upload.js'; +import { Writable, pipeline } from 'stream'; +import * as zlib from 'zlib'; +import { ExceptionMessages, IdempotencyStrategy, } from './storage.js'; +import { AvailableServiceObjectMethods, Bucket } from './bucket.js'; +import { Acl } from './acl.js'; +import { SigningError, URLSigner, } from './signer.js'; +import { GCCL_GCS_CMD_KEY, } from './nodejs-common/util.js'; +import duplexify from 'duplexify'; +import { normalize, objectKeyToLowercase, unicodeJSONStringify, formatAsUTCISO, PassThroughShim, } from './util.js'; +import { CRC32C } from './crc32c.js'; +import { HashStreamValidator } from './hash-stream-validator.js'; +import AsyncRetry from 'async-retry'; +export var ActionToHTTPMethod; +(function (ActionToHTTPMethod) { + ActionToHTTPMethod["read"] = "GET"; + ActionToHTTPMethod["write"] = "PUT"; + ActionToHTTPMethod["delete"] = "DELETE"; + ActionToHTTPMethod["resumable"] = "POST"; +})(ActionToHTTPMethod || (ActionToHTTPMethod = {})); +/** + * @deprecated - no longer used + */ +export const STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; +/** + * @private + */ +const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; +export class RequestError extends Error { +} +const SEVEN_DAYS = 7 * 24 * 60 * 60; +export var FileExceptionMessages; +(function (FileExceptionMessages) { + FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; + FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; + FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; + FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; + FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; + FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; + FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; + FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. 
To be sure the content is the same, you should download the file again."; + FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; + FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; + FileExceptionMessages["MD5_RESUMED_UPLOAD"] = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value"; + FileExceptionMessages["MISSING_RESUME_CRC32C_FINAL_UPLOAD"] = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."; +})(FileExceptionMessages || (FileExceptionMessages = {})); +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. + * + * @class + */ +class File extends ServiceObject { + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. 
+ * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. + * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket, name, options = {}) { + var _a, _b; + const requestQueryObject = {}; + let generation; + if (options.generation !== null) { + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + requestQueryObject.generation = generation; + } + } + Object.assign(requestQueryObject, options.preconditionOpts); + const userProject = options.userProject || bucket.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * @typedef {array} DeleteFileResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteFileCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @method File#delete + * @param {object} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the file does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {DeleteFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_delete_file + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} FileExistsResponse + * @property {boolean} 0 Whether the {@link File} exists. + */ + /** + * @callback FileExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link File} exists. + */ + /** + * Check if the file exists. + * + * @method File#exists + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {FileExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileResponse + * @property {File} 0 The {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileCallback + * @param {?Error} err Request error, if any. + * @param {File} file The {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a file object and its metadata if it exists. + * + * @method File#get + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {number} [options.generation] The generation number to get + * @param {boolean} [options.softDeleted] If true, returns the soft-deleted object. + Object `generation` is required if `softDeleted` is set to True. + * @param {GetFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.get(function(err, file, apiResponse) { + * // file.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.get().then(function(data) { + * const file = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileMetadataResponse + * @property {object} 0 The {@link File} metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The {@link File} metadata. + * @param {object} apiResponse The full API response. 
+ */ + /** + * Get the file's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} + * + * @method File#getMetadata + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_get_metadata + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). + * @param {string} [userProject] The ID of the project which will be billed for the request. + */ + /** + * @callback SetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} SetFileMetadataResponse + * @property {object} 0 The full API response. + */ + /** + * Merge the given metadata with the current remote file's metadata. This + * will set metadata if it was previously unset or update previously set + * metadata. To unset previously set metadata, set its value to null. + * + * You can set custom key/value pairs in the metadata key of the given + * object, however the other properties outside of this object must adhere + * to the {@link https://goo.gl/BOnnCK| official API documentation}. + * + * + * See the examples below for more information. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @method File#setMetadata + * @param {object} [metadata] The metadata you wish to update. + * @param {SetFileMetadataOptions} [options] Configuration options. + * @param {SetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * const metadata = { + * contentType: 'application/x-font-ttf', + * metadata: { + * my: 'custom', + * properties: 'go here' + * } + * }; + * + * file.setMetadata(metadata, function(err, apiResponse) {}); + * + * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } + * file.setMetadata({ + * metadata: { + * abc: '123', // will be set. + * unsetMe: null, // will be unset (deleted). + * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. + * } + * }, function(err, apiResponse) { + * // metadata should now be { abc: '123', hello: 'goodbye' } + * }); + * + * //- + * // Set a temporary hold on this file from its bucket's retention period + * // configuration. + * // + * file.setMetadata({ + * temporaryHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Alternatively, you may set a temporary hold. 
This will follow the + * // same behavior as an event-based hold, with the exception that the + * // bucket's retention policy will not renew for this file from the time + * // the hold is released. + * //- + * file.setMetadata({ + * eventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: bucket, + baseUrl: '/o', + id: encodeURIComponent(name), + methods, + }); + _File_instances.add(this); + this.bucket = bucket; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.storage = bucket.parent; + // @TODO Can this duplicate code from above be avoided? + if (options.generation !== null) { + let generation; + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + this.generation = generation; + } + } + this.kmsKeyName = options.kmsKeyName; + this.userProject = userProject; + this.name = name; + if (options.encryptionKey) { + this.setEncryptionKey(options.encryptionKey); + } + this.acl = new Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.bucket.crc32cGenerator; + this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = this.bucket.cloudStorageURI; + uri.pathname = this.name; + return uri; + } + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { + var _a; + return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && + ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryNever); + } + /** + * @typedef {array} CopyResponse + * @property {File} 0 The copied {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback CopyCallback + * @param {?Error} err Request error, if any. + * @param {File} copiedFile The copied {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} CopyOptions Configuration options for File#copy(). 
See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [cacheControl] The cacheControl setting for the new file. + * @property {string} [contentEncoding] The contentEncoding setting for the new file. + * @property {string} [contentType] The contentType setting for the new file. + * @property {string} [destinationKmsKeyName] Resource name of the Cloud + * KMS key, of the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {Metadata} [metadata] Metadata to specify on the copied file. + * @property {string} [predefinedAcl] Set the ACL for the new file. + * @property {string} [token] A previously-returned `rewriteToken` from an + * unfinished rewrite request. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Copy this file to another file. By default, this will copy the file to the + * same bucket, but you can choose to copy it to another Bucket by providing + * a Bucket or File object or a URL starting with "gs://". + * The generation of the file will not be preserved. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {CopyOptions} [options] Configuration options. See an + * @param {CopyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { + * // `my-bucket` now contains: + * // - "my-image.png" + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-copy.png'; + * file.copy(newLocation, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be copied to that bucket + * // using the same name. + * //- + * const anotherBucket = storage.bucket('another-bucket'); + * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. 
+ * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * file.copy(anotherFile, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `copiedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.copy(newLocation).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_copy_file + * Another example: + */ + copy(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || util.noop; + let destBucket; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destBucket = this.storage.bucket(parsedDestination[1]); + destName = parsedDestination[2]; + } + else { + destBucket = this.bucket; + destName = destination; + } + } + else if (destination instanceof Bucket) { + destBucket = destination; + destName = this.name; + } + else if (destination instanceof File) { + destBucket = destination.bucket; + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + const query = {}; + if (this.generation !== undefined) { + query.sourceGeneration = this.generation; + } + if (options.token !== undefined) { + query.rewriteToken = options.token; + } + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (options.predefinedAcl !== undefined) { + query.destinationPredefinedAcl = options.predefinedAcl; + delete options.predefinedAcl; + } + newFile = newFile || destBucket.file(destName); + const headers = {}; + if (this.encryptionKey !== undefined) { + headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; + headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; + headers['x-goog-copy-source-encryption-key-sha256'] = + this.encryptionKeyHash; + } + if (newFile.encryptionKey !== undefined) { + this.setEncryptionKey(newFile.encryptionKey); + } + else if (options.destinationKmsKeyName !== undefined) { + query.destinationKmsKeyName = options.destinationKmsKeyName; + delete options.destinationKmsKeyName; + } + else if (newFile.kmsKeyName !== undefined) { + query.destinationKmsKeyName = newFile.kmsKeyName; + } + if (query.destinationKmsKeyName) { + this.kmsKeyName = query.destinationKmsKeyName; + const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); + if (keyIndex > -1) { + this.interceptors.splice(keyIndex, 1); + } + } + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? 
void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + headers, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + if (resp.rewriteToken) { + const options = { + token: resp.rewriteToken, + }; + if (query.userProject) { + options.userProject = query.userProject; + } + if (query.destinationKmsKeyName) { + options.destinationKmsKeyName = query.destinationKmsKeyName; + } + this.copy(newFile, options, callback); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. + * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //
Downloading a File
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options = {}) { + options = Object.assign({ decompress: true }, options); + const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; + const tailRequest = options.end < 0; + let validateStream = undefined; + let request = undefined; + const throughStream = new PassThroughShim(); + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + const value = options.validation.toLowerCase().trim(); + crc32c = value === 'crc32c'; + md5 = value === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + } + const shouldRunValidation = !rangeRequest && (crc32c || md5); + if (rangeRequest) { + if (typeof options.validation === 'string' || + options.validation === true) { + throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); + } + // Range requests can't receive data integrity checks. + crc32c = false; + md5 = false; + } + const onComplete = (err) => { + if (err) { + // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed. + // This causes a memory leak, so cleanup the sockets manually here by destroying the agent. + if (request === null || request === void 0 ? void 0 : request.agent) { + request.agent.destroy(); + } + throughStream.destroy(err); + } + }; + // We listen to the response event from the request stream so that we + // can... + // + // 1) Intercept any data from going to the user if an error occurred. + // 2) Calculate the hashes from the http.IncomingMessage response + // stream, + // which will return the bytes from the source without decompressing + // gzip'd content. We then send it through decompressed, if + // applicable, to the user. + const onResponse = (err, _body, rawResponseStream) => { + if (err) { + // Get error message from the body. 
+ this.getBufferFromReadable(rawResponseStream).then(body => { + err.message = body.toString('utf8'); + throughStream.destroy(err); + }); + return; + } + request = rawResponseStream.request; + const headers = rawResponseStream.toJSON().headers; + const isCompressed = headers['content-encoding'] === 'gzip'; + const hashes = {}; + // The object is safe to validate if: + // 1. It was stored gzip and returned to us gzip OR + // 2. It was never stored as gzip + const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && + isCompressed) || + headers['x-goog-stored-content-encoding'] === 'identity'; + const transformStreams = []; + if (shouldRunValidation) { + // The x-goog-hash header should be set with a crc32c and md5 hash. + // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' + if (typeof headers['x-goog-hash'] === 'string') { + headers['x-goog-hash'] + .split(',') + .forEach((hashKeyValPair) => { + const delimiterIndex = hashKeyValPair.indexOf('='); + const hashType = hashKeyValPair.substring(0, delimiterIndex); + const hashValue = hashKeyValPair.substring(delimiterIndex + 1); + hashes[hashType] = hashValue; + }); + } + validateStream = new HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + crc32cExpected: hashes.crc32c, + md5Expected: hashes.md5, + }); + } + if (md5 && !hashes.md5) { + const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); + hashError.code = 'MD5_NOT_AVAILABLE'; + throughStream.destroy(hashError); + return; + } + if (safeToValidate && shouldRunValidation && validateStream) { + transformStreams.push(validateStream); + } + if (isCompressed && options.decompress) { + transformStreams.push(zlib.createGunzip()); + } + pipeline(rawResponseStream, ...transformStreams, throughStream, onComplete); + }; + // Authenticate the request, then pipe the remote API request to the stream + // returned to the user. + const makeRequest = () => { + const query = { alt: 'media' }; + if (this.generation) { + query.generation = this.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + const headers = { + 'Accept-Encoding': 'gzip', + 'Cache-Control': 'no-store', + }; + if (rangeRequest) { + const start = typeof options.start === 'number' ? options.start : '0'; + const end = typeof options.end === 'number' ? options.end : ''; + headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; + } + const reqOpts = { + uri: '', + headers, + qs: query, + }; + if (options[GCCL_GCS_CMD_KEY]) { + reqOpts[GCCL_GCS_CMD_KEY] = options[GCCL_GCS_CMD_KEY]; + } + this.requestStream(reqOpts) + .on('error', err => { + throughStream.destroy(err); + }) + .on('response', res => { + throughStream.emit('response', res); + util.handleResp(null, res, null, onResponse); + }) + .resume(); + }; + throughStream.on('reading', makeRequest); + return throughStream; + } + /** + * @callback CreateResumableUploadCallback + * @param {?Error} err Request error, if any. + * @param {string} uri The resumable upload's unique session URI. + */ + /** + * @typedef {array} CreateResumableUploadResponse + * @property {string} 0 The resumable upload's unique session URI. + */ + /** + * @typedef {object} CreateResumableUploadOptions + * @property {object} [metadata] Metadata to set on the file. + * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. + * @property {string} [origin] Origin header to set for the upload. 
+ * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string} [chunkSize] Create a separate request per chunk. This + * value is in bytes and should be a multiple of 256 KiB (2^18). + * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} + */ + /** + * Create a unique resumable upload session URI. This is the first step when + * performing a resumable upload. + * + * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * for more on how the entire process works. + * + *
Note
+ * + * If you are just looking to perform a resumable upload without worrying + * about any of the details, see {@link File#createWriteStream}. Resumable + * uploads are performed by default. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * + * @param {CreateResumableUploadOptions} [options] Configuration options. + * @param {CreateResumableUploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.createResumableUpload(function(err, uri) { + * if (!err) { + * // `uri` can be used to PUT data to. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.createResumableUpload().then(function(data) { + * const uri = data[0]; + * }); + * ``` + */ + createResumableUpload(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const retryOptions = this.storage.retryOptions; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryNever) { + retryOptions.autoRetry = false; + } + resumableUpload.createURI({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + origin: options.origin, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + userProject: options.userProject || this.userProject, + retryOptions: retryOptions, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + universeDomain: this.bucket.storage.universeDomain, + [GCCL_GCS_CMD_KEY]: options[GCCL_GCS_CMD_KEY], + }, callback); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. 
+ * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *
+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //
Uploading a File
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //
Uploading a File with gzip compression
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //
Uploading a File with Metadata
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + * + * //- + * //
Continuing a Resumable Upload
+ * // + * // One can capture a `uri` from a resumable upload to reuse later. + * // Additionally, for validation, one can also capture and pass `crc32c`. + * //- + * let uri: string | undefined = undefined; + * let resumeCRC32C: string | undefined = undefined; + * + * fs.createWriteStream() + * .on('uri', link => {uri = link}) + * .on('crc32', crc32c => {resumeCRC32C = crc32c}); + * + * // later... + * fs.createWriteStream({uri, resumeCRC32C}); + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + createWriteStream(options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); + if (options.contentType) { + options.metadata.contentType = options.contentType; + } + if (!options.metadata.contentType || + options.metadata.contentType === 'auto') { + const detectedContentType = mime.getType(this.name); + if (detectedContentType) { + options.metadata.contentType = detectedContentType; + } + } + let gzip = options.gzip; + if (gzip === 'auto') { + gzip = compressible(options.metadata.contentType || ''); + } + if (gzip) { + options.metadata.contentEncoding = 'gzip'; + } + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + options.validation = options.validation.toLowerCase(); + crc32c = options.validation === 'crc32c'; + md5 = options.validation === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + md5 = false; + } + if (options.offset) { + if (md5) { + throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD); + } + if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) { + throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD); + } + } + /** + * A callback for determining when the underlying pipeline is complete. + * It's possible the pipeline callback could error before the write stream + * calls `final` so by default this will destroy the write stream unless the + * write stream sets this callback via its `final` handler. + * @param error An optional error + */ + let pipelineCallback = error => { + writeStream.destroy(error || undefined); + }; + // A stream for consumer to write to + const writeStream = new Writable({ + final(cb) { + // Set the pipeline callback to this callback so the pipeline's results + // can be populated to the consumer + pipelineCallback = cb; + emitStream.end(); + }, + write(chunk, encoding, cb) { + emitStream.write(chunk, encoding, cb); + }, + }); + const transformStreams = []; + if (gzip) { + transformStreams.push(zlib.createGzip()); + } + const emitStream = new PassThroughShim(); + let hashCalculatingStream = null; + if (crc32c || md5) { + const crc32cInstance = options.resumeCRC32C + ? 
CRC32C.from(options.resumeCRC32C) + : undefined; + hashCalculatingStream = new HashStreamValidator({ + crc32c, + crc32cInstance, + md5, + crc32cGenerator: this.crc32cGenerator, + updateHashesOnly: true, + }); + transformStreams.push(hashCalculatingStream); + } + const fileWriteStream = duplexify(); + let fileWriteStreamMetadataReceived = false; + // Handing off emitted events to users + emitStream.on('reading', () => writeStream.emit('reading')); + emitStream.on('writing', () => writeStream.emit('writing')); + fileWriteStream.on('uri', evt => writeStream.emit('uri', evt)); + fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); + fileWriteStream.on('response', resp => writeStream.emit('response', resp)); + fileWriteStream.once('metadata', () => { + fileWriteStreamMetadataReceived = true; + }); + writeStream.once('writing', () => { + if (options.resumable === false) { + this.startSimpleUpload_(fileWriteStream, options); + } + else { + this.startResumableUpload_(fileWriteStream, options); + } + pipeline(emitStream, ...transformStreams, fileWriteStream, async (e) => { + if (e) { + return pipelineCallback(e); + } + // We want to make sure we've received the metadata from the server in order + // to properly validate the object's integrity. Depending on the type of upload, + // the stream could close before the response is returned. + if (!fileWriteStreamMetadataReceived) { + try { + await new Promise((resolve, reject) => { + fileWriteStream.once('metadata', resolve); + fileWriteStream.once('error', reject); + }); + } + catch (e) { + return pipelineCallback(e); + } + } + // Emit the local CRC32C value for future validation, if validation is enabled. + if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) { + writeStream.emit('crc32c', hashCalculatingStream.crc32c); + } + try { + // Metadata may not be ready if the upload is a partial upload, + // nothing to validate yet. + const metadataNotReady = options.isPartialUpload && !this.metadata; + if (hashCalculatingStream && !metadataNotReady) { + await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { + crc32c, + md5, + }); + } + pipelineCallback(); + } + catch (e) { + pipelineCallback(e); + } + }); + }); + return writeStream; + } + delete(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, AvailableServiceObjectMethods.delete, options); + super + .delete(options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} DownloadResponse + * @property [0] The contents of a File. + */ + /** + * @callback DownloadCallback + * @param err Request error, if any. + * @param contents The contents of a File. + */ + /** + * Convenience method to download a file into memory or to a local + * destination. + * + * @param {object} [options] Configuration options. The arguments match those + * passed to {@link File#createReadStream}. + * @param {string} [options.destination] Local file path to write the file's + * contents to. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DownloadCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Download a file into memory. The contents will be available as the + * second + * // argument in the demonstration below, `contents`. + * //- + * file.download(function(err, contents) {}); + * + * //- + * // Download a file to a local destination. + * //- + * file.download({ + * destination: '/Users/me/Desktop/file-backup.txt' + * }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.download().then(function(data) { + * const contents = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_download_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + * + * @example include:samples/requesterPays.js + * region_tag:storage_download_file_requester_pays + * Example of downloading a file where the requester pays: + */ + download(optionsOrCallback, cb) { + let options; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + let called = false; + const callback = ((...args) => { + if (!called) + cb(...args); + called = true; + }); + const destination = options.destination; + delete options.destination; + const fileStream = this.createReadStream(options); + let receivedData = false; + if (destination) { + fileStream + .on('error', callback) + .once('data', data => { + // We know that the file exists the server - now we can truncate/write to a file + receivedData = true; + const writable = fs.createWriteStream(destination); + writable.write(data); + fileStream + .pipe(writable) + .on('error', callback) + .on('finish', callback); + }) + .on('end', () => { + // In the case of an empty file no data will be received before the end event fires + if (!receivedData) { + const data = Buffer.alloc(0); + try { + fs.writeFileSync(destination, data); + callback(null, data); + } + catch (e) { + callback(e, data); + } + } + }); + } + else { + this.getBufferFromReadable(fileStream) + .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) + .catch(callback); + } + } + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. 
+ * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey) { + this.encryptionKey = encryptionKey; + this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); + this.encryptionKeyHash = crypto + .createHash('sha256') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .update(this.encryptionKeyBase64, 'base64') + .digest('base64'); + this.encryptionKeyInterceptor = { + request: reqOpts => { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryptionKeyHash; + return reqOpts; + }, + }; + this.interceptors.push(this.encryptionKeyInterceptor); + return this; + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + super + .get(options) + .then(resp => cb(null, ...resp)) + .catch(cb); + } + /** + * @typedef {array} GetExpirationDateResponse + * @property {date} 0 A Date object representing the earliest time this file's + * retention policy will expire. + */ + /** + * @callback GetExpirationDateCallback + * @param {?Error} err Request error, if any. + * @param {date} expirationDate A Date object representing the earliest time + * this file's retention policy will expire. + */ + /** + * If this bucket has a retention policy defined, use this method to get a + * Date object representing the earliest time this file will expire. + * + * @param {GetExpirationDateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getExpirationDate(function(err, expirationDate) { + * // expirationDate is a Date object. 
+ * });
+ * ```
+ */
+ getExpirationDate(callback) {
+ this.getMetadata((err, metadata, apiResponse) => {
+ if (err) {
+ callback(err, null, apiResponse);
+ return;
+ }
+ if (!metadata.retentionExpirationTime) {
+ const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA);
+ callback(error, null, apiResponse);
+ return;
+ }
+ callback(null, new Date(metadata.retentionExpirationTime), apiResponse);
+ });
+ }
+ /**
+ * @typedef {array} GenerateSignedPostPolicyV2Response
+ * @property {object} 0 The document policy.
+ */
+ /**
+ * @callback GenerateSignedPostPolicyV2Callback
+ * @param {?Error} err Request error, if any.
+ * @param {object} policy The document policy.
+ */
+ /**
+ * Get a signed policy document to allow a user to upload data with a POST
+ * request.
+ *
+ * In Google Cloud Platform environments, such as Cloud Functions and App
+ * Engine, you usually don't provide a `keyFilename` or `credentials` during
+ * instantiation. In those environments, we call the
+ * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}
+ * to create a signed policy. That API requires either the
+ * `https://www.googleapis.com/auth/iam` or
+ * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are
+ * enabled.
+ *
+ * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process}
+ *
+ * @throws {Error} If an expiration timestamp from the past is given.
+ * @throws {Error} If options.equals has an array with fewer or more than two
+ * members.
+ * @throws {Error} If options.startsWith has an array with fewer or more than two
+ * members.
+ *
+ * @param {object} options Configuration options.
+ * @param {array|array[]} [options.equals] Array of request parameters and
+ * their expected value (e.g. [['$<field>', '<value>']]). Values are
+ * translated into equality constraints in the conditions field of the
+ * policy document (e.g. ['eq', '$<field>', '<value>']). If only one
+ * equality condition is to be specified, options.equals can be a one-
+ * dimensional array (e.g. ['$<field>', '<value>']).
+ * @param {*} options.expires - A timestamp when this policy will expire. Any
+ * value given is passed to `new Date()`.
+ * @param {array|array[]} [options.startsWith] Array of request parameters and
+ * their expected prefixes (e.g. [['$<field>', '<value>']]). Values are
+ * translated into starts-with constraints in the conditions field of the
+ * policy document (e.g. ['starts-with', '$<field>', '<value>']). If only
+ * one prefix condition is to be specified, options.startsWith can be a
+ * one- dimensional array (e.g. ['$<field>', '<value>']).
+ * @param {string} [options.acl] ACL for the object from possibly predefined
+ * ACLs.
+ * @param {string} [options.successRedirect] The URL to which the user client
+ * is redirected if the upload is successful.
+ * @param {string} [options.successStatus] - The status of the Google Storage
+ * response if the upload is successful (must be string).
+ * @param {object} [options.contentLengthRange]
+ * @param {number} [options.contentLengthRange.min] Minimum value for the
+ * request's content length.
+ * @param {number} [options.contentLengthRange.max] Maximum value for the
+ * request's content length.
+ * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function.
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * equals: ['$Content-Type', 'image/jpeg'], + * expires: '10-25-2022', + * contentLengthRange: { + * min: 0, + * max: 1024 + * } + * }; + * + * file.generateSignedPostPolicyV2(options, function(err, policy) { + * // policy.string: the policy document in plain text. + * // policy.base64: the policy document in base64. + * // policy.signature: the policy signature in base64. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV2(options).then(function(data) { + * const policy = data[0]; + * }); + * ``` + */ + generateSignedPostPolicyV2(optionsOrCallback, cb) { + const args = normalize(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(ExceptionMessages.EXPIRATION_DATE_PAST); + } + options = Object.assign({}, options); + const conditions = [ + ['eq', '$key', this.name], + { + bucket: this.bucket.name, + }, + ]; + if (Array.isArray(options.equals)) { + if (!Array.isArray(options.equals[0])) { + options.equals = [options.equals]; + } + options.equals.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); + } + conditions.push(['eq', condition[0], condition[1]]); + }); + } + if (Array.isArray(options.startsWith)) { + if (!Array.isArray(options.startsWith[0])) { + options.startsWith = [options.startsWith]; + } + options.startsWith.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); + } + conditions.push(['starts-with', condition[0], condition[1]]); + }); + } + if (options.acl) { + conditions.push({ + acl: options.acl, + }); + } + if (options.successRedirect) { + conditions.push({ + success_action_redirect: options.successRedirect, + }); + } + if (options.successStatus) { + conditions.push({ + success_action_status: options.successStatus, + }); + } + if (options.contentLengthRange) { + const min = options.contentLengthRange.min; + const max = options.contentLengthRange.max; + if (typeof min !== 'number' || typeof max !== 'number') { + throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); + } + conditions.push(['content-length-range', min, max]); + } + const policy = { + expiration: expires.toISOString(), + conditions, + }; + const policyString = JSON.stringify(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + this.storage.authClient.sign(policyBase64, options.signingEndpoint).then(signature => { + callback(null, { + string: policyString, + base64: policyBase64, + signature, + }); + }, err => { + callback(new SigningError(err.message)); + }); + } + /** + * @typedef {object} SignedPostPolicyV4Output + * @property {string} url The request URL. + * @property {object} fields The form fields to include in the POST request. + */ + /** + * @typedef {array} GenerateSignedPostPolicyV4Response + * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. 
+ */ + /** + * @callback GenerateSignedPostPolicyV4Callback + * @param {?Error} err Request error, if any. + * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. + */ + /** + * Get a v4 signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} + * + * @param {object} options Configuration options. + * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in + * the result, e.g. "https://cdn.example.com". + * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} + * to include in the signed policy. Any fields with key beginning with 'x-ignore-' + * will not be included in the policy to be signed. + * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} + * to include in the signed policy. All fields given in `config.fields` are + * automatically included in the conditions array, adding the same entry + * in both `fields` and `conditions` will result in duplicate entries. + * + * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * expires: '10-25-2022', + * conditions: [ + * ['eq', '$Content-Type', 'image/jpeg'], + * ['content-length-range', 0, 1024], + * ], + * fields: { + * acl: 'public-read', + * 'x-goog-meta-foo': 'bar', + * 'x-ignore-mykey': 'data' + * } + * }; + * + * file.generateSignedPostPolicyV4(options, function(err, response) { + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV4(options).then(function(data) { + * const response = data[0]; + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. 
+ * }); + * ``` + */ + generateSignedPostPolicyV4(optionsOrCallback, cb) { + const args = normalize(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(ExceptionMessages.EXPIRATION_DATE_PAST); + } + if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + options = Object.assign({}, options); + let fields = Object.assign({}, options.fields); + const now = new Date(); + const nowISO = formatAsUTCISO(now, true); + const todayISO = formatAsUTCISO(now); + const sign = async () => { + const { client_email } = await this.storage.authClient.getCredentials(); + const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; + fields = { + ...fields, + bucket: this.bucket.name, + key: this.name, + 'x-goog-date': nowISO, + 'x-goog-credential': credential, + 'x-goog-algorithm': 'GOOG4-RSA-SHA256', + }; + const conditions = options.conditions || []; + Object.entries(fields).forEach(([key, value]) => { + if (!key.startsWith('x-ignore-')) { + conditions.push({ [key]: value }); + } + }); + delete fields.bucket; + const expiration = formatAsUTCISO(expires, true, '-', ':'); + const policy = { + conditions, + expiration, + }; + const policyString = unicodeJSONStringify(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + try { + const signature = await this.storage.authClient.sign(policyBase64, options.signingEndpoint); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const universe = this.parent.storage.universeDomain; + fields['policy'] = policyBase64; + fields['x-goog-signature'] = signatureHex; + let url; + if (this.storage.customEndpoint) { + url = this.storage.apiEndpoint; + } + else if (options.virtualHostedStyle) { + url = `https://${this.bucket.name}.storage.${universe}/`; + } + else if (options.bucketBoundHostname) { + url = `${options.bucketBoundHostname}/`; + } + else { + url = `https://storage.${universe}/${this.bucket.name}/`; + } + return { + url, + fields, + }; + } + catch (err) { + throw new SigningError(err.message); + } + }; + sign().then(res => callback(null, res), callback); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * Get a signed URL to allow limited time access to the file. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {object} config Configuration object. 
+ * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or
+ * "delete" (HTTP: DELETE), "resumable" (HTTP: POST).
+ * When using "resumable", the header `X-Goog-Resumable: start` has
+ * to be sent when making a request with the signed URL.
+ * @param {*} config.expires A timestamp when this link will expire. Any value
+ * given is passed to `new Date()`.
+ * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now.
+ * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example}
+ * @param {string} [config.version='v2'] The signing version to use, either
+ * 'v2' or 'v4'.
+ * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style
+ * URLs (e.g. 'https://mybucket.storage.googleapis.com/...') instead of path-style
+ * (e.g. 'https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs
+ * should generally be preferred instead of path-style URLs.
+ * Currently defaults to `false` for path-style, although this may change in a
+ * future major-version release.
+ * @param {string} [config.cname] The cname for this bucket, e.g.,
+ * "https://cdn.example.com".
+ * @param {string} [config.contentMd5] The MD5 digest value in base64. If you
+ * provide this, the client must provide this HTTP header with this same
+ * value in its request; if this parameter is not provided here,
+ * the client must not provide any value for this HTTP header in its request.
+ * @param {string} [config.contentType] If you provide this, the client
+ * must provide this HTTP header with this same value in its request; if
+ * this parameter is not provided here, the client must not provide any value
+ * for this HTTP header in its request.
+ * @param {object} [config.extensionHeaders] If these headers are used, the
+ * server will check to make sure that the client provides matching
+ * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers}
+ * for the requirements of this feature, most notably:
+ * - The header name must be prefixed with `x-goog-`
+ * - The header name must be all lowercase
+ *
+ * Note: A multi-valued header passed as an array in the extensionHeaders
+ * object is converted into a string, delimited by `,` with
+ * no space. Requests made using the signed URL will need to
+ * delimit multi-valued headers using a single `,` as well, or
+ * else the server will report a mismatched signature.
+ * @param {object} [config.queryParams] Additional query parameters to include
+ * in the signed URL.
+ * @param {string} [config.promptSaveAs] The filename to prompt the user to
+ * save the file as when the signed url is accessed. This is ignored if
+ * `config.responseDisposition` is set.
+ * @param {string} [config.responseDisposition] The
+ * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the
+ * signed url.
+ * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link becomes usable. Any value
+ * given is passed to `new Date()`.
+ * Note: Use for 'v4' only.
+ * @param {string} [config.responseType] The response-content-type parameter
+ * of the signed url.
+ * @param {GetSignedUrlCallback} [callback] Callback function.
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to read from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * // Access will begin at accessibleAt and end at expires. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * accessibleAt: '03-13-2025' + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL to allow write permissions. This means anyone with this + * URL + * // can send a POST request with new data that will overwrite the file. + * //- + * file.getSignedUrl({ + * action: 'write', + * expires: '03-17-2025' + * }, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to be written to. + * const writeStream = request.put(url); + * writeStream.end('New data'); + * + * writeStream.on('complete', function(resp) { + * // Confirm the new content was saved. + * file.download(function(err, fileContents) { + * console.log('Contents:', fileContents.toString()); + * // Contents: New data + * }); + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_generate_signed_url + * Another example: + */ + getSignedUrl(cfg, callback) { + const method = ActionToHTTPMethod[cfg.action]; + const extensionHeaders = objectKeyToLowercase(cfg.extensionHeaders || {}); + if (cfg.action === 'resumable') { + extensionHeaders['x-goog-resumable'] = 'start'; + } + const queryParams = Object.assign({}, cfg.queryParams); + if (typeof cfg.responseType === 'string') { + queryParams['response-content-type'] = cfg.responseType; + } + if (typeof cfg.promptSaveAs === 'string') { + queryParams['response-content-disposition'] = + 'attachment; filename="' + cfg.promptSaveAs + '"'; + } + if (typeof cfg.responseDisposition === 'string') { + queryParams['response-content-disposition'] = cfg.responseDisposition; + } + if (this.generation) { + queryParams['generation'] = this.generation.toString(); + } + const signConfig = { + method, + expires: cfg.expires, + accessibleAt: cfg.accessibleAt, + extensionHeaders, + queryParams, + contentMd5: cfg.contentMd5, + contentType: cfg.contentType, + host: cfg.host, + }; + if (cfg.cname) { + signConfig.cname = cfg.cname; + } + if (cfg.version) { + signConfig.version = cfg.version; + } + if (cfg.virtualHostedStyle) { + signConfig.virtualHostedStyle = cfg.virtualHostedStyle; + } + if (!this.signer) { + this.signer = new URLSigner(this.storage.authClient, this.bucket, this, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback IsPublicCallback + * @param {?Error} err Request error, if any. + * @param {boolean} resp Whether file is public or not. + */ + /** + * @typedef {array} IsPublicResponse + * @property {boolean} 0 Whether file is public or not. + */ + /** + * Check whether this file is public or not by sending + * a HEAD request without credentials. + * No errors from the server indicates that the current + * file is public. + * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} + * indicates that file is private. + * Any other non 403 error is propagated to user. + * + * @param {IsPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Check whether the file is publicly accessible. + * //- + * file.isPublic(function(err, resp) { + * if (err) { + * console.error(err); + * return; + * } + * console.log(`the file ${file.id} is public: ${resp}`) ; + * }) + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.isPublic().then(function(data) { + * const resp = data[0]; + * }); + * ``` + */ + isPublic(callback) { + var _a; + // Build any custom headers based on the defined interceptors on the parent + // storage object and this object + const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? 
void 0 : _a.interceptors) || []; + const fileInterceptors = this.interceptors || []; + const allInterceptors = storageInterceptors.concat(fileInterceptors); + const headers = allInterceptors.reduce((acc, curInterceptor) => { + const currentHeaders = curInterceptor.request({ + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + }); + Object.assign(acc, currentHeaders.headers); + return acc; + }, {}); + util.makeRequest({ + method: 'GET', + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + headers, + }, { + retryOptions: this.storage.retryOptions, + }, (err) => { + if (err) { + const apiError = err; + if (apiError.code === 403) { + callback(null, false); + } + else { + callback(err); + } + } + else { + callback(null, true); + } + }); + } + /** + * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [strict] If true, set the file to be private to + * only the owner user. Otherwise, it will be private to the project. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback MakeFilePrivateCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} MakeFilePrivateResponse + * @property {object} 0 The full API response. + */ + /** + * Make a file private to the project and remove all other permissions. + * Set `options.strict` to true to make the file private to only the owner. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @param {MakeFilePrivateOptions} [options] Configuration options. + * @param {MakeFilePrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Set the file private so only project maintainers can see and modify it. + * //- + * file.makePrivate(function(err) {}); + * + * //- + * // Set the file private so only the owner can see and modify it. + * //- + * file.makePrivate({ strict: true }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePrivate().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const query = { + predefinedAcl: options.strict ? 'private' : 'projectPrivate', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + }; + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { + query.ifMetagenerationMatch = + (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifMetagenerationMatch; + delete options.preconditionOpts; + } + if (options.userProject) { + query.userProject = options.userProject; + } + // You aren't allowed to set both predefinedAcl & acl properties on a file, + // so acl must explicitly be nullified, destroying all previous acls on the + // file. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, callback); + } + /** + * @typedef {array} MakeFilePublicResponse + * @property {object} 0 The full API response. + */ + /** + * @callback MakeFilePublicCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set a file to be publicly readable and maintain all previous permissions. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {MakeFilePublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.makePublic(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePublic().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_make_public + * Another example: + */ + makePublic(callback) { + callback = callback || util.noop; + this.acl.add({ + entity: 'allUsers', + role: 'READER', + }, (err, acl, resp) => { + callback(err, resp); + }); + } + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl() { + return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; + } + /** + * @typedef {array} MoveResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Move this file to another location. By default, this will rename the file + * and keep it in the same bucket, but you can choose to move it to another + * Bucket by providing a Bucket or File object or a URL beginning with + * "gs://". + * + * **Warning**: + * There is currently no atomic `move` method in the Cloud Storage API, + * so this method is a composition of {@link File#copy} (to the new + * location) and {@link File#delete} (from the old location). 
While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {MoveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is moved to its + * // current bucket, under the new name provided. + * //- + * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-new.png'; + * file.move(newLocation, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be moved to that bucket + * // using the same name. + * //- + * const anotherBucket = gcs.bucket('another-bucket'); + * + * file.move(anotherBucket, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.move(anotherFile, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `destinationFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.move('my-image-new.png').then(function(data) { + * const destinationFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file + * Another example: + */ + move(destination, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; + callback = callback || util.noop; + this.copy(destination, options, (err, destinationFile, copyApiResponse) => { + if (err) { + err.message = 'file#copy failed with an error - ' + err.message; + callback(err, null, copyApiResponse); + return; + } + if (this.name !== destinationFile.name || + this.bucket.name !== destinationFile.bucket.name) { + this.delete(options, (err, apiResponse) => { + if (err) { + err.message = 'file#delete failed with an error - ' + err.message; + callback(err, destinationFile, apiResponse); + return; + } + callback(null, destinationFile, copyApiResponse); + }); + } + else { + callback(null, destinationFile, copyApiResponse); + } + }); + } + /** + * @typedef {array} RenameResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback RenameCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} RenameOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Rename this file. + * + * **Warning**: + * There is currently no atomic `rename` method in the Cloud Storage API, + * so this method is an alias of {@link File#move}, which in turn is a + * composition of {@link File#copy} (to the new location) and + * {@link File#delete} (from the old location). While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * @param {string|File} destinationFile Destination file. + * @param {RenameCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a string or a File object. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // You can pass in a string for the destinationFile. + * //- + * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "renamed-image.png" + * + * // `renamedFile` is an instance of a File object that refers to your + * // renamed file. + * }); + * + * //- + * // You can pass in a File object. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.rename(anotherFile, function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * + * // Note: + * // The `renamedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.rename('my-renamed-image.png').then(function(data) { + * const renamedFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + rename(destinationFile, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || util.noop; + this.move(destinationFile, options, callback); + } + /** + * @typedef {object} RestoreOptions Options for File#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + * @param {string | number} [ifGenerationMatch] Request proceeds if the generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifGenerationNotMatch] Request proceeds if the generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + * @param {string | number} [ifMetagenerationMatch] Request proceeds if the meta-generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifMetagenerationNotMatch] Request proceeds if the meta-generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + */ + /** + * Restores a soft-deleted file + * @param {RestoreOptions} options Restore options. + * @returns {Promise} + */ + async restore(options) { + const [file] = await this.request({ + method: 'POST', + uri: '/restore', + qs: options, + }); + return file; + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + return this.parent.request.call(this, reqOpts, callback); + } + /** + * @callback RotateEncryptionKeyCallback + * @extends CopyCallback + */ + /** + * @typedef RotateEncryptionKeyResponse + * @extends CopyResponse + */ + /** + * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options + * for File#rotateEncryptionKey(). + * If a string or Buffer is provided, it is interpreted as an AES-256, + * customer-supplied encryption key. If you'd like to use a Cloud KMS key + * name, you must specify an options object with the property name: + * `kmsKeyName`. + * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. + * @param {string} [options.kmsKeyName] A Cloud KMS key name. + */ + /** + * This method allows you to update the encryption key associated with this + * file. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {RotateEncryptionKeyOptions} [options] - Configuration options. + * @param {RotateEncryptionKeyCallback} [callback] + * @returns {Promise} + * + * @example include:samples/encryption.js + * region_tag:storage_rotate_encryption_key + * Example of rotating the encryption key for this file: + */ + rotateEncryptionKey(optionsOrCallback, callback) { + var _a; + callback = + typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; + let options = {}; + if (typeof optionsOrCallback === 'string' || + optionsOrCallback instanceof Buffer) { + options = { + encryptionKey: optionsOrCallback, + }; + } + else if (typeof optionsOrCallback === 'object') { + options = optionsOrCallback; + } + const newFile = this.bucket.file(this.id, options); + const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined + ? { preconditionOpts: options.preconditionOpts } + : {}; + this.copy(newFile, copyOptions, callback); + } + /** + * @typedef {object} SaveOptions + * @extends CreateWriteStreamOptions + */ + /** + * @callback SaveCallback + * @param {?Error} err Request error, if any. + */ + /** + * Write strings or buffers to a file. + * + * *This is a convenience method which wraps {@link File#createWriteStream}.* + * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. + * + *

+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
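+ * For example (a minimal sketch; `file` and `contents` are the File instance
+ * and small string payload from the example below), such small uploads can be
+ * written with a single multipart request by passing `resumable: false`:
+ *
+ * file.save(contents, { resumable: false }, err => {
+ *   if (!err) {
+ *     // Uploaded without resumable-upload overhead.
+ *   }
+ * });
+ *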

+ * + * @param {SaveData} data The data to write to a file. + * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` + * parameter. + * @param {SaveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const contents = 'This is the contents of the file.'; + * + * file.save(contents, function(err) { + * if (!err) { + * // File written successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.save(contents).then(function() {}); + * ``` + */ + save(data, optionsOrCallback, callback) { + // tslint:enable:no-any + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + let maxRetries = this.storage.retryOptions.maxRetries; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + maxRetries = 0; + } + const returnValue = AsyncRetry(async (bail) => { + return new Promise((resolve, reject) => { + if (maxRetries === 0) { + this.storage.retryOptions.autoRetry = false; + } + const writable = this.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + const handleError = (err) => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + return bail(err); + }; + if (typeof data === 'string' || Buffer.isBuffer(data)) { + writable + .on('error', handleError) + .on('finish', () => resolve()) + .end(data); + } + else { + pipeline(data, writable, err => { + if (err) { + if (typeof data !== 'function') { + // Only PipelineSourceFunction can be retried. Async-iterables + // and Readable streams can only be consumed once. + return bail(err); + } + handleError(err); + } + else { + resolve(); + } + }); + } + }); + }, { + retries: maxRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(); + } + }) + .catch(callback); + } + } + setMetadata(metadata, optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} SetStorageClassResponse + * @property {object} 0 The full API response. + */ + /** + * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. 
+ */ + /** + * @callback SetStorageClassCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set the storage class for this file. + * + * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`) + * **Note:** The storage classes `multi_regional` and `regional` + * are now legacy and will be deprecated in the future. + * @param {SetStorageClassOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * file.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const req = { + ...options, + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass: storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(), + }; + this.copy(this, req, (err, file, apiResponse) => { + if (err) { + callback(err, apiResponse); + return; + } + this.metadata = file.metadata; + callback(null, apiResponse); + }); + } + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.bucket.setUserProject.call(this, userProject); + } + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const retryOptions = this.storage.retryOptions; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) { + retryOptions.autoRetry = false; + } + const uploadStream = resumableUpload.upload({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + isPartialUpload: options.isPartialUpload, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + uri: options.uri, + userProject: options.userProject || this.userProject, + retryOptions: { ...retryOptions }, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize, + highWaterMark: options === null || options === void 0 ? void 0 : options.highWaterMark, + universeDomain: this.bucket.storage.universeDomain, + [GCCL_GCS_CMD_KEY]: options[GCCL_GCS_CMD_KEY], + }); + uploadStream + .on('response', resp => { + dup.emit('response', resp); + }) + .on('uri', uri => { + dup.emit('uri', uri); + }) + .on('metadata', metadata => { + this.metadata = metadata; + dup.emit('metadata'); + }) + .on('finish', () => { + dup.emit('complete'); + }) + .on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(uploadStream); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const apiEndpoint = this.storage.apiEndpoint; + const bucketName = this.bucket.name; + const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; + const reqOpts = { + qs: { + name: this.name, + }, + uri: uri, + [GCCL_GCS_CMD_KEY]: options[GCCL_GCS_CMD_KEY], + }; + if (this.generation !== undefined) { + reqOpts.qs.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName !== undefined) { + reqOpts.qs.kmsKeyName = this.kmsKeyName; + } + if (typeof options.timeout === 'number') { + reqOpts.timeout = options.timeout; + } + if (options.userProject || this.userProject) { + reqOpts.qs.userProject = options.userProject || this.userProject; + } + if (options.predefinedAcl) { + reqOpts.qs.predefinedAcl = options.predefinedAcl; + } + else if (options.private) { + reqOpts.qs.predefinedAcl = 'private'; + } + else if (options.public) { + reqOpts.qs.predefinedAcl = 'publicRead'; + } + Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); + util.makeWritableStream(dup, { + makeAuthenticatedRequest: (reqOpts) => { + this.request(reqOpts, (err, body, resp) => { + if (err) { + dup.destroy(err); + return; + } + this.metadata = body; + dup.emit('metadata', body); + dup.emit('response', resp); + dup.emit('complete'); + }); + }, + metadata: options.metadata, + request: reqOpts, + }); + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, methodType, localPreconditionOptions) { + var _a, _b, _c, _d; + if ((typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && + methodType === AvailableServiceObjectMethods.delete && + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + if ((typeof coreOpts === 'object' && + ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + methodType === AvailableServiceObjectMethods.setMetadata && + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } + async getBufferFromReadable(readable) { + const buf = []; + for await (const chunk of readable) { + buf.push(chunk); + } + return Buffer.concat(buf); + } +} +_File_instances = new WeakSet(), _File_validateIntegrity = +/** + * + * @param hashCalculatingStream + * @param verify + * @returns {boolean} Returns `true` if valid, throws with error otherwise + */ +async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { + const metadata = this.metadata; + // If we're doing validation, assume the worst + let dataMismatch = !!(verify.crc32c || verify.md5); + if (verify.crc32c && metadata.crc32c) { + dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); + } + if (verify.md5 && metadata.md5Hash) { + dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); + } + if (dataMismatch) { + const errors = []; + let code = ''; + let message = ''; + try { + await this.delete(); + if (verify.md5 && !metadata.md5Hash) { + code = 'MD5_NOT_AVAILABLE'; + message = FileExceptionMessages.MD5_NOT_AVAILABLE; + } + else { + code = 'FILE_NO_UPLOAD'; + message = FileExceptionMessages.UPLOAD_MISMATCH; + } + } + catch (e) { + const error = e; + code = 'FILE_NO_UPLOAD_DELETE'; + message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; + errors.push(error); + } + const error = new RequestError(message); + error.code = code; + error.errors = errors; + throw error; + } + return true; +}; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(File, { + exclude: [ + 'cloudStorageURI', + 'publicUrl', + 'request', + 'save', + 'setEncryptionKey', + 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', + 'getBufferFromReadable', + 'restore', + ], +}); +/** + * Reference to the {@link File} class. + * @name module:@google-cloud/storage.File + * @see File + */ +export { File }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/hash-stream-validator.d.ts b/node_modules/@google-cloud/storage/build/esm/src/hash-stream-validator.d.ts new file mode 100644 index 0000000..00d85d1 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/hash-stream-validator.d.ts @@ -0,0 +1,37 @@ +/// +/// +import { Transform } from 'stream'; +import { CRC32CValidatorGenerator, CRC32CValidator } from './crc32c.js'; +interface HashStreamValidatorOptions { + /** Enables CRC32C calculation. To validate a provided value use `crc32cExpected`. */ + crc32c: boolean; + /** Enables MD5 calculation. To validate a provided value use `md5Expected`. */ + md5: boolean; + /** A CRC32C instance for validation. To validate a provided value use `crc32cExpected`. */ + crc32cInstance: CRC32CValidator; + /** Set a custom CRC32C generator. Used if `crc32cInstance` has not been provided. */ + crc32cGenerator: CRC32CValidatorGenerator; + /** Sets the expected CRC32C value to verify once all data has been consumed. Also sets the `crc32c` option to `true` */ + crc32cExpected?: string; + /** Sets the expected MD5 value to verify once all data has been consumed. 
Also sets the `md5` option to `true` */ + md5Expected?: string; + /** Indicates whether or not to run a validation check or only update the hash values */ + updateHashesOnly?: boolean; +} +declare class HashStreamValidator extends Transform { + #private; + readonly crc32cEnabled: boolean; + readonly md5Enabled: boolean; + readonly crc32cExpected: string | undefined; + readonly md5Expected: string | undefined; + readonly updateHashesOnly: boolean; + constructor(options?: Partial); + /** + * Return the current CRC32C value, if available. + */ + get crc32c(): string | undefined; + _flush(callback: (error?: Error | null | undefined) => void): void; + _transform(chunk: Buffer, encoding: BufferEncoding, callback: (e?: Error) => void): void; + test(hash: 'crc32c' | 'md5', sum: Buffer | string): boolean; +} +export { HashStreamValidator, HashStreamValidatorOptions }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/hash-stream-validator.js b/node_modules/@google-cloud/storage/build/esm/src/hash-stream-validator.js new file mode 100644 index 0000000..31c6d5a --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/hash-stream-validator.js @@ -0,0 +1,116 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; +import { createHash } from 'crypto'; +import { Transform } from 'stream'; +import { CRC32C_DEFAULT_VALIDATOR_GENERATOR, } from './crc32c.js'; +import { FileExceptionMessages, RequestError } from './file.js'; +class HashStreamValidator extends Transform { + constructor(options = {}) { + super(); + this.updateHashesOnly = false; + _HashStreamValidator_crc32cHash.set(this, undefined); + _HashStreamValidator_md5Hash.set(this, undefined); + _HashStreamValidator_md5Digest.set(this, ''); + this.crc32cEnabled = !!options.crc32c; + this.md5Enabled = !!options.md5; + this.updateHashesOnly = !!options.updateHashesOnly; + this.crc32cExpected = options.crc32cExpected; + this.md5Expected = options.md5Expected; + if (this.crc32cEnabled) { + if (options.crc32cInstance) { + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f"); + } + else { + const crc32cGenerator = options.crc32cGenerator || CRC32C_DEFAULT_VALIDATOR_GENERATOR; + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); + } + } + if (this.md5Enabled) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, createHash('md5'), "f"); + } + } + /** + * Return the current CRC32C value, if available. + */ + get crc32c() { + var _a; + return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString(); + } + _flush(callback) { + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); + } + if (this.updateHashesOnly) { + callback(); + return; + } + // If we're doing validation, assume the worst-- a data integrity + // mismatch. If not, these tests won't be performed, and we can assume + // the best. + // We must check if the server decompressed the data on serve because hash + // validation is not possible in this case. + let failed = this.crc32cEnabled || this.md5Enabled; + if (this.crc32cEnabled && this.crc32cExpected) { + failed = !this.test('crc32c', this.crc32cExpected); + } + if (this.md5Enabled && this.md5Expected) { + failed = !this.test('md5', this.md5Expected); + } + if (failed) { + const mismatchError = new RequestError(FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + callback(mismatchError); + } + else { + callback(); + } + } + _transform(chunk, encoding, callback) { + this.push(chunk, encoding); + try { + if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); + callback(); + } + catch (e) { + callback(e); + } + } + test(hash, sum) { + const check = Buffer.isBuffer(sum) ? 
sum.toString('base64') : sum; + if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); + } + if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; + } + return false; + } +} +_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); +export { HashStreamValidator }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/hmacKey.d.ts b/node_modules/@google-cloud/storage/build/esm/src/hmacKey.d.ts new file mode 100644 index 0000000..4433a83 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/hmacKey.d.ts @@ -0,0 +1,93 @@ +import { ServiceObject, MetadataCallback, SetMetadataResponse } from './nodejs-common/index.js'; +import { BaseMetadata, SetMetadataOptions } from './nodejs-common/service-object.js'; +import { Storage } from './storage.js'; +export interface HmacKeyOptions { + projectId?: string; +} +export interface HmacKeyMetadata extends BaseMetadata { + accessId?: string; + etag?: string; + projectId?: string; + serviceAccountEmail?: string; + state?: string; + timeCreated?: string; + updated?: string; +} +export interface SetHmacKeyMetadataOptions { + /** + * This parameter is currently ignored. + */ + userProject?: string; +} +export interface SetHmacKeyMetadata { + state?: 'ACTIVE' | 'INACTIVE'; + etag?: string; +} +export interface HmacKeyMetadataCallback { + (err: Error | null, metadata?: HmacKeyMetadata, apiResponse?: unknown): void; +} +export type HmacKeyMetadataResponse = [HmacKeyMetadata, unknown]; +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +export declare class HmacKey extends ServiceObject { + /** + * A reference to the {@link Storage} associated with this {@link HmacKey} + * instance. + * @name HmacKey#storage + * @type {Storage} + */ + storage: Storage; + private instanceRetryValue?; + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. + * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage: Storage, accessId: string, options?: HmacKeyOptions); + /** + * Set the metadata for this object. 
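+ * Only the key `state` (`'ACTIVE'` or `'INACTIVE'`) and an optional `etag` can be set; see {@link SetHmacKeyMetadata}.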
+ * + * @param {object} metadata - The metadata to set on this object. + * @param {object=} options - Configuration options. + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + setMetadata(metadata: HmacKeyMetadata, options?: SetMetadataOptions): Promise>; + setMetadata(metadata: HmacKeyMetadata, callback: MetadataCallback): void; + setMetadata(metadata: HmacKeyMetadata, options: SetMetadataOptions, callback: MetadataCallback): void; +} diff --git a/node_modules/@google-cloud/storage/build/esm/src/hmacKey.js b/node_modules/@google-cloud/storage/build/esm/src/hmacKey.js new file mode 100644 index 0000000..9030248 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/hmacKey.js @@ -0,0 +1,332 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +import { ServiceObject, } from './nodejs-common/index.js'; +import { IdempotencyStrategy } from './storage.js'; +import { promisifyAll } from '@google-cloud/promisify'; +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +export class HmacKey extends ServiceObject { + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. + * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage, accessId, options) { + const methods = { + /** + * @typedef {object} DeleteHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} DeleteHmacKeyResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Deletes an HMAC key. 
+ * Key state must be set to `INACTIVE` prior to deletion. + * Caution: HMAC keys cannot be recovered once you delete them. + * + * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. + * + * @method HmacKey#delete + * @param {DeleteHmacKeyOptions} [options] Configuration options. + * @param {DeleteHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Delete HMAC key after making the key inactive. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * hmacKey.delete((err) => { + * if (err) { + * console.error(err); + * return; + * } + * // The HMAC key is deleted. + * }); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey + * .setMetadata({state: 'INACTIVE'}) + * .then(() => { + * return hmacKey.delete(); + * }); + * ``` + */ + delete: true, + /** + * @callback GetHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey this {@link HmacKey} instance. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetHmacKeyResponse + * @property {HmacKey} 0 This {@link HmacKey} instance. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} GetHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * this {@link HmacKey} instance. + * + * HmacKey.get() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#get + * @param {GetHmacKeyOptions} [options] Configuration options. + * @param {GetHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's Metadata. + * //- + * storage.hmacKey('ACCESS_ID') + * .get((err, hmacKey) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * // do something with the returned HmacKey object. + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .get() + * .then((data) => { + * const hmacKey = data[0]; + * }); + * ``` + */ + get: true, + /** + * @typedef {object} GetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * the HMAC key's metadata as an object. + * + * HmacKey.getMetadata() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#getMetadata + * @param {GetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's metadata and populate to the metadata property. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata((err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata() + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + getMetadata: true, + /** + * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. + * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. + * @property {string} [etag] Include an etag from a previous get HMAC key request + * to perform safe read-modify-write. + */ + /** + * @typedef {object} SetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @callback HmacKeyMetadataCallback + * @param {?Error} err Request error, if any. + * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} HmacKeyMetadataResponse + * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. + * @property {object} 1 The full API response. + */ + /** + * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for + * valid states. + * + * @method HmacKey#setMetadata + * @param {SetHmacKeyMetadata} metadata The new metadata. + * @param {SetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * const metadata = { + * state: 'INACTIVE', + * }; + * + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata) + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }; + const projectId = (options && options.projectId) || storage.projectId; + super({ + parent: storage, + id: accessId, + baseUrl: `/projects/${projectId}/hmacKeys`, + methods, + }); + this.storage = storage; + this.instanceRetryValue = storage.retryOptions.autoRetry; + } + setMetadata(metadata, optionsOrCallback, cb) { + // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways + if (this.storage.retryOptions.idempotencyStrategy !== + IdempotencyStrategy.RetryAlways) { + this.storage.retryOptions.autoRetry = false; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? 
optionsOrCallback + : cb; + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } +} +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(HmacKey); diff --git a/node_modules/@google-cloud/storage/build/esm/src/iam.d.ts b/node_modules/@google-cloud/storage/build/esm/src/iam.d.ts new file mode 100644 index 0000000..3ceb19c --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/iam.d.ts @@ -0,0 +1,117 @@ +import { Bucket } from './bucket.js'; +export interface GetPolicyOptions { + userProject?: string; + requestedPolicyVersion?: number; +} +export type GetPolicyResponse = [Policy, unknown]; +/** + * @callback GetPolicyCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The policy. + * @param {object} apiResponse The full API response. + */ +export interface GetPolicyCallback { + (err?: Error | null, acl?: Policy, apiResponse?: unknown): void; +} +/** + * @typedef {object} SetPolicyOptions + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ +export interface SetPolicyOptions { + userProject?: string; +} +/** + * @typedef {array} SetPolicyResponse + * @property {object} 0 The policy. + * @property {object} 1 The full API response. + */ +export type SetPolicyResponse = [Policy, unknown]; +/** + * @callback SetPolicyCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The policy. + * @param {object} apiResponse The full API response. + */ +export interface SetPolicyCallback { + (err?: Error | null, acl?: Policy, apiResponse?: object): void; +} +export interface Policy { + bindings: PolicyBinding[]; + version?: number; + etag?: string; +} +export interface PolicyBinding { + role: string; + members: string[]; + condition?: Expr; +} +export interface Expr { + title?: string; + description?: string; + expression: string; +} +/** + * @typedef {array} TestIamPermissionsResponse + * @property {object} 0 A subset of permissions that the caller is allowed. + * @property {object} 1 The full API response. + */ +export type TestIamPermissionsResponse = [{ + [key: string]: boolean; +}, unknown]; +/** + * @callback TestIamPermissionsCallback + * @param {?Error} err Request error, if any. + * @param {object} acl A subset of permissions that the caller is allowed. + * @param {object} apiResponse The full API response. + */ +export interface TestIamPermissionsCallback { + (err?: Error | null, acl?: { + [key: string]: boolean; + } | null, apiResponse?: unknown): void; +} +/** + * @typedef {object} TestIamPermissionsOptions Configuration options for Iam#testPermissions(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ +export interface TestIamPermissionsOptions { + userProject?: string; +} +export declare enum IAMExceptionMessages { + POLICY_OBJECT_REQUIRED = "A policy object is required.", + PERMISSIONS_REQUIRED = "Permissions are required." +} +/** + * Get and set IAM policies for your Cloud Storage bucket. 
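+ * An `Iam` instance is typically accessed through {@link Bucket#iam} rather than constructed directly.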
+ * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +declare class Iam { + private request_; + private resourceId_; + constructor(bucket: Bucket); + getPolicy(options?: GetPolicyOptions): Promise; + getPolicy(options: GetPolicyOptions, callback: GetPolicyCallback): void; + getPolicy(callback: GetPolicyCallback): void; + setPolicy(policy: Policy, options?: SetPolicyOptions): Promise; + setPolicy(policy: Policy, callback: SetPolicyCallback): void; + setPolicy(policy: Policy, options: SetPolicyOptions, callback: SetPolicyCallback): void; + testPermissions(permissions: string | string[], options?: TestIamPermissionsOptions): Promise; + testPermissions(permissions: string | string[], callback: TestIamPermissionsCallback): void; + testPermissions(permissions: string | string[], options: TestIamPermissionsOptions, callback: TestIamPermissionsCallback): void; +} +export { Iam }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/iam.js b/node_modules/@google-cloud/storage/build/esm/src/iam.js new file mode 100644 index 0000000..7ca2ce8 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/iam.js @@ -0,0 +1,303 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +import { promisifyAll } from '@google-cloud/promisify'; +import { normalize } from './util.js'; +export var IAMExceptionMessages; +(function (IAMExceptionMessages) { + IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; + IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; +})(IAMExceptionMessages || (IAMExceptionMessages = {})); +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. 
+ * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +class Iam { + constructor(bucket) { + this.request_ = bucket.request.bind(bucket); + this.resourceId_ = 'buckets/' + bucket.getId(); + } + /** + * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). + * @property {number} [requestedPolicyVersion] The version of IAM policies to + * request. If a policy with a condition is requested without setting + * this, the server will return an error. This must be set to a value + * of 3 to retrieve IAM policies containing conditions. This is to + * prevent client code that isn't aware of IAM conditions from + * interpreting and modifying policies incorrectly. The service might + * return a policy with version lower than the one that was requested, + * based on the feature syntax in the policy fetched. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetPolicyResponse + * @property {Policy} 0 The policy. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} Policy + * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. + * @property {string} [policy.etag] Etags are used to perform a read-modify-write. + * @property {number} [policy.version] The syntax schema version of the Policy. + * To set an IAM policy with conditional binding, this field must be set to + * 3 or greater. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + */ + /** + * @typedef {object} PolicyBinding + * @property {string} role Role that is assigned to members. + * @property {string[]} members Specifies the identities requesting access for the bucket. + * @property {Expr} [condition] The condition that is associated with this binding. + */ + /** + * @typedef {object} Expr + * @property {string} [title] An optional title for the expression, i.e. a + * short string describing its purpose. This can be used e.g. in UIs + * which allow to enter the expression. + * @property {string} [description] An optional description of the + * expression. This is a longer text which describes the expression, + * e.g. when hovered over it in a UI. + * @property {string} expression Textual representation of an expression in + * Common Expression Language syntax. The application context of the + * containing message determines which well-known feature set of CEL + * is supported.The condition that is associated with this binding. + * + * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions + */ + /** + * Get the IAM policy. + * + * @param {GetPolicyOptions} [options] Request options. + * @param {GetPolicyCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.iam.getPolicy( + * {requestedPolicyVersion: 3}, + * function(err, policy, apiResponse) { + * + * }, + * ); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
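+ * // With async/await: const [policy, apiResponse] = await bucket.iam.getPolicy({requestedPolicyVersion: 3});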
+ * //- + * bucket.iam.getPolicy({requestedPolicyVersion: 3}) + * .then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + */ + getPolicy(optionsOrCallback, callback) { + const { options, callback: cb } = normalize(optionsOrCallback, callback); + const qs = {}; + if (options.userProject) { + qs.userProject = options.userProject; + } + if (options.requestedPolicyVersion !== null && + options.requestedPolicyVersion !== undefined) { + qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; + } + this.request_({ + uri: '/iam', + qs, + }, cb); + } + /** + * Set the IAM policy. + * + * @throws {Error} If no policy is provided. + * + * @param {Policy} policy The policy. + * @param {SetPolicyOptions} [options] Configuration options. + * @param {SetPolicyCallback} callback Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const myPolicy = { + * bindings: [ + * { + * role: 'roles/storage.admin', + * members: + * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] + * } + * ] + * }; + * + * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.setPolicy(myPolicy).then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ + setPolicy(policy, optionsOrCallback, callback) { + if (policy === null || typeof policy !== 'object') { + throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); + } + const { options, callback: cb } = normalize(optionsOrCallback, callback); + let maxRetries; + if (policy.etag === undefined) { + maxRetries = 0; + } + this.request_({ + method: 'PUT', + uri: '/iam', + maxRetries, + json: Object.assign({ + resourceId: this.resourceId_, + }, policy), + qs: options, + }, cb); + } + /** + * Test a set of permissions for a resource. + * + * @throws {Error} If permissions are not provided. + * + * @param {string|string[]} permissions The permission(s) to test for. + * @param {TestIamPermissionsOptions} [options] Configuration object. + * @param {TestIamPermissionsCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Test a single permission. 
+ * //- + * const test = 'storage.buckets.delete'; + * + * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": true + * // } + * }); + * + * //- + * // Test several permissions at once. + * //- + * const tests = [ + * 'storage.buckets.delete', + * 'storage.buckets.get' + * ]; + * + * bucket.iam.testPermissions(tests, function(err, permissions) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": false, + * // "storage.buckets.get": true + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.testPermissions(test).then(function(data) { + * const permissions = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + testPermissions(permissions, optionsOrCallback, callback) { + if (!Array.isArray(permissions) && typeof permissions !== 'string') { + throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); + } + const { options, callback: cb } = normalize(optionsOrCallback, callback); + const permissionsArray = Array.isArray(permissions) + ? permissions + : [permissions]; + const req = Object.assign({ + permissions: permissionsArray, + }, options); + this.request_({ + uri: '/iam/testPermissions', + qs: req, + useQuerystring: true, + }, (err, resp) => { + if (err) { + cb(err, null, resp); + return; + } + const availablePermissions = Array.isArray(resp.permissions) + ? resp.permissions + : []; + const permissionsHash = permissionsArray.reduce((acc, permission) => { + acc[permission] = availablePermissions.indexOf(permission) > -1; + return acc; + }, {}); + cb(null, permissionsHash, resp); + }); + } +} +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(Iam); +export { Iam }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/index.d.ts b/node_modules/@google-cloud/storage/build/esm/src/index.d.ts new file mode 100644 index 0000000..6da8101 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/index.d.ts @@ -0,0 +1,57 @@ +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. 
+ * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +export { ApiError } from './nodejs-common/index.js'; +export { BucketCallback, BucketOptions, CreateBucketQuery, CreateBucketRequest, CreateBucketResponse, CreateHmacKeyCallback, CreateHmacKeyOptions, CreateHmacKeyResponse, GetBucketsCallback, GetBucketsRequest, GetBucketsResponse, GetHmacKeysCallback, GetHmacKeysOptions, GetHmacKeysResponse, GetServiceAccountCallback, GetServiceAccountOptions, GetServiceAccountResponse, HmacKeyResourceResponse, IdempotencyStrategy, PreconditionOptions, ServiceAccount, Storage, StorageOptions, } from './storage.js'; +export { AclMetadata, AccessControlObject, AclOptions, AddAclCallback, AddAclOptions, AddAclResponse, GetAclCallback, GetAclOptions, GetAclResponse, RemoveAclCallback, RemoveAclOptions, RemoveAclResponse, UpdateAclCallback, UpdateAclOptions, UpdateAclResponse, } from './acl.js'; +export { Bucket, BucketExistsCallback, BucketExistsOptions, BucketExistsResponse, BucketLockCallback, BucketLockResponse, BucketMetadata, CombineCallback, CombineOptions, CombineResponse, CreateChannelCallback, CreateChannelConfig, CreateChannelOptions, CreateChannelResponse, CreateNotificationCallback, CreateNotificationOptions, CreateNotificationResponse, DeleteBucketCallback, DeleteBucketOptions, DeleteBucketResponse, DeleteFilesCallback, DeleteFilesOptions, DeleteLabelsCallback, DeleteLabelsResponse, DisableRequesterPaysCallback, DisableRequesterPaysResponse, EnableRequesterPaysCallback, EnableRequesterPaysResponse, GetBucketCallback, GetBucketMetadataCallback, GetBucketMetadataOptions, GetBucketMetadataResponse, GetBucketOptions, GetBucketResponse, GetBucketSignedUrlConfig, GetFilesCallback, GetFilesOptions, GetFilesResponse, GetLabelsCallback, GetLabelsOptions, GetLabelsResponse, GetNotificationsCallback, GetNotificationsOptions, GetNotificationsResponse, Labels, LifecycleAction, LifecycleCondition, LifecycleRule, MakeBucketPrivateCallback, MakeBucketPrivateOptions, MakeBucketPrivateResponse, MakeBucketPublicCallback, MakeBucketPublicOptions, MakeBucketPublicResponse, SetBucketMetadataCallback, SetBucketMetadataOptions, SetBucketMetadataResponse, SetBucketStorageClassCallback, SetBucketStorageClassOptions, SetLabelsCallback, SetLabelsOptions, SetLabelsResponse, UploadCallback, UploadOptions, UploadResponse, } from './bucket.js'; +export * from './crc32c.js'; +export { Channel, StopCallback } from './channel.js'; +export { CopyCallback, CopyOptions, CopyResponse, CreateReadStreamOptions, CreateResumableUploadCallback, CreateResumableUploadOptions, CreateResumableUploadResponse, CreateWriteStreamOptions, DeleteFileCallback, DeleteFileOptions, DeleteFileResponse, DownloadCallback, DownloadOptions, DownloadResponse, EncryptionKeyOptions, File, FileExistsCallback, FileExistsOptions, FileExistsResponse, FileMetadata, 
FileOptions, GetExpirationDateCallback, GetExpirationDateResponse, GetFileCallback, GetFileMetadataCallback, GetFileMetadataOptions, GetFileMetadataResponse, GetFileOptions, GetFileResponse, GenerateSignedPostPolicyV2Callback, GenerateSignedPostPolicyV2Options, GenerateSignedPostPolicyV2Response, GenerateSignedPostPolicyV4Callback, GenerateSignedPostPolicyV4Options, GenerateSignedPostPolicyV4Response, GetSignedUrlConfig, MakeFilePrivateCallback, MakeFilePrivateOptions, MakeFilePrivateResponse, MakeFilePublicCallback, MakeFilePublicResponse, MoveCallback, MoveOptions, MoveResponse, PolicyDocument, PolicyFields, PredefinedAcl, RotateEncryptionKeyCallback, RotateEncryptionKeyOptions, RotateEncryptionKeyResponse, SaveCallback, SaveOptions, SetFileMetadataCallback, SetFileMetadataOptions, SetFileMetadataResponse, SetStorageClassCallback, SetStorageClassOptions, SetStorageClassResponse, SignedPostPolicyV4Output, } from './file.js'; +export * from './hash-stream-validator.js'; +export { HmacKey, HmacKeyMetadata, HmacKeyMetadataCallback, HmacKeyMetadataResponse, SetHmacKeyMetadata, SetHmacKeyMetadataOptions, } from './hmacKey.js'; +export { GetPolicyCallback, GetPolicyOptions, GetPolicyResponse, Iam, Policy, SetPolicyCallback, SetPolicyOptions, SetPolicyResponse, TestIamPermissionsCallback, TestIamPermissionsOptions, TestIamPermissionsResponse, } from './iam.js'; +export { DeleteNotificationCallback, DeleteNotificationOptions, GetNotificationCallback, GetNotificationMetadataCallback, GetNotificationMetadataOptions, GetNotificationMetadataResponse, GetNotificationOptions, GetNotificationResponse, Notification, NotificationMetadata, } from './notification.js'; +export { GetSignedUrlCallback, GetSignedUrlResponse } from './signer.js'; +export * from './transfer-manager.js'; diff --git a/node_modules/@google-cloud/storage/build/esm/src/index.js b/node_modules/@google-cloud/storage/build/esm/src/index.js new file mode 100644 index 0000000..445cc36 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/index.js @@ -0,0 +1,68 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. 
+ * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +export { ApiError } from './nodejs-common/index.js'; +export { IdempotencyStrategy, Storage, } from './storage.js'; +export { Bucket, } from './bucket.js'; +export * from './crc32c.js'; +export { Channel } from './channel.js'; +export { File, } from './file.js'; +export * from './hash-stream-validator.js'; +export { HmacKey, } from './hmacKey.js'; +export { Iam, } from './iam.js'; +export { Notification, } from './notification.js'; +export * from './transfer-manager.js'; diff --git a/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/index.d.ts b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/index.d.ts new file mode 100644 index 0000000..72588c7 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/index.d.ts @@ -0,0 +1,19 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export { GoogleAuthOptions } from 'google-auth-library'; +export { Service, ServiceConfig, ServiceOptions, StreamRequestOptions, } from './service.js'; +export { BaseMetadata, DeleteCallback, ExistsCallback, GetConfig, InstanceResponseCallback, Interceptor, MetadataCallback, MetadataResponse, Methods, ResponseCallback, ServiceObject, ServiceObjectConfig, ServiceObjectParent, SetMetadataResponse, } from './service-object.js'; +export { Abortable, AbortableDuplex, ApiError, BodyResponseCallback, DecorateRequestOptions, ResponseBody, util, } from './util.js'; diff --git a/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/index.js b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/index.js new file mode 100644 index 0000000..8056304 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/index.js @@ -0,0 +1,3 @@ +export { Service, } from './service.js'; +export { ServiceObject, } from './service-object.js'; +export { ApiError, util, } from './util.js'; diff --git a/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service-object.d.ts b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service-object.d.ts new file mode 100644 index 0000000..083962b --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service-object.d.ts @@ -0,0 +1,218 @@ +/// +import { EventEmitter } from 'events'; +import * as r from 'teeny-request'; +import { ApiError, BodyResponseCallback, DecorateRequestOptions } from './util.js'; +export type RequestResponse = [unknown, r.Response]; +export interface ServiceObjectParent { + interceptors: Interceptor[]; + getRequestInterceptors(): Function[]; + requestStream(reqOpts: DecorateRequestOptions): r.Request; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; +} +export interface Interceptor { + request(opts: r.Options): DecorateRequestOptions; +} +export type GetMetadataOptions = object; +export type MetadataResponse = [K, r.Response]; +export type MetadataCallback = (err: Error | null, metadata?: K, apiResponse?: r.Response) => void; +export type ExistsOptions = object; +export interface ExistsCallback { + (err: Error | null, exists?: boolean): void; +} +export interface ServiceObjectConfig { + /** + * The base URL to make API requests to. + */ + baseUrl?: string; + /** + * The method which creates this object. + */ + createMethod?: Function; + /** + * The identifier of the object. For example, the name of a Storage bucket or + * Pub/Sub topic. + */ + id?: string; + /** + * A map of each method name that should be inherited. + */ + methods?: Methods; + /** + * The parent service instance. For example, an instance of Storage if the + * object is Bucket. + */ + parent: ServiceObjectParent; + /** + * Override of projectId, used to allow access to resources in another project. + * For example, a BigQuery dataset in another project to which the user has been + * granted permission. 
+ */ + projectId?: string; +} +export interface Methods { + [methodName: string]: { + reqOpts?: r.CoreOptions; + } | boolean; +} +export interface InstanceResponseCallback { + (err: ApiError | null, instance?: T | null, apiResponse?: r.Response): void; +} +export interface CreateOptions { +} +export type CreateResponse = any[]; +export interface CreateCallback { + (err: ApiError | null, instance?: T | null, ...args: any[]): void; +} +export type DeleteOptions = { + ignoreNotFound?: boolean; + ifGenerationMatch?: number | string; + ifGenerationNotMatch?: number | string; + ifMetagenerationMatch?: number | string; + ifMetagenerationNotMatch?: number | string; +} & object; +export interface DeleteCallback { + (err: Error | null, apiResponse?: r.Response): void; +} +export interface GetConfig { + /** + * Create the object if it doesn't already exist. + */ + autoCreate?: boolean; +} +export type GetOrCreateOptions = GetConfig & CreateOptions; +export type GetResponse = [T, r.Response]; +export interface ResponseCallback { + (err?: Error | null, apiResponse?: r.Response): void; +} +export type SetMetadataResponse = [K]; +export type SetMetadataOptions = object; +export interface BaseMetadata { + id?: string; + kind?: string; + etag?: string; + selfLink?: string; + [key: string]: unknown; +} +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +declare class ServiceObject extends EventEmitter { + metadata: K; + baseUrl?: string; + parent: ServiceObjectParent; + id?: string; + private createMethod?; + protected methods: Methods; + interceptors: Interceptor[]; + projectId?: string; + constructor(config: ServiceObjectConfig); + /** + * Create the object. + * + * @param {object=} options - Configuration object. + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.instance - The instance. + * @param {object} callback.apiResponse - The full API response. + */ + create(options?: CreateOptions): Promise>; + create(options: CreateOptions, callback: CreateCallback): void; + create(callback: CreateCallback): void; + /** + * Delete the object. + * + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + delete(options?: DeleteOptions): Promise<[r.Response]>; + delete(options: DeleteOptions, callback: DeleteCallback): void; + delete(callback: DeleteCallback): void; + /** + * Check if the object exists. + * + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {boolean} callback.exists - Whether the object exists or not. + */ + exists(options?: ExistsOptions): Promise<[boolean]>; + exists(options: ExistsOptions, callback: ExistsCallback): void; + exists(callback: ExistsCallback): void; + /** + * Get the object if it exists. Optionally have the object created if an + * options object is provided with `autoCreate: true`. 
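+ * If the object does not exist and `autoCreate` is not set, the 404 error from the metadata request is passed to the callback.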
+ * + * @param {object=} options - The configuration object that will be used to + * create the object if necessary. + * @param {boolean} options.autoCreate - Create the object if it doesn't already exist. + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.instance - The instance. + * @param {object} callback.apiResponse - The full API response. + */ + get(options?: GetOrCreateOptions): Promise>; + get(callback: InstanceResponseCallback): void; + get(options: GetOrCreateOptions, callback: InstanceResponseCallback): void; + /** + * Get the metadata of this object. + * + * @param {function} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.metadata - The metadata for this object. + * @param {object} callback.apiResponse - The full API response. + */ + getMetadata(options?: GetMetadataOptions): Promise>; + getMetadata(options: GetMetadataOptions, callback: MetadataCallback): void; + getMetadata(callback: MetadataCallback): void; + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors(): Function[]; + /** + * Set the metadata for this object. + * + * @param {object} metadata - The metadata to set on this object. + * @param {object=} options - Configuration options. + * @param {function=} callback - The callback function. + * @param {?error} callback.err - An error returned while making this request. + * @param {object} callback.apiResponse - The full API response. + */ + setMetadata(metadata: K, options?: SetMetadataOptions): Promise>; + setMetadata(metadata: K, callback: MetadataCallback): void; + setMetadata(metadata: K, options: SetMetadataOptions, callback: MetadataCallback): void; + /** + * Make an authenticated API request. + * + * @private + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + private request_; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts: DecorateRequestOptions): Promise; + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts: DecorateRequestOptions): r.Request; +} +export { ServiceObject }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service-object.js b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service-object.js new file mode 100644 index 0000000..b2dfa43 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service-object.js @@ -0,0 +1,289 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { promisifyAll } from '@google-cloud/promisify'; +import { EventEmitter } from 'events'; +import { util, } from './util.js'; +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +class ServiceObject extends EventEmitter { + /* + * @constructor + * @alias module:common/service-object + * + * @private + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string} config.createMethod - The method which creates this object. + * @param {string=} config.id - The identifier of the object. For example, the + * name of a Storage bucket or Pub/Sub topic. + * @param {object=} config.methods - A map of each method name that should be inherited. + * @param {object} config.methods[].reqOpts - Default request options for this + * particular method. A common use case is when `setMetadata` requires a + * `PUT` method to override the default `PATCH`. + * @param {object} config.parent - The parent service instance. For example, an + * instance of Storage if the object is Bucket. + */ + constructor(config) { + super(); + this.metadata = {}; + this.baseUrl = config.baseUrl; + this.parent = config.parent; // Parent class. + this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). + this.createMethod = config.createMethod; + this.methods = config.methods || {}; + this.interceptors = []; + this.projectId = config.projectId; + if (config.methods) { + // This filters the ServiceObject instance (e.g. a "File") to only have + // the configured methods. We make a couple of exceptions for core- + // functionality ("request()" and "getRequestInterceptors()") + Object.getOwnPropertyNames(ServiceObject.prototype) + .filter(methodName => { + return ( + // All ServiceObjects need `request` and `getRequestInterceptors`. + // clang-format off + !/^request/.test(methodName) && + !/^getRequestInterceptors/.test(methodName) && + // clang-format on + // The ServiceObject didn't redefine the method. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] === + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ServiceObject.prototype[methodName] && + // This method isn't wanted. 
+ !config.methods[methodName]); + }) + .forEach(methodName => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] = undefined; + }); + } + } + create(optionsOrCallback, callback) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const args = [this.id]; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + if (typeof optionsOrCallback === 'object') { + args.push(optionsOrCallback); + } + // Wrap the callback to return *this* instance of the object, not the + // newly-created one. + // tslint: disable-next-line no-any + function onCreate(...args) { + const [err, instance] = args; + if (!err) { + self.metadata = instance.metadata; + if (self.id && instance.metadata) { + self.id = instance.metadata.id; + } + args[1] = self; // replace the created `instance` with this one. + } + callback(...args); + } + args.push(onCreate); + // eslint-disable-next-line prefer-spread + this.createMethod.apply(null, args); + } + delete(optionsOrCallback, cb) { + var _a; + const [options, callback] = util.maybeOptionsOrCallback(optionsOrCallback, cb); + const ignoreNotFound = options.ignoreNotFound; + delete options.ignoreNotFound; + const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; + const reqOpts = { + method: 'DELETE', + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + if (err) { + if (err.code === 404 && ignoreNotFound) { + err = null; + } + } + callback(err, res); + }); + } + exists(optionsOrCallback, cb) { + const [options, callback] = util.maybeOptionsOrCallback(optionsOrCallback, cb); + this.get(options, err => { + if (err) { + if (err.code === 404) { + callback(null, false); + } + else { + callback(err); + } + return; + } + callback(null, true); + }); + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const [opts, callback] = util.maybeOptionsOrCallback(optionsOrCallback, cb); + const options = Object.assign({}, opts); + const autoCreate = options.autoCreate && typeof this.create === 'function'; + delete options.autoCreate; + function onCreate(err, instance, apiResponse) { + if (err) { + if (err.code === 409) { + self.get(options, callback); + return; + } + callback(err, null, apiResponse); + return; + } + callback(null, instance, apiResponse); + } + this.getMetadata(options, (err, metadata) => { + if (err) { + if (err.code === 404 && autoCreate) { + const args = []; + if (Object.keys(options).length > 0) { + args.push(options); + } + args.push(onCreate); + self.create(...args); + return; + } + callback(err, null, metadata); + return; + } + callback(null, self, metadata); + }); + } + getMetadata(optionsOrCallback, cb) { + var _a; + const [options, callback] = util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.getMetadata === 'object' && + this.methods.getMetadata) || + {}; + const reqOpts = { + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. 
Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + const localInterceptors = this.interceptors + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + return this.parent.getRequestInterceptors().concat(localInterceptors); + } + setMetadata(metadata, optionsOrCallback, cb) { + var _a, _b; + const [options, callback] = util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.setMetadata === 'object' && + this.methods.setMetadata) || + {}; + const reqOpts = { + method: 'PATCH', + uri: '', + ...methodConfig.reqOpts, + json: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json, + ...metadata, + }, + qs: { + ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts }; + if (this.projectId) { + reqOpts.projectId = this.projectId; + } + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .filter(x => x.trim()) // Limit to non-empty strings. + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/'); + const childInterceptors = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + const localInterceptors = [].slice.call(this.interceptors); + reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); + if (reqOpts.shouldReturnStream) { + return this.parent.requestStream(reqOpts); + } + this.parent.request(reqOpts, callback); + } + request(reqOpts, callback) { + this.request_(reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return this.request_(opts); + } +} +promisifyAll(ServiceObject, { exclude: ['getRequestInterceptors'] }); +export { ServiceObject }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service.d.ts b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service.d.ts new file mode 100644 index 0000000..e7177a2 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service.d.ts @@ -0,0 +1,125 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
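The delete/exists/get/getMetadata/setMetadata implementations above all normalize their arguments through `util.maybeOptionsOrCallback` (defined in util.js later in this diff). An illustrative standalone version of that helper, mirroring the logic shown there:

```
// Mirrors util.maybeOptionsOrCallback from util.js below.
function maybeOptionsOrCallback(optionsOrCallback, cb) {
  return typeof optionsOrCallback === 'function'
    ? [{}, optionsOrCallback] // called as method(callback)
    : [optionsOrCallback, cb]; // called as method(options, callback)
}

const done = err => { if (err) console.error(err); };

console.log(maybeOptionsOrCallback(done));
// -> [ {}, [Function: done] ]
console.log(maybeOptionsOrCallback({ignoreNotFound: true}, done));
// -> [ { ignoreNotFound: true }, [Function: done] ]
```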
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { AuthClient, GoogleAuth, GoogleAuthOptions } from 'google-auth-library'; +import * as r from 'teeny-request'; +import { Interceptor } from './service-object.js'; +import { BodyResponseCallback, DecorateRequestOptions, MakeAuthenticatedRequest, PackageJson } from './util.js'; +export declare const DEFAULT_PROJECT_ID_TOKEN = "{{projectId}}"; +export interface StreamRequestOptions extends DecorateRequestOptions { + shouldReturnStream: true; +} +export interface ServiceConfig { + /** + * The base URL to make API requests to. + */ + baseUrl: string; + /** + * The API Endpoint to use when connecting to the service. + * Example: storage.googleapis.com + */ + apiEndpoint: string; + /** + * The scopes required for the request. + */ + scopes: string[]; + projectIdRequired?: boolean; + packageJson: PackageJson; + /** + * Reuse an existing `AuthClient` or `GoogleAuth` client instead of creating a new one. + */ + authClient?: AuthClient | GoogleAuth; + /** + * Set to true if the endpoint is a custom URL + */ + customEndpoint?: boolean; +} +export interface ServiceOptions extends Omit { + authClient?: AuthClient | GoogleAuth; + interceptors_?: Interceptor[]; + email?: string; + token?: string; + timeout?: number; + userAgent?: string; + useAuthWithCustomEndpoint?: boolean; +} +export declare class Service { + baseUrl: string; + private globalInterceptors; + interceptors: Interceptor[]; + private packageJson; + projectId: string; + private projectIdRequired; + providedUserAgent?: string; + makeAuthenticatedRequest: MakeAuthenticatedRequest; + authClient: GoogleAuth; + apiEndpoint: string; + timeout?: number; + universeDomain: string; + customEndpoint: boolean; + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. + * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config: ServiceConfig, options?: ServiceOptions); + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors(): Function[]; + /** + * Get and update the Service's project ID. + * + * @param {function} callback - The callback function. + */ + getProjectId(): Promise; + getProjectId(callback: (err: Error | null, projectId?: string) => void): void; + protected getProjectIdAsync(): Promise; + /** + * Make an authenticated API request. + * + * @private + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + private request_; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. 
+ * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts: DecorateRequestOptions, callback: BodyResponseCallback): void; + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts: DecorateRequestOptions): r.Request; +} diff --git a/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service.js b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service.js new file mode 100644 index 0000000..69c924c --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/service.js @@ -0,0 +1,180 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { DEFAULT_UNIVERSE, } from 'google-auth-library'; +import * as uuid from 'uuid'; +import { GCCL_GCS_CMD_KEY, util, } from './util.js'; +import { getRuntimeTrackingString, getUserAgentString, getModuleFormat, } from '../util.js'; +export const DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; +export class Service { + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. + * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config, options = {}) { + this.baseUrl = config.baseUrl; + this.apiEndpoint = config.apiEndpoint; + this.timeout = options.timeout; + this.globalInterceptors = Array.isArray(options.interceptors_) + ? 
options.interceptors_ + : []; + this.interceptors = []; + this.packageJson = config.packageJson; + this.projectId = options.projectId || DEFAULT_PROJECT_ID_TOKEN; + this.projectIdRequired = config.projectIdRequired !== false; + this.providedUserAgent = options.userAgent; + this.universeDomain = options.universeDomain || DEFAULT_UNIVERSE; + this.customEndpoint = config.customEndpoint || false; + this.makeAuthenticatedRequest = util.makeAuthenticatedRequestFactory({ + ...config, + projectIdRequired: this.projectIdRequired, + projectId: this.projectId, + authClient: options.authClient || config.authClient, + credentials: options.credentials, + keyFile: options.keyFilename, + email: options.email, + clientOptions: { + universeDomain: options.universeDomain, + ...options.clientOptions, + }, + }); + this.authClient = this.makeAuthenticatedRequest.authClient; + const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; + if (isCloudFunctionEnv) { + this.interceptors.push({ + request(reqOpts) { + reqOpts.forever = false; + return reqOpts; + }, + }); + } + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + return [].slice + .call(this.globalInterceptors) + .concat(this.interceptors) + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + } + getProjectId(callback) { + if (!callback) { + return this.getProjectIdAsync(); + } + this.getProjectIdAsync().then(p => callback(null, p), callback); + } + async getProjectIdAsync() { + const projectId = await this.authClient.getProjectId(); + if (this.projectId === DEFAULT_PROJECT_ID_TOKEN && projectId) { + this.projectId = projectId; + } + return this.projectId; + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts, timeout: this.timeout }; + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl]; + if (this.projectIdRequired) { + if (reqOpts.projectId) { + uriComponents.push('projects'); + uriComponents.push(reqOpts.projectId); + } + else { + uriComponents.push('projects'); + uriComponents.push(this.projectId); + } + } + uriComponents.push(reqOpts.uri); + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/') + // Some URIs have colon separators. + // Bad: https://.../projects/:list + // Good: https://.../projects:list + .replace(/\/:/g, ':'); + const requestInterceptors = this.getRequestInterceptors(); + const interceptorArray = Array.isArray(reqOpts.interceptors_) + ? 
reqOpts.interceptors_ + : []; + interceptorArray.forEach(interceptor => { + if (typeof interceptor.request === 'function') { + requestInterceptors.push(interceptor.request); + } + }); + requestInterceptors.forEach(requestInterceptor => { + reqOpts = requestInterceptor(reqOpts); + }); + delete reqOpts.interceptors_; + const pkg = this.packageJson; + let userAgent = getUserAgentString(); + if (this.providedUserAgent) { + userAgent = `${this.providedUserAgent} ${userAgent}`; + } + reqOpts.headers = { + ...reqOpts.headers, + 'User-Agent': userAgent, + 'x-goog-api-client': `${getRuntimeTrackingString()} gccl/${pkg.version}-${getModuleFormat()} gccl-invocation-id/${uuid.v4()}`, + }; + if (reqOpts[GCCL_GCS_CMD_KEY]) { + reqOpts.headers['x-goog-api-client'] += ` gccl-gcs-cmd/${reqOpts[GCCL_GCS_CMD_KEY]}`; + } + if (reqOpts.shouldReturnStream) { + return this.makeAuthenticatedRequest(reqOpts); + } + else { + this.makeAuthenticatedRequest(reqOpts, callback); + } + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts, callback) { + Service.prototype.request_.call(this, reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return Service.prototype.request_.call(this, opts); + } +} diff --git a/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/util.d.ts b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/util.d.ts new file mode 100644 index 0000000..a805858 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/util.d.ts @@ -0,0 +1,334 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/// +import { AuthClient, GoogleAuth, GoogleAuthOptions } from 'google-auth-library'; +import { CredentialBody } from 'google-auth-library'; +import * as r from 'teeny-request'; +import { Duplex, DuplexOptions, Readable, Writable } from 'stream'; +import { Interceptor } from './service-object.js'; +/** + * A unique symbol for providing a `gccl-gcs-cmd` value + * for the `X-Goog-API-Client` header. + * + * E.g. 
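For reference, an illustrative sketch of the URI assembly performed by `Service.request_` above (ignoring the absolute-URL override): each component is stripped of leading and trailing slashes, joined with `/`, and `/:` is collapsed to `:` for custom-method style paths. The endpoint and path below are placeholders:

```
function buildUri(baseUrl, relativeUri, projectId, projectIdRequired) {
  const components = [baseUrl];
  if (projectIdRequired) {
    components.push('projects', projectId);
  }
  components.push(relativeUri);
  return components
    .map(c => c.replace(/^\/*|\/*$/g, '')) // trimSlashesRegex from request_
    .join('/')
    .replace(/\/:/g, ':'); // '.../projects/:list' -> '.../projects:list'
}

// Placeholder values, for illustration only:
console.log(buildUri('https://storage.googleapis.com/storage/v1/', '/b/my-bucket/', undefined, false));
// -> https://storage.googleapis.com/storage/v1/b/my-bucket
```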
the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` + **/ +export declare const GCCL_GCS_CMD_KEY: unique symbol; +export type ResponseBody = any; +export interface DuplexifyOptions extends DuplexOptions { + autoDestroy?: boolean; + end?: boolean; +} +export interface Duplexify extends Duplex { + readonly destroyed: boolean; + setWritable(writable: Writable | false | null): void; + setReadable(readable: Readable | false | null): void; +} +export interface DuplexifyConstructor { + obj(writable?: Writable | false | null, readable?: Readable | false | null, options?: DuplexifyOptions): Duplexify; + new (writable?: Writable | false | null, readable?: Readable | false | null, options?: DuplexifyOptions): Duplexify; + (writable?: Writable | false | null, readable?: Readable | false | null, options?: DuplexifyOptions): Duplexify; +} +export interface ParsedHttpRespMessage { + resp: r.Response; + err?: ApiError; +} +export interface MakeAuthenticatedRequest { + (reqOpts: DecorateRequestOptions): Duplexify; + (reqOpts: DecorateRequestOptions, options?: MakeAuthenticatedRequestOptions): void | Abortable; + (reqOpts: DecorateRequestOptions, callback?: BodyResponseCallback): void | Abortable; + (reqOpts: DecorateRequestOptions, optionsOrCallback?: MakeAuthenticatedRequestOptions | BodyResponseCallback): void | Abortable | Duplexify; + getCredentials: (callback: (err?: Error | null, credentials?: CredentialBody) => void) => void; + authClient: GoogleAuth; +} +export interface Abortable { + abort(): void; +} +export type AbortableDuplex = Duplexify & Abortable; +export interface PackageJson { + name: string; + version: string; +} +export interface MakeAuthenticatedRequestFactoryConfig extends Omit { + /** + * Automatically retry requests if the response is related to rate limits or + * certain intermittent server errors. We will exponentially backoff + * subsequent requests by default. (default: true) + */ + autoRetry?: boolean; + /** + * If true, just return the provided request options. Default: false. + */ + customEndpoint?: boolean; + /** + * If true, will authenticate when using a custom endpoint. Default: false. + */ + useAuthWithCustomEndpoint?: boolean; + /** + * Account email address, required for PEM/P12 usage. + */ + email?: string; + /** + * Maximum number of automatic retries attempted before returning the error. + * (default: 3) + */ + maxRetries?: number; + stream?: Duplexify; + /** + * A pre-instantiated `AuthClient` or `GoogleAuth` client that should be used. + * A new client will be created if this is not set. + */ + authClient?: AuthClient | GoogleAuth; + /** + * Determines if a projectId is required for authenticated requests. Defaults to `true`. + */ + projectIdRequired?: boolean; +} +export interface MakeAuthenticatedRequestOptions { + onAuthenticated: OnAuthenticatedCallback; +} +export interface OnAuthenticatedCallback { + (err: Error | null, reqOpts?: DecorateRequestOptions): void; +} +export interface GoogleErrorBody { + code: number; + errors?: GoogleInnerError[]; + response: r.Response; + message?: string; +} +export interface GoogleInnerError { + reason?: string; + message?: string; +} +export interface MakeWritableStreamOptions { + /** + * A connection instance used to get a token with and send the request + * through. + */ + connection?: {}; + /** + * Metadata to send at the head of the request. + */ + metadata?: { + contentType?: string; + }; + /** + * Request object, in the format of a standard Node.js http.request() object. 
+ */ + request?: r.Options; + makeAuthenticatedRequest(reqOpts: r.OptionsWithUri & { + [GCCL_GCS_CMD_KEY]?: string; + }, fnobj: { + onAuthenticated(err: Error | null, authenticatedReqOpts?: r.Options): void; + }): void; +} +export interface DecorateRequestOptions extends r.CoreOptions { + autoPaginate?: boolean; + autoPaginateVal?: boolean; + objectMode?: boolean; + maxRetries?: number; + uri: string; + interceptors_?: Interceptor[]; + shouldReturnStream?: boolean; + projectId?: string; + [GCCL_GCS_CMD_KEY]?: string; +} +export interface ParsedHttpResponseBody { + body: ResponseBody; + err?: Error; +} +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +export declare class ApiError extends Error { + code?: number; + errors?: GoogleInnerError[]; + response?: r.Response; + constructor(errorMessage: string); + constructor(errorBody: GoogleErrorBody); + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. + * @returns {string} + */ + static createMultiErrorMessage(err: GoogleErrorBody, errors?: GoogleInnerError[]): string; +} +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +export declare class PartialFailureError extends Error { + errors?: GoogleInnerError[]; + response?: r.Response; + constructor(b: GoogleErrorBody); +} +export interface BodyResponseCallback { + (err: Error | ApiError | null, body?: ResponseBody, res?: r.Response): void; +} +export interface RetryOptions { + retryDelayMultiplier?: number; + totalTimeout?: number; + maxRetryDelay?: number; + autoRetry?: boolean; + maxRetries?: number; + retryableErrorFn?: (err: ApiError) => boolean; +} +export interface MakeRequestConfig { + /** + * Automatically retry requests if the response is related to rate limits or + * certain intermittent server errors. We will exponentially backoff + * subsequent requests by default. (default: true) + */ + autoRetry?: boolean; + /** + * Maximum number of automatic retries attempted before returning the error. + * (default: 3) + */ + maxRetries?: number; + retries?: number; + retryOptions?: RetryOptions; + stream?: Duplexify; + shouldRetryFn?: (response?: r.Response) => boolean; +} +export declare class Util { + ApiError: typeof ApiError; + PartialFailureError: typeof PartialFailureError; + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop(): void; + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err: Error | null, resp?: r.Response | null, body?: ResponseBody, callback?: BodyResponseCallback): void; + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage: r.Response): ParsedHttpRespMessage; + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. 
+ * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body: ResponseBody): ParsedHttpResponseBody; + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup: Duplexify, options: MakeWritableStreamOptions, onComplete?: Function): void; + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ + shouldRetryRequest(err?: ApiError): boolean; + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. + */ + makeAuthenticatedRequestFactory(config: MakeAuthenticatedRequestFactoryConfig): MakeAuthenticatedRequest; + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. 
+ * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. + */ + makeRequest(reqOpts: DecorateRequestOptions, config: MakeRequestConfig, callback: BodyResponseCallback): void | Abortable; + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. + */ + decorateRequest(reqOpts: DecorateRequestOptions, projectId: string): DecorateRequestOptions; + isCustomType(unknown: any, module: string): boolean; + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback void>(optionsOrCallback?: T | C, cb?: C): [T, C]; + _getDefaultHeaders(gcclGcsCmd?: string): { + 'User-Agent': string; + 'x-goog-api-client': string; + }; +} +declare const util: Util; +export { util }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/util.js b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/util.js new file mode 100644 index 0000000..6f99d97 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/nodejs-common/util.js @@ -0,0 +1,669 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/*! + * @module common/util + */ +import { replaceProjectIdToken, MissingProjectIdError, } from '@google-cloud/projectify'; +import * as ent from 'ent'; +import { GoogleAuth } from 'google-auth-library'; +import retryRequest from 'retry-request'; +import { Transform } from 'stream'; +import { teenyRequest } from 'teeny-request'; +import * as uuid from 'uuid'; +import { DEFAULT_PROJECT_ID_TOKEN } from './service.js'; +import { getModuleFormat, getRuntimeTrackingString, getUserAgentString, } from '../util.js'; +import duplexify from 'duplexify'; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import { getPackageJSON } from '../package-json-helper.cjs'; +const packageJson = getPackageJSON(); +/** + * A unique symbol for providing a `gccl-gcs-cmd` value + * for the `X-Goog-API-Client` header. + * + * E.g. the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` + **/ +export const GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD'); +const requestDefaults = { + timeout: 60000, + gzip: true, + forever: true, + pool: { + maxSockets: Infinity, + }, +}; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + * @private + */ +const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. 
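The retry behaviour referenced in these comments boils down to: auto-retry is on by default, capped at 3 attempts, and an error counts as retriable when its HTTP code or inner-error reason matches the lists in `shouldRetryRequest()` below. An illustrative sketch of that classification:

```
function isRetriable(err) {
  if (!err) return false;
  if ([408, 429, 500, 502, 503, 504].includes(err.code)) return true;
  return (err.errors || []).some(e =>
    e.reason === 'rateLimitExceeded' ||
    e.reason === 'userRateLimitExceeded' ||
    (typeof e.reason === 'string' && e.reason.includes('EAI_AGAIN'))
  );
}

console.log(isRetriable({code: 503}));                                          // true
console.log(isRetriable({code: 403, errors: [{reason: 'rateLimitExceeded'}]})); // true
console.log(isRetriable({code: 404}));                                          // false
```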
+ * + * @const {number} + * @private + */ +const MAX_RETRY_DEFAULT = 3; +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +export class ApiError extends Error { + constructor(errorBodyOrMessage) { + super(); + if (typeof errorBodyOrMessage !== 'object') { + this.message = errorBodyOrMessage || ''; + return; + } + const errorBody = errorBodyOrMessage; + this.code = errorBody.code; + this.errors = errorBody.errors; + this.response = errorBody.response; + try { + this.errors = JSON.parse(this.response.body).error.errors; + } + catch (e) { + this.errors = errorBody.errors; + } + this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); + Error.captureStackTrace(this); + } + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. + * @returns {string} + */ + static createMultiErrorMessage(err, errors) { + const messages = new Set(); + if (err.message) { + messages.add(err.message); + } + if (errors && errors.length) { + errors.forEach(({ message }) => messages.add(message)); + } + else if (err.response && err.response.body) { + messages.add(ent.decode(err.response.body.toString())); + } + else if (!err.message) { + messages.add('A failure occurred during this request.'); + } + let messageArr = Array.from(messages); + if (messageArr.length > 1) { + messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); + messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); + messageArr.push('\n'); + } + return messageArr.join('\n'); + } +} +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +export class PartialFailureError extends Error { + constructor(b) { + super(); + const errorObject = b; + this.errors = errorObject.errors; + this.name = 'PartialFailureError'; + this.response = errorObject.response; + this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); + } +} +export class Util { + constructor() { + this.ApiError = ApiError; + this.PartialFailureError = PartialFailureError; + } + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop() { } + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err, resp, body, callback) { + callback = callback || util.noop; + const parsedResp = { + err: err || null, + ...(resp && util.parseHttpRespMessage(resp)), + ...(body && util.parseHttpRespBody(body)), + }; + // Assign the parsed body to resp.body, even if { json: false } was passed + // as a request option. + // We assume that nobody uses the previously unparsed value of resp.body. + if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { + parsedResp.resp.body = parsedResp.body; + } + if (parsedResp.err && resp) { + parsedResp.err.response = resp; + } + callback(parsedResp.err, parsedResp.body, parsedResp.resp); + } + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. + * @return {object} parsedHttpRespMessage - The parsed response. 
+ * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage) { + const parsedHttpRespMessage = { + resp: httpRespMessage, + }; + if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { + // Unknown error. Format according to ApiError standard. + parsedHttpRespMessage.err = new ApiError({ + errors: new Array(), + code: httpRespMessage.statusCode, + message: httpRespMessage.statusMessage, + response: httpRespMessage, + }); + } + return parsedHttpRespMessage; + } + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body) { + const parsedHttpRespBody = { + body, + }; + if (typeof body === 'string') { + try { + parsedHttpRespBody.body = JSON.parse(body); + } + catch (err) { + parsedHttpRespBody.body = body; + } + } + if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { + // Error from JSON API. + parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); + } + return parsedHttpRespBody; + } + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup, options, onComplete) { + var _a; + onComplete = onComplete || util.noop; + const writeStream = new ProgressStream(); + writeStream.on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(writeStream); + const defaultReqOpts = { + method: 'POST', + qs: { + uploadType: 'multipart', + }, + timeout: 0, + maxRetries: 0, + }; + const metadata = options.metadata || {}; + const reqOpts = { + ...defaultReqOpts, + ...options.request, + qs: { + ...defaultReqOpts.qs, + ...(_a = options.request) === null || _a === void 0 ? 
void 0 : _a.qs, + }, + multipart: [ + { + 'Content-Type': 'application/json', + body: JSON.stringify(metadata), + }, + { + 'Content-Type': metadata.contentType || 'application/octet-stream', + body: writeStream, + }, + ], + }; + options.makeAuthenticatedRequest(reqOpts, { + onAuthenticated(err, authenticatedReqOpts) { + if (err) { + dup.destroy(err); + return; + } + requestDefaults.headers = util._getDefaultHeaders(reqOpts[GCCL_GCS_CMD_KEY]); + const request = teenyRequest.defaults(requestDefaults); + request(authenticatedReqOpts, (err, resp, body) => { + util.handleResp(err, resp, body, (err, data) => { + if (err) { + dup.destroy(err); + return; + } + dup.emit('response', resp); + onComplete(data); + }); + }); + }, + }); + } + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ + shouldRetryRequest(err) { + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (err.errors) { + for (const e of err.errors) { + const reason = e.reason; + if (reason === 'rateLimitExceeded') { + return true; + } + if (reason === 'userRateLimitExceeded') { + return true; + } + if (reason && reason.includes('EAI_AGAIN')) { + return true; + } + } + } + } + return false; + } + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. + */ + makeAuthenticatedRequestFactory(config) { + const googleAutoAuthConfig = { ...config }; + if (googleAutoAuthConfig.projectId === DEFAULT_PROJECT_ID_TOKEN) { + delete googleAutoAuthConfig.projectId; + } + let authClient; + if (googleAutoAuthConfig.authClient instanceof GoogleAuth) { + // Use an existing `GoogleAuth` + authClient = googleAutoAuthConfig.authClient; + } + else { + // Pass an `AuthClient` & `clientOptions` to `GoogleAuth`, if available + authClient = new GoogleAuth({ + ...googleAutoAuthConfig, + authClient: googleAutoAuthConfig.authClient, + clientOptions: googleAutoAuthConfig.clientOptions, + }); + } + function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { + let stream; + let projectId; + const reqConfig = { ...config }; + let activeRequest_; + if (!optionsOrCallback) { + stream = duplexify(); + reqConfig.stream = stream; + } + const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : undefined; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; + async function setProjectId() { + projectId = await authClient.getProjectId(); + } + const onAuthenticated = async (err, authenticatedReqOpts) => { + const authLibraryError = err; + const autoAuthFailed = err && + typeof err.message === 'string' && + err.message.indexOf('Could not load the default credentials') > -1; + if (autoAuthFailed) { + // Even though authentication failed, the API might not actually + // care. + authenticatedReqOpts = reqOpts; + } + if (!err || autoAuthFailed) { + try { + // Try with existing `projectId` value + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + if (e instanceof MissingProjectIdError) { + // A `projectId` was required, but we don't have one. + try { + // Attempt to get the `projectId` + await setProjectId(); + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + // Re-use the "Could not load the default credentials error" if + // auto auth failed. + err = err || e; + } + } + else { + // Some other error unrelated to missing `projectId` + err = err || e; + } + } + } + if (err) { + if (stream) { + stream.destroy(err); + } + else { + const fn = options && options.onAuthenticated + ? options.onAuthenticated + : callback; + fn(err); + } + return; + } + if (options && options.onAuthenticated) { + options.onAuthenticated(null, authenticatedReqOpts); + } + else { + activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { + if (apiResponseError && + apiResponseError.code === 401 && + authLibraryError) { + // Re-use the "Could not load the default credentials error" if + // the API request failed due to missing credentials. + apiResponseError = authLibraryError; + } + callback(apiResponseError, ...params); + }); + } + }; + const prepareRequest = async () => { + try { + const getProjectId = async () => { + if (config.projectId && + config.projectId !== DEFAULT_PROJECT_ID_TOKEN) { + // The user provided a project ID. We don't need to check with the + // auth client, it could be incorrect. + return config.projectId; + } + if (config.projectIdRequired === false) { + // A projectId is not required. Return the default. + return DEFAULT_PROJECT_ID_TOKEN; + } + return setProjectId(); + }; + const authorizeRequest = async () => { + if (reqConfig.customEndpoint && + !reqConfig.useAuthWithCustomEndpoint) { + // Using a custom API override. Do not use `google-auth-library` for + // authentication. (ex: connecting to a local Datastore server) + return reqOpts; + } + else { + return authClient.authorizeRequest(reqOpts); + } + }; + const [_projectId, authorizedReqOpts] = await Promise.all([ + getProjectId(), + authorizeRequest(), + ]); + if (_projectId) { + projectId = _projectId; + } + return onAuthenticated(null, authorizedReqOpts); + } + catch (e) { + return onAuthenticated(e); + } + }; + prepareRequest(); + if (stream) { + return stream; + } + return { + abort() { + setImmediate(() => { + if (activeRequest_) { + activeRequest_.abort(); + activeRequest_ = null; + } + }); + }, + }; + } + const mar = makeAuthenticatedRequest; + mar.getCredentials = authClient.getCredentials.bind(authClient); + mar.authClient = authClient; + return mar; + } + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. 
+ * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. + */ + makeRequest(reqOpts, config, callback) { + var _a, _b, _c, _d, _e; + let autoRetryValue = AUTO_RETRY_DEFAULT; + if (config.autoRetry !== undefined) { + autoRetryValue = config.autoRetry; + } + else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { + autoRetryValue = config.retryOptions.autoRetry; + } + let maxRetryValue = MAX_RETRY_DEFAULT; + if (config.maxRetries !== undefined) { + maxRetryValue = config.maxRetries; + } + else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { + maxRetryValue = config.retryOptions.maxRetries; + } + requestDefaults.headers = this._getDefaultHeaders(reqOpts[GCCL_GCS_CMD_KEY]); + const options = { + request: teenyRequest.defaults(requestDefaults), + retries: autoRetryValue !== false ? maxRetryValue : 0, + noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, + shouldRetryFn(httpRespMessage) { + var _a, _b; + const err = util.parseHttpRespMessage(httpRespMessage).err; + if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { + return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); + } + return err && util.shouldRetryRequest(err); + }, + maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, + retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, + totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? void 0 : _e.totalTimeout, + }; + if (typeof reqOpts.maxRetries === 'number') { + options.retries = reqOpts.maxRetries; + options.noResponseRetries = reqOpts.maxRetries; + } + if (!config.stream) { + return retryRequest(reqOpts, options, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err, response, body) => { + util.handleResp(err, response, body, callback); + }); + } + const dup = config.stream; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let requestStream; + const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; + if (isGetRequest) { + requestStream = retryRequest(reqOpts, options); + dup.setReadable(requestStream); + } + else { + // Streaming writable HTTP requests cannot be retried. + requestStream = options.request(reqOpts); + dup.setWritable(requestStream); + } + // Replay the Request events back to the stream. + requestStream + .on('error', dup.destroy.bind(dup)) + .on('response', dup.emit.bind(dup, 'response')) + .on('complete', dup.emit.bind(dup, 'complete')); + dup.abort = requestStream.abort; + return dup; + } + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. 
+ */ + decorateRequest(reqOpts, projectId) { + delete reqOpts.autoPaginate; + delete reqOpts.autoPaginateVal; + delete reqOpts.objectMode; + if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { + delete reqOpts.qs.autoPaginate; + delete reqOpts.qs.autoPaginateVal; + reqOpts.qs = replaceProjectIdToken(reqOpts.qs, projectId); + } + if (Array.isArray(reqOpts.multipart)) { + reqOpts.multipart = reqOpts.multipart.map(part => { + return replaceProjectIdToken(part, projectId); + }); + } + if (reqOpts.json !== null && typeof reqOpts.json === 'object') { + delete reqOpts.json.autoPaginate; + delete reqOpts.json.autoPaginateVal; + reqOpts.json = replaceProjectIdToken(reqOpts.json, projectId); + } + reqOpts.uri = replaceProjectIdToken(reqOpts.uri, projectId); + return reqOpts; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + isCustomType(unknown, module) { + function getConstructorName(obj) { + return obj.constructor && obj.constructor.name.toLowerCase(); + } + const moduleNameParts = module.split('/'); + const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); + const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); + if (subModuleName && getConstructorName(unknown) !== subModuleName) { + return false; + } + let walkingModule = unknown; + // eslint-disable-next-line no-constant-condition + while (true) { + if (getConstructorName(walkingModule) === parentModuleName) { + return true; + } + walkingModule = walkingModule.parent; + if (!walkingModule) { + return false; + } + } + } + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback(optionsOrCallback, cb) { + return typeof optionsOrCallback === 'function' + ? [{}, optionsOrCallback] + : [optionsOrCallback, cb]; + } + _getDefaultHeaders(gcclGcsCmd) { + const headers = { + 'User-Agent': getUserAgentString(), + 'x-goog-api-client': `${getRuntimeTrackingString()} gccl/${packageJson.version}-${getModuleFormat()} gccl-invocation-id/${uuid.v4()}`, + }; + if (gcclGcsCmd) { + headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`; + } + return headers; + } +} +/** + * Basic Passthrough Stream that records the number of bytes read + * every time the cursor is moved. 
+ */ +class ProgressStream extends Transform { + constructor() { + super(...arguments); + this.bytesRead = 0; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + _transform(chunk, encoding, callback) { + this.bytesRead += chunk.length; + this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); + this.push(chunk); + callback(); + } +} +const util = new Util(); +export { util }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/notification.d.ts b/node_modules/@google-cloud/storage/build/esm/src/notification.d.ts new file mode 100644 index 0000000..0d3341a --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/notification.d.ts @@ -0,0 +1,106 @@ +import { BaseMetadata, ServiceObject } from './nodejs-common/index.js'; +import { ResponseBody } from './nodejs-common/util.js'; +import { Bucket } from './bucket.js'; +export interface DeleteNotificationOptions { + userProject?: string; +} +export interface GetNotificationMetadataOptions { + userProject?: string; +} +/** + * @typedef {array} GetNotificationMetadataResponse + * @property {object} 0 The notification metadata. + * @property {object} 1 The full API response. + */ +export type GetNotificationMetadataResponse = [ResponseBody, unknown]; +/** + * @callback GetNotificationMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} files The notification metadata. + * @param {object} apiResponse The full API response. + */ +export interface GetNotificationMetadataCallback { + (err: Error | null, metadata?: ResponseBody, apiResponse?: unknown): void; +} +/** + * @typedef {array} GetNotificationResponse + * @property {Notification} 0 The {@link Notification} + * @property {object} 1 The full API response. + */ +export type GetNotificationResponse = [Notification, unknown]; +export interface GetNotificationOptions { + /** + * Automatically create the object if it does not exist. Default: `false`. + */ + autoCreate?: boolean; + /** + * The ID of the project which will be billed for the request. + */ + userProject?: string; +} +/** + * @callback GetNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The {@link Notification}. + * @param {object} apiResponse The full API response. + */ +export interface GetNotificationCallback { + (err: Error | null, notification?: Notification | null, apiResponse?: unknown): void; +} +/** + * @callback DeleteNotificationCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ +export interface DeleteNotificationCallback { + (err: Error | null, apiResponse?: unknown): void; +} +export interface NotificationMetadata extends BaseMetadata { + custom_attributes?: { + [key: string]: string; + }; + event_types?: string[]; + object_name_prefix?: string; + payload_format?: 'JSON_API_V1' | 'NONE'; + topic?: string; +} +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. 
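A minimal usage sketch for the Notification class documented here, based on the JSDoc examples in this file; the bucket name and notification ID are placeholders:

```
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function inspectNotification() {
  const notification = storage.bucket('my-bucket').notification('1');
  // getMetadata() resolves with [metadata, apiResponse]
  const [metadata] = await notification.getMetadata();
  console.log(metadata.topic, metadata.payload_format, metadata.event_types);
}

inspectNotification().catch(console.error);
```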
+ * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +declare class Notification extends ServiceObject { + constructor(bucket: Bucket, id: string); +} +/** + * Reference to the {@link Notification} class. + * @name module:@google-cloud/storage.Notification + * @see Notification + */ +export { Notification }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/notification.js b/node_modules/@google-cloud/storage/build/esm/src/notification.js new file mode 100644 index 0000000..682defe --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/notification.js @@ -0,0 +1,267 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +import { ServiceObject } from './nodejs-common/index.js'; +import { promisifyAll } from '@google-cloud/promisify'; +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. + * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +class Notification extends ServiceObject { + constructor(bucket, id) { + const requestQueryObject = {}; + const methods = { + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * @method Notification#create + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationRequest} [options] Metadata to set for + * the notification. + * @param {CreateNotificationCallback} [callback] Callback function. 
+ * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.create(function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.create().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: true, + /** + * @typedef {array} DeleteNotificationResponse + * @property {object} 0 The full API response. + */ + /** + * Permanently deletes a notification subscription. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteNotificationCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/deleteNotification.js + * region_tag:storage_delete_bucket_notification + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get a notification and its metadata if it exists. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * See {@link Bucket#createNotification} for create options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false`. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.get(function(err, notification, apiResponse) { + * // `notification.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.get().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get the notification's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationMetadataCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/getMetadataNotifications.js + * region_tag:storage_print_pubsub_bucket_notification + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} NotificationExistsResponse + * @property {boolean} 0 Whether the notification exists or not. + */ + /** + * @callback NotificationExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the notification exists or not. + */ + /** + * Check if the notification exists. + * + * @method Notification#exists + * @param {NotificationExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: true, + }; + super({ + parent: bucket, + baseUrl: '/notificationConfigs', + id: id.toString(), + createMethod: bucket.createNotification.bind(bucket), + methods, + }); + } +} +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(Notification); +/** + * Reference to the {@link Notification} class. + * @name module:@google-cloud/storage.Notification + * @see Notification + */ +export { Notification }; diff --git a/node_modules/@google-cloud/storage/build/esm/src/package-json-helper.cjs b/node_modules/@google-cloud/storage/build/esm/src/package-json-helper.cjs new file mode 100644 index 0000000..794923b --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/package-json-helper.cjs @@ -0,0 +1,21 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +/* eslint-disable node/no-missing-require */ + +function getPackageJSON() { + return require('../../../package.json'); +} + +exports.getPackageJSON = getPackageJSON; diff --git a/node_modules/@google-cloud/storage/build/esm/src/resumable-upload.d.ts b/node_modules/@google-cloud/storage/build/esm/src/resumable-upload.d.ts new file mode 100644 index 0000000..d88fe04 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/resumable-upload.d.ts @@ -0,0 +1,339 @@ +/// +/// +import { GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { GoogleAuthOptions } from 'google-auth-library'; +import { Writable, WritableOptions } from 'stream'; +import { RetryOptions, PreconditionOptions } from './storage.js'; +import { GCCL_GCS_CMD_KEY } from './nodejs-common/util.js'; +import { FileMetadata } from './file.js'; +export declare const PROTOCOL_REGEX: RegExp; +export interface ErrorWithCode extends Error { + code: number; + status?: number | string; +} +export type CreateUriCallback = (err: Error | null, uri?: string) => void; +export interface Encryption { + key: {}; + hash: {}; +} +export type PredefinedAcl = 'authenticatedRead' | 'bucketOwnerFullControl' | 'bucketOwnerRead' | 'private' | 'projectPrivate' | 'publicRead'; +export interface QueryParameters extends PreconditionOptions { + contentEncoding?: string; + kmsKeyName?: string; + predefinedAcl?: PredefinedAcl; + projection?: 'full' | 'noAcl'; + userProject?: string; +} +export interface UploadConfig extends Pick { + /** + * The API endpoint used for the request. + * Defaults to `storage.googleapis.com`. + * + * **Warning**: + * If this value does not match the current GCP universe an emulator context + * will be assumed and authentication will be bypassed. + */ + apiEndpoint?: string; + /** + * The name of the destination bucket. + */ + bucket: string; + /** + * The name of the destination file. + */ + file: string; + /** + * The GoogleAuthOptions passed to google-auth-library + */ + authConfig?: GoogleAuthOptions; + /** + * If you want to re-use an auth client from google-auto-auth, pass an + * instance here. + * Defaults to GoogleAuth and gets automatically overridden if an + * emulator context is detected. + */ + authClient?: { + request: (opts: GaxiosOptions) => Promise> | GaxiosPromise; + }; + /** + * Create a separate request per chunk. + * + * This value is in bytes and should be a multiple of 256 KiB (2^18). + * We recommend using at least 8 MiB for the chunk size. + * + * @link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + chunkSize?: number; + /** + * For each API request we send, you may specify custom request options that + * we'll add onto the request. The request options follow the gaxios API: + * https://github.com/googleapis/gaxios#request-options. + */ + customRequestOptions?: GaxiosOptions; + /** + * This will cause the upload to fail if the current generation of the remote + * object does not match the one provided here. + */ + generation?: number; + /** + * Set to `true` if the upload is only a subset of the overall object to upload. + * This can be used when planning to continue the upload an object in another + * session. + * + * **Must be used with {@link UploadConfig.chunkSize} != `0`**. + * + * If this is a continuation of a previous upload, {@link UploadConfig.offset} + * should be set. + * + * @see {@link checkUploadStatus} for checking the status of an existing upload. 
+ */ + isPartialUpload?: boolean; + /** + * A customer-supplied encryption key. See + * https://cloud.google.com/storage/docs/encryption#customer-supplied. + */ + key?: string | Buffer; + /** + * Resource name of the Cloud KMS key, of the form + * `projects/my-project/locations/global/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overrides the object metadata's + * `kms_key_name` value, if any. + */ + kmsKeyName?: string; + /** + * Any metadata you wish to set on the object. + */ + metadata?: ConfigMetadata; + /** + * The starting byte in relation to the final uploaded object. + * **Must be used with {@link UploadConfig.uri}**. + * + * If resuming an interrupted stream, do not supply this argument unless you + * know the exact number of bytes the service has AND the provided stream's + * first byte is a continuation from that provided offset. If resuming an + * interrupted stream and this option has not been provided, we will treat + * the provided upload stream as the object to upload - where the first byte + * of the upload stream is the first byte of the object to upload; skipping + * any bytes that are already present on the server. + * + * @see {@link checkUploadStatus} for checking the status of an existing upload. + * @see {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload#resume-upload.} + */ + offset?: number; + /** + * Set an Origin header when creating the resumable upload URI. + */ + origin?: string; + /** + * Specify query parameters that go along with the initial upload request. See + * https://cloud.google.com/storage/docs/json_api/v1/objects/insert#parameters + */ + params?: QueryParameters; + /** + * Apply a predefined set of access controls to the created file. + */ + predefinedAcl?: PredefinedAcl; + /** + * Make the uploaded file private. (Alias for config.predefinedAcl = + * 'private') + */ + private?: boolean; + /** + * Make the uploaded file public. (Alias for config.predefinedAcl = + * 'publicRead') + */ + public?: boolean; + /** + * The service domain for a given Cloud universe. + */ + universeDomain?: string; + /** + * If you already have a resumable URI from a previously-created resumable + * upload, just pass it in here and we'll use that. + * + * If resuming an interrupted stream and the {@link UploadConfig.offset} + * option has not been provided, we will treat the provided upload stream as + * the object to upload - where the first byte of the upload stream is the + * first byte of the object to upload; skipping any bytes that are already + * present on the server. + * + * @see {@link checkUploadStatus} for checking the status of an existing upload. + */ + uri?: string; + /** + * If the bucket being accessed has requesterPays functionality enabled, this + * can be set to control which project is billed for the access of this file. + */ + userProject?: string; + /** + * Configuration options for retrying retryable errors. + */ + retryOptions: RetryOptions; + [GCCL_GCS_CMD_KEY]?: string; +} +export interface ConfigMetadata { + [key: string]: any; + /** + * Set the length of the object being uploaded. If uploading a partial + * object, this is the overall size of the finalized object. + */ + contentLength?: number; + /** + * Set the content type of the incoming data. 
+ */ + contentType?: string; +} +export interface GoogleInnerError { + reason?: string; +} +export interface ApiError extends Error { + code?: number; + errors?: GoogleInnerError[]; +} +export interface CheckUploadStatusConfig { + /** + * Set to `false` to disable retries within this method. + * + * @defaultValue `true` + */ + retry?: boolean; +} +export declare class Upload extends Writable { + #private; + bucket: string; + file: string; + apiEndpoint: string; + baseURI: string; + authConfig?: { + scopes?: string[]; + }; + authClient: { + request: (opts: GaxiosOptions) => Promise> | GaxiosPromise; + }; + cacheKey: string; + chunkSize?: number; + customRequestOptions: GaxiosOptions; + generation?: number; + key?: string | Buffer; + kmsKeyName?: string; + metadata: ConfigMetadata; + offset?: number; + origin?: string; + params: QueryParameters; + predefinedAcl?: PredefinedAcl; + private?: boolean; + public?: boolean; + uri?: string; + userProject?: string; + encryption?: Encryption; + uriProvidedManually: boolean; + numBytesWritten: number; + numRetries: number; + contentLength: number | '*'; + retryOptions: RetryOptions; + timeOfFirstRequest: number; + isPartialUpload: boolean; + private currentInvocationId; + /** + * A cache of buffers written to this instance, ready for consuming + */ + private writeBuffers; + private numChunksReadInRequest; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. + * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + private localWriteCache; + private localWriteCacheByteLength; + private upstreamEnded; + constructor(cfg: UploadConfig); + /** + * Prevent 'finish' event until the upload has succeeded. + * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent?: () => void): void; + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk: Buffer | string, encoding: BufferEncoding, readCallback?: () => void): void; + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. + */ + private prependLocalBufferToUpstream; + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + */ + private pullFromChunkBuffer; + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + private waitForNextChunk; + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. + * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + private upstreamIterator; + createURI(): Promise; + createURI(callback: CreateUriCallback): void; + protected createURIAsync(): Promise; + private continueUploading; + startUploading(): Promise; + private responseHandler; + /** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. 
+ * @returns the current upload status + */ + checkUploadStatus(config?: CheckUploadStatusConfig): Promise>; + private getAndSetOffset; + private makeRequest; + private makeRequestStream; + /** + * @return {bool} is the request good? + */ + private onResponse; + /** + * @param resp GaxiosResponse object from previous attempt + */ + private attemptDelayedRetry; + /** + * The amount of time to wait before retrying the request, in milliseconds. + * If negative, do not retry. + * + * @returns the amount of time to wait, in milliseconds. + */ + private getRetryDelay; + private sanitizeEndpoint; + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status: number): boolean; +} +export declare function upload(cfg: UploadConfig): Upload; +export declare function createURI(cfg: UploadConfig): Promise; +export declare function createURI(cfg: UploadConfig, callback: CreateUriCallback): void; +/** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ +export declare function checkUploadStatus(cfg: UploadConfig & Required>): Promise>; diff --git a/node_modules/@google-cloud/storage/build/esm/src/resumable-upload.js b/node_modules/@google-cloud/storage/build/esm/src/resumable-upload.js new file mode 100644 index 0000000..841f5e7 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/resumable-upload.js @@ -0,0 +1,853 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; +import AbortController from 'abort-controller'; +import { createHash } from 'crypto'; +import * as gaxios from 'gaxios'; +import { DEFAULT_UNIVERSE, GoogleAuth, } from 'google-auth-library'; +import { Readable, Writable } from 'stream'; +import AsyncRetry from 'async-retry'; +import * as uuid from 'uuid'; +import { getRuntimeTrackingString, getModuleFormat, getUserAgentString, } from './util.js'; +import { GCCL_GCS_CMD_KEY } from './nodejs-common/util.js'; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import { getPackageJSON } from './package-json-helper.cjs'; +const NOT_FOUND_STATUS_CODE = 404; +const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; +const packageJson = getPackageJSON(); +export const PROTOCOL_REGEX = /^(\w*):\/\//; +export class Upload extends Writable { + constructor(cfg) { + var _a; + super(cfg); + _Upload_instances.add(this); + this.numBytesWritten = 0; + this.numRetries = 0; + this.currentInvocationId = { + checkUploadStatus: uuid.v4(), + chunk: uuid.v4(), + uri: uuid.v4(), + }; + /** + * A cache of buffers written to this instance, ready for consuming + */ + this.writeBuffers = []; + this.numChunksReadInRequest = 0; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. + * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; + this.upstreamEnded = false; + _Upload_gcclGcsCmd.set(this, void 0); + cfg = cfg || {}; + if (!cfg.bucket || !cfg.file) { + throw new Error('A bucket and file name are required'); + } + if (cfg.offset && !cfg.uri) { + throw new RangeError('Cannot provide an `offset` without providing a `uri`'); + } + if (cfg.isPartialUpload && !cfg.chunkSize) { + throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`'); + } + cfg.authConfig = cfg.authConfig || {}; + cfg.authConfig.scopes = [ + 'https://www.googleapis.com/auth/devstorage.full_control', + ]; + this.authClient = cfg.authClient || new GoogleAuth(cfg.authConfig); + const universe = cfg.universeDomain || DEFAULT_UNIVERSE; + this.apiEndpoint = `https://storage.${universe}`; + if (cfg.apiEndpoint && cfg.apiEndpoint !== this.apiEndpoint) { + this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); + const hostname = new URL(this.apiEndpoint).hostname; + // check if it is a domain of a known universe + const isDomain = hostname === universe; + const isDefaultUniverseDomain = hostname === DEFAULT_UNIVERSE; + // check if it is a subdomain of a known universe + // by checking a last (universe's length + 1) of a hostname + const isSubDomainOfUniverse = hostname.slice(-(universe.length + 1)) === `.${universe}`; + const isSubDomainOfDefaultUniverse = hostname.slice(-(DEFAULT_UNIVERSE.length + 1)) === + `.${DEFAULT_UNIVERSE}`; + if (!isDomain && + !isDefaultUniverseDomain && + !isSubDomainOfUniverse && + !isSubDomainOfDefaultUniverse) { + // a custom, non-universe domain, + // use gaxios + this.authClient = gaxios; + } + } + this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; + this.bucket = cfg.bucket; + const cacheKeyElements = [cfg.bucket, cfg.file]; + if (typeof cfg.generation === 'number') { + cacheKeyElements.push(`${cfg.generation}`); + } + this.cacheKey = cacheKeyElements.join('/'); + 
this.customRequestOptions = cfg.customRequestOptions || {}; + this.file = cfg.file; + this.generation = cfg.generation; + this.kmsKeyName = cfg.kmsKeyName; + this.metadata = cfg.metadata || {}; + this.offset = cfg.offset; + this.origin = cfg.origin; + this.params = cfg.params || {}; + this.userProject = cfg.userProject; + this.chunkSize = cfg.chunkSize; + this.retryOptions = cfg.retryOptions; + this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false; + if (cfg.key) { + const base64Key = Buffer.from(cfg.key).toString('base64'); + this.encryption = { + key: base64Key, + hash: createHash('sha256').update(cfg.key).digest('base64'), + }; + } + this.predefinedAcl = cfg.predefinedAcl; + if (cfg.private) + this.predefinedAcl = 'private'; + if (cfg.public) + this.predefinedAcl = 'publicRead'; + const autoRetry = cfg.retryOptions.autoRetry; + this.uriProvidedManually = !!cfg.uri; + this.uri = cfg.uri; + if (this.offset) { + // we're resuming an incomplete upload + this.numBytesWritten = this.offset; + } + this.numRetries = 0; // counter for number of retries currently executed + if (!autoRetry) { + cfg.retryOptions.maxRetries = 0; + } + this.timeOfFirstRequest = Date.now(); + const contentLength = cfg.metadata + ? Number(cfg.metadata.contentLength) + : NaN; + this.contentLength = isNaN(contentLength) ? '*' : contentLength; + __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[GCCL_GCS_CMD_KEY], "f"); + this.once('writing', () => { + if (this.uri) { + this.continueUploading(); + } + else { + this.createURI(err => { + if (err) { + return this.destroy(err); + } + this.startUploading(); + return; + }); + } + }); + } + /** + * Prevent 'finish' event until the upload has succeeded. + * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent = () => { }) { + this.upstreamEnded = true; + this.once('uploadFinished', fireFinishEvent); + process.nextTick(() => { + this.emit('upstreamFinished'); + // it's possible `_write` may not be called - namely for empty object uploads + this.emit('writing'); + }); + } + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk, encoding, readCallback = () => { }) { + // Backwards-compatible event + this.emit('writing'); + this.writeBuffers.push(typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk); + this.once('readFromChunkBuffer', readCallback); + process.nextTick(() => this.emit('wroteToChunkBuffer')); + } + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. 
+ */ + prependLocalBufferToUpstream(keepLastBytes) { + // Typically, the upstream write buffers should be smaller than the local + // cache, so we can save time by setting the local cache as the new + // upstream write buffer array and appending the old array to it + let initialBuffers = []; + if (keepLastBytes) { + // we only want the last X bytes + let bytesKept = 0; + while (keepLastBytes > bytesKept) { + // load backwards because we want the last X bytes + // note: `localWriteCacheByteLength` is reset below + let buf = this.localWriteCache.pop(); + if (!buf) + break; + bytesKept += buf.byteLength; + if (bytesKept > keepLastBytes) { + // we have gone over the amount desired, let's keep the last X bytes + // of this buffer + const diff = bytesKept - keepLastBytes; + buf = buf.subarray(diff); + bytesKept -= diff; + } + initialBuffers.unshift(buf); + } + } + else { + // we're keeping all of the local cache, simply use it as the initial buffer + initialBuffers = this.localWriteCache; + } + // Append the old upstream to the new + const append = this.writeBuffers; + this.writeBuffers = initialBuffers; + for (const buf of append) { + this.writeBuffers.push(buf); + } + // reset last buffers sent + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + */ + *pullFromChunkBuffer(limit) { + while (limit) { + const buf = this.writeBuffers.shift(); + if (!buf) + break; + let bufToYield = buf; + if (buf.byteLength > limit) { + bufToYield = buf.subarray(0, limit); + this.writeBuffers.unshift(buf.subarray(limit)); + limit = 0; + } + else { + limit -= buf.byteLength; + } + yield bufToYield; + // Notify upstream we've read from the buffer and we're able to consume + // more. It can also potentially send more data down as we're currently + // iterating. + this.emit('readFromChunkBuffer'); + } + } + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + async waitForNextChunk() { + const willBeMoreChunks = await new Promise(resolve => { + // There's data available - it should be digested + if (this.writeBuffers.length) { + return resolve(true); + } + // The upstream writable ended, we shouldn't expect any more data. + if (this.upstreamEnded) { + return resolve(false); + } + // Nothing immediate seems to be determined. We need to prepare some + // listeners to determine next steps... + const wroteToChunkBufferCallback = () => { + removeListeners(); + return resolve(true); + }; + const upstreamFinishedCallback = () => { + removeListeners(); + // this should be the last chunk, if there's anything there + if (this.writeBuffers.length) + return resolve(true); + return resolve(false); + }; + // Remove listeners when we're ready to callback. + const removeListeners = () => { + this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); + this.removeListener('upstreamFinished', upstreamFinishedCallback); + }; + // If there's data recently written it should be digested + this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); + // If the upstream finishes let's see if there's anything to grab + this.once('upstreamFinished', upstreamFinishedCallback); + }); + return willBeMoreChunks; + } + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. 
+ * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + async *upstreamIterator(limit = Infinity) { + // read from upstream chunk buffer + while (limit && (await this.waitForNextChunk())) { + // read until end or limit has been reached + for (const chunk of this.pullFromChunkBuffer(limit)) { + limit -= chunk.byteLength; + yield chunk; + } + } + } + createURI(callback) { + if (!callback) { + return this.createURIAsync(); + } + this.createURIAsync().then(r => callback(null, r), callback); + } + async createURIAsync() { + const metadata = { ...this.metadata }; + const headers = {}; + // Delete content length and content type from metadata if they exist. + // These are headers and should not be sent as part of the metadata. + if (metadata.contentLength) { + headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); + delete metadata.contentLength; + } + if (metadata.contentType) { + headers['X-Upload-Content-Type'] = metadata.contentType; + delete metadata.contentType; + } + let googAPIClient = `${getRuntimeTrackingString()} gccl/${packageJson.version}-${getModuleFormat()} gccl-invocation-id/${this.currentInvocationId.uri}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + // Check if headers already exist before creating new ones + const reqOpts = { + method: 'POST', + url: [this.baseURI, this.bucket, 'o'].join('/'), + params: Object.assign({ + name: this.file, + uploadType: 'resumable', + }, this.params), + data: metadata, + headers: { + 'User-Agent': getUserAgentString(), + 'x-goog-api-client': googAPIClient, + ...headers, + }, + }; + if (metadata.contentLength) { + reqOpts.headers['X-Upload-Content-Length'] = + metadata.contentLength.toString(); + } + if (metadata.contentType) { + reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; + } + if (typeof this.generation !== 'undefined') { + reqOpts.params.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName) { + reqOpts.params.kmsKeyName = this.kmsKeyName; + } + if (this.predefinedAcl) { + reqOpts.params.predefinedAcl = this.predefinedAcl; + } + if (this.origin) { + reqOpts.headers.Origin = this.origin; + } + const uri = await AsyncRetry(async (bail) => { + var _a, _b, _c; + try { + const res = await this.makeRequest(reqOpts); + // We have successfully got a URI we can now create a new invocation id + this.currentInvocationId.uri = uuid.v4(); + return res.headers.location; + } + catch (err) { + const e = err; + const apiError = { + code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, + name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, + message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText, + errors: [ + { + reason: e.code, + }, + ], + }; + if (this.retryOptions.maxRetries > 0 && + this.retryOptions.retryableErrorFn(apiError)) { + throw e; + } + else { + return bail(e); + } + } + }, { + retries: this.retryOptions.maxRetries, + factor: this.retryOptions.retryDelayMultiplier, + maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + this.uri = uri; + this.offset = 0; + // emit the newly generated URI for future reuse, if necessary. + this.emit('uri', uri); + return uri; + } + async continueUploading() { + var _a; + (_a = this.offset) !== null && _a !== void 0 ? 
_a : (await this.getAndSetOffset()); + return this.startUploading(); + } + async startUploading() { + const multiChunkMode = !!this.chunkSize; + let responseReceived = false; + this.numChunksReadInRequest = 0; + if (!this.offset) { + this.offset = 0; + } + // Check if the offset (server) is too far behind the current stream + if (this.offset < this.numBytesWritten) { + const delta = this.numBytesWritten - this.offset; + const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`; + this.emit('error', new RangeError(message)); + return; + } + // Check if we should 'fast-forward' to the relevant data to upload + if (this.numBytesWritten < this.offset) { + // 'fast-forward' to the byte where we need to upload. + // only push data from the byte after the one we left off on + const fastForwardBytes = this.offset - this.numBytesWritten; + for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { + _chunk; // discard the data up until the point we want + } + this.numBytesWritten = this.offset; + } + let expectedUploadSize = undefined; + // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available + if (typeof this.contentLength === 'number') { + expectedUploadSize = this.contentLength - this.numBytesWritten; + } + // `expectedUploadSize` should be no more than the `chunkSize`. + // It's possible this is the last chunk request for a multiple + // chunk upload, thus smaller than the chunk size. + if (this.chunkSize) { + expectedUploadSize = expectedUploadSize + ? Math.min(this.chunkSize, expectedUploadSize) + : this.chunkSize; + } + // A queue for the upstream data + const upstreamQueue = this.upstreamIterator(expectedUploadSize); + // The primary read stream for this request. This stream retrieves no more + // than the exact requested amount from upstream. 
+ const requestStream = new Readable({ + read: async () => { + // Don't attempt to retrieve data upstream if we already have a response + if (responseReceived) + requestStream.push(null); + const result = await upstreamQueue.next(); + if (result.value) { + this.numChunksReadInRequest++; + if (multiChunkMode) { + // save ever buffer used in the request in multi-chunk mode + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + else { + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + this.numBytesWritten += result.value.byteLength; + this.emit('progress', { + bytesWritten: this.numBytesWritten, + contentLength: this.contentLength, + }); + requestStream.push(result.value); + } + if (result.done) { + requestStream.push(null); + } + }, + }); + let googAPIClient = `${getRuntimeTrackingString()} gccl/${packageJson.version}-${getModuleFormat()} gccl-invocation-id/${this.currentInvocationId.chunk}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const headers = { + 'User-Agent': getUserAgentString(), + 'x-goog-api-client': googAPIClient, + }; + // If using multiple chunk upload, set appropriate header + if (multiChunkMode) { + // We need to know how much data is available upstream to set the `Content-Range` header. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + for await (const chunk of this.upstreamIterator(expectedUploadSize)) { + // This will conveniently track and keep the size of the buffers. + // We will reach either the expected upload size or the remainder of the stream. + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk); + } + // This is the sum from the `#addLocalBufferCache` calls + const bytesToUpload = this.localWriteCacheByteLength; + // Important: we want to know if the upstream has ended and the queue is empty before + // unshifting data back into the queue. This way we will know if this is the last request or not. + const isLastChunkOfUpload = !(await this.waitForNextChunk()); + // Important: put the data back in the queue for the actual upload + this.prependLocalBufferToUpstream(); + let totalObjectSize = this.contentLength; + if (typeof this.contentLength !== 'number' && + isLastChunkOfUpload && + !this.isPartialUpload) { + // Let's let the server know this is the last chunk of the object since we didn't set it before. + totalObjectSize = bytesToUpload + this.numBytesWritten; + } + // `- 1` as the ending byte is inclusive in the request. 
+ const endingByte = bytesToUpload + this.numBytesWritten - 1; + // `Content-Length` for multiple chunk uploads is the size of the chunk, + // not the overall object + headers['Content-Length'] = bytesToUpload; + headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; + } + else { + headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; + } + const reqOpts = { + method: 'PUT', + url: this.uri, + headers, + body: requestStream, + }; + try { + const resp = await this.makeRequestStream(reqOpts); + if (resp) { + responseReceived = true; + await this.responseHandler(resp); + } + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + // Process the API response to look for errors that came in + // the response body. + async responseHandler(resp) { + if (resp.data.error) { + this.destroy(resp.data.error); + return; + } + // At this point we can safely create a new id for the chunk + this.currentInvocationId.chunk = uuid.v4(); + const moreDataToUpload = await this.waitForNextChunk(); + const shouldContinueWithNextMultiChunkRequest = this.chunkSize && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + resp.headers.range && + moreDataToUpload; + /** + * This is true when we're expecting to upload more data in a future request, + * yet the upstream for the upload session has been exhausted. + */ + const shouldContinueUploadInAnotherRequest = this.isPartialUpload && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + !moreDataToUpload; + if (shouldContinueWithNextMultiChunkRequest) { + // Use the upper value in this header to determine where to start the next chunk. + // We should not assume that the server received all bytes sent in the request. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const range = resp.headers.range; + this.offset = Number(range.split('-')[1]) + 1; + // We should not assume that the server received all bytes sent in the request. + // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const missingBytes = this.numBytesWritten - this.offset; + if (missingBytes) { + // As multi-chunk uploads send one chunk per request and pulls one + // chunk into the pipeline, prepending the missing bytes back should + // be fine for the next request. + this.prependLocalBufferToUpstream(missingBytes); + this.numBytesWritten -= missingBytes; + } + else { + // No bytes missing - no need to keep the local cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + // continue uploading next chunk + this.continueUploading(); + } + else if (!this.isSuccessfulResponse(resp.status) && + !shouldContinueUploadInAnotherRequest) { + const err = new Error('Upload failed'); + err.code = resp.status; + err.name = 'Upload failed'; + if (resp === null || resp === void 0 ? void 0 : resp.data) { + err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; + } + this.destroy(err); + } + else { + // no need to keep the cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + if (resp && resp.data) { + resp.data.size = Number(resp.data.size); + } + this.emit('metadata', resp.data); + // Allow the object (Upload) to continue naturally so the user's + // "finish" event fires. 
+ this.emit('uploadFinished'); + } + } + /** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ + async checkUploadStatus(config = {}) { + let googAPIClient = `${getRuntimeTrackingString()} gccl/${packageJson.version}-${getModuleFormat()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const opts = { + method: 'PUT', + url: this.uri, + headers: { + 'Content-Length': 0, + 'Content-Range': 'bytes */*', + 'User-Agent': getUserAgentString(), + 'x-goog-api-client': googAPIClient, + }, + }; + try { + const resp = await this.makeRequest(opts); + // Successfully got the offset we can now create a new offset invocation id + this.currentInvocationId.checkUploadStatus = uuid.v4(); + return resp; + } + catch (e) { + if (config.retry === false || + !(e instanceof Error) || + !this.retryOptions.retryableErrorFn(e)) { + throw e; + } + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + throw e; + } + await new Promise(res => setTimeout(res, retryDelay)); + return this.checkUploadStatus(config); + } + } + async getAndSetOffset() { + try { + // we want to handle retries in this method. + const resp = await this.checkUploadStatus({ retry: false }); + if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { + if (typeof resp.headers.range === 'string') { + this.offset = Number(resp.headers.range.split('-')[1]) + 1; + return; + } + } + this.offset = 0; + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + async makeRequest(reqOpts) { + if (this.encryption) { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryption.hash.toString(); + } + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + // Let gaxios know we will handle a 308 error code ourselves. + reqOpts.validateStatus = (status) => { + return (this.isSuccessfulResponse(status) || + status === RESUMABLE_INCOMPLETE_STATUS_CODE); + }; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + async makeRequestStream(reqOpts) { + const controller = new AbortController(); + const errorCallback = () => controller.abort(); + this.once('error', errorCallback); + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + reqOpts.signal = controller.signal; + reqOpts.validateStatus = () => true; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + const successfulRequest = this.onResponse(res); + this.removeListener('error', errorCallback); + return successfulRequest ? 
res : null; + } + /** + * @return {bool} is the request good? + */ + onResponse(resp) { + if (resp.status !== 200 && + this.retryOptions.retryableErrorFn({ + code: resp.status, + message: resp.statusText, + name: resp.statusText, + })) { + this.attemptDelayedRetry(resp); + return false; + } + this.emit('response', resp); + return true; + } + /** + * @param resp GaxiosResponse object from previous attempt + */ + attemptDelayedRetry(resp) { + if (this.numRetries < this.retryOptions.maxRetries) { + if (resp.status === NOT_FOUND_STATUS_CODE && + this.numChunksReadInRequest === 0) { + this.startUploading(); + } + else { + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + this.destroy(new Error(`Retry total time limit exceeded - ${resp.data}`)); + return; + } + // Unshift the local cache back in case it's needed for the next request. + this.numBytesWritten -= this.localWriteCacheByteLength; + this.prependLocalBufferToUpstream(); + // We don't know how much data has been received by the server. + // `continueUploading` will recheck the offset via `getAndSetOffset`. + // If `offset` < `numberBytesReceived` then we will raise a RangeError + // as we've streamed too much data that has been missed - this should + // not be the case for multi-chunk uploads as `lastChunkSent` is the + // body of the entire request. + this.offset = undefined; + setTimeout(this.continueUploading.bind(this), retryDelay); + } + this.numRetries++; + } + else { + this.destroy(new Error('Retry limit exceeded - ' + resp.data)); + } + } + /** + * The amount of time to wait before retrying the request, in milliseconds. + * If negative, do not retry. + * + * @returns the amount of time to wait, in milliseconds. + */ + getRetryDelay() { + const randomMs = Math.round(Math.random() * 1000); + const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * + 1000 + + randomMs; + const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - + (Date.now() - this.timeOfFirstRequest); + const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; + return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); + } + /* + * Prepare user-defined API endpoint for compatibility with our API. + */ + sanitizeEndpoint(url) { + if (!PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status) { + return status >= 200 && status < 300; + } +} +_Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() { + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; +}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) { + this.localWriteCache.push(buf); + this.localWriteCacheByteLength += buf.byteLength; +}; +export function upload(cfg) { + return new Upload(cfg); +} +export function createURI(cfg, callback) { + const up = new Upload(cfg); + if (!callback) { + return up.createURI(); + } + up.createURI().then(r => callback(null, r), callback); +} +/** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. 
+ * @returns the current upload status + */ +export function checkUploadStatus(cfg) { + const up = new Upload(cfg); + return up.checkUploadStatus(); +} diff --git a/node_modules/@google-cloud/storage/build/esm/src/signer.d.ts b/node_modules/@google-cloud/storage/build/esm/src/signer.d.ts new file mode 100644 index 0000000..b15435e --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/signer.d.ts @@ -0,0 +1,148 @@ +/// +/// +import * as http from 'http'; +import { Storage } from './storage.js'; +import { GoogleAuth } from 'google-auth-library'; +type GoogleAuthLike = Pick; +/** + * @deprecated Use {@link GoogleAuth} instead + */ +export interface AuthClient { + sign(blobToSign: string): Promise; + getCredentials(): Promise<{ + client_email?: string; + }>; +} +export interface BucketI { + name: string; +} +export interface FileI { + name: string; +} +export interface Query { + [key: string]: string; +} +export interface GetSignedUrlConfigInternal { + expiration: number; + accessibleAt?: Date; + method: string; + extensionHeaders?: http.OutgoingHttpHeaders; + queryParams?: Query; + cname?: string; + contentMd5?: string; + contentType?: string; + bucket: string; + file?: string; + /** + * The host for the generated signed URL + * + * @example + * 'https://localhost:8080/' + */ + host?: string | URL; + /** + * An endpoint for generating the signed URL + * + * @example + * 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + */ + signingEndpoint?: string | URL; +} +export interface SignerGetSignedUrlConfig { + method: 'GET' | 'PUT' | 'DELETE' | 'POST'; + expires: string | number | Date; + accessibleAt?: string | number | Date; + virtualHostedStyle?: boolean; + version?: 'v2' | 'v4'; + cname?: string; + extensionHeaders?: http.OutgoingHttpHeaders; + queryParams?: Query; + contentMd5?: string; + contentType?: string; + /** + * The host for the generated signed URL + * + * @example + * 'https://localhost:8080/' + */ + host?: string | URL; + /** + * An endpoint for generating the signed URL + * + * @example + * 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' + */ + signingEndpoint?: string | URL; +} +export type SignerGetSignedUrlResponse = string; +export type GetSignedUrlResponse = [SignerGetSignedUrlResponse]; +export interface GetSignedUrlCallback { + (err: Error | null, url?: string): void; +} +export declare enum SignerExceptionMessages { + ACCESSIBLE_DATE_INVALID = "The accessible at date provided was invalid.", + EXPIRATION_BEFORE_ACCESSIBLE_DATE = "An expiration date cannot be before accessible date.", + X_GOOG_CONTENT_SHA256 = "The header X-Goog-Content-SHA256 must be a hexadecimal string." +} +/** + * @const {string} + * @deprecated - unused + */ +export declare const PATH_STYLED_HOST = "https://storage.googleapis.com"; +export declare class URLSigner { + private auth; + private bucket; + private file?; + /** + * A {@link Storage} object. + * + * @privateRemarks + * + * Technically this is a required field, however it would be a breaking change to + * move it before optional properties. In the next major we should refactor the + * constructor of this class to only accept a config object. + */ + private storage; + constructor(auth: AuthClient | GoogleAuthLike, bucket: BucketI, file?: FileI | undefined, + /** + * A {@link Storage} object. + * + * @privateRemarks + * + * Technically this is a required field, however it would be a breaking change to + * move it before optional properties. 
In the next major we should refactor the + * constructor of this class to only accept a config object. + */ + storage?: Storage); + getSignedUrl(cfg: SignerGetSignedUrlConfig): Promise; + private getSignedUrlV2; + private getSignedUrlV4; + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers: http.OutgoingHttpHeaders): string; + getCanonicalRequest(method: string, path: string, query: string, headers: string, signedHeaders: string, contentSha256?: string): string; + getCanonicalQueryParams(query: Query): string; + getResourcePath(cname: boolean, bucket: string, file?: string): string; + parseExpires(expires: string | number | Date, current?: Date): number; + parseAccessibleAt(accessibleAt?: string | number | Date): number; +} +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +export declare class SigningError extends Error { + name: string; +} +export {}; diff --git a/node_modules/@google-cloud/storage/build/esm/src/signer.js b/node_modules/@google-cloud/storage/build/esm/src/signer.js new file mode 100644 index 0000000..f05c94e --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/signer.js @@ -0,0 +1,299 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +import * as crypto from 'crypto'; +import * as url from 'url'; +import { ExceptionMessages, Storage } from './storage.js'; +import { encodeURI, qsStringify, objectEntries, formatAsUTCISO } from './util.js'; +export var SignerExceptionMessages; +(function (SignerExceptionMessages) { + SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; + SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; + SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; +})(SignerExceptionMessages || (SignerExceptionMessages = {})); +/* + * Default signing version for getSignedUrl is 'v2'. + */ +const DEFAULT_SIGNING_VERSION = 'v2'; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +/** + * @const {string} + * @deprecated - unused + */ +export const PATH_STYLED_HOST = 'https://storage.googleapis.com'; +export class URLSigner { + constructor(auth, bucket, file, + /** + * A {@link Storage} object. + * + * @privateRemarks + * + * Technically this is a required field, however it would be a breaking change to + * move it before optional properties. In the next major we should refactor the + * constructor of this class to only accept a config object. 
+ */ + storage = new Storage()) { + this.auth = auth; + this.bucket = bucket; + this.file = file; + this.storage = storage; + } + getSignedUrl(cfg) { + const expiresInSeconds = this.parseExpires(cfg.expires); + const method = cfg.method; + const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); + if (expiresInSeconds < accessibleAtInSeconds) { + throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); + } + let customHost; + // Default style is `path`. + const isVirtualHostedStyle = cfg.virtualHostedStyle || false; + if (cfg.cname) { + customHost = cfg.cname; + } + else if (isVirtualHostedStyle) { + customHost = `https://${this.bucket.name}.storage.${this.storage.universeDomain}`; + } + const secondsToMilliseconds = 1000; + const config = Object.assign({}, cfg, { + method, + expiration: expiresInSeconds, + accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), + bucket: this.bucket.name, + file: this.file ? encodeURI(this.file.name, false) : undefined, + }); + if (customHost) { + config.cname = customHost; + } + const version = cfg.version || DEFAULT_SIGNING_VERSION; + let promise; + if (version === 'v2') { + promise = this.getSignedUrlV2(config); + } + else if (version === 'v4') { + promise = this.getSignedUrlV4(config); + } + else { + throw new Error(`Invalid signed URL version: ${version}. Supported versions are 'v2' and 'v4'.`); + } + return promise.then(query => { + var _a; + query = Object.assign(query, cfg.queryParams); + const signedUrl = new url.URL(((_a = cfg.host) === null || _a === void 0 ? void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + signedUrl.search = qsStringify(query); + return signedUrl.href; + }); + } + getSignedUrlV2(config) { + const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); + const resourcePath = this.getResourcePath(false, config.bucket, config.file); + const blobToSign = [ + config.method, + config.contentMd5 || '', + config.contentType || '', + config.expiration, + canonicalHeadersString + resourcePath, + ].join('\n'); + const sign = async () => { + var _a; + const auth = this.auth; + try { + const signature = await auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const credentials = await auth.getCredentials(); + return { + GoogleAccessId: credentials.client_email, + Expires: config.expiration, + Signature: signature, + }; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + getSignedUrlV4(config) { + var _a; + config.accessibleAt = config.accessibleAt + ? config.accessibleAt + : new Date(); + const millisecondsToSeconds = 1.0 / 1000.0; + const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; + // v4 limit expiration to be 7 days maximum + if (expiresPeriodInSeconds > SEVEN_DAYS) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + const extensionHeaders = Object.assign({}, config.extensionHeaders); + const fqdn = new url.URL(((_a = config.host) === null || _a === void 0 ? 
void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + extensionHeaders.host = fqdn.hostname; + if (config.contentMd5) { + extensionHeaders['content-md5'] = config.contentMd5; + } + if (config.contentType) { + extensionHeaders['content-type'] = config.contentType; + } + let contentSha256; + const sha256Header = extensionHeaders['x-goog-content-sha256']; + if (sha256Header) { + if (typeof sha256Header !== 'string' || + !/[A-Fa-f0-9]{40}/.test(sha256Header)) { + throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); + } + contentSha256 = sha256Header; + } + const signedHeaders = Object.keys(extensionHeaders) + .map(header => header.toLowerCase()) + .sort() + .join(';'); + const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); + const datestamp = formatAsUTCISO(config.accessibleAt); + const credentialScope = `${datestamp}/auto/storage/goog4_request`; + const sign = async () => { + var _a; + const credentials = await this.auth.getCredentials(); + const credential = `${credentials.client_email}/${credentialScope}`; + const dateISO = formatAsUTCISO(config.accessibleAt ? config.accessibleAt : new Date(), true); + const queryParams = { + 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', + 'X-Goog-Credential': credential, + 'X-Goog-Date': dateISO, + 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), + 'X-Goog-SignedHeaders': signedHeaders, + ...(config.queryParams || {}), + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); + const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); + const hash = crypto + .createHash('sha256') + .update(canonicalRequest) + .digest('hex'); + const blobToSign = [ + 'GOOG4-RSA-SHA256', + dateISO, + credentialScope, + hash, + ].join('\n'); + try { + const signature = await this.auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const signedQuery = Object.assign({}, queryParams, { + 'X-Goog-Signature': signatureHex, + }); + return signedQuery; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers) { + // Sort headers by their lowercased names + const sortedHeaders = objectEntries(headers) + // Convert header names to lowercase + .map(([headerName, value]) => [ + headerName.toLowerCase(), + value, + ]) + .sort((a, b) => a[0].localeCompare(b[0])); + return sortedHeaders + .filter(([, value]) => value !== undefined) + .map(([headerName, value]) => { + // - Convert Array (multi-valued header) into string, delimited by + // ',' (no space). + // - Trim leading and trailing spaces. 
+ // - Convert sequential (2+) spaces into a single space + const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); + return `${headerName}:${canonicalValue}\n`; + }) + .join(''); + } + getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { + return [ + method, + path, + query, + headers, + signedHeaders, + contentSha256 || 'UNSIGNED-PAYLOAD', + ].join('\n'); + } + getCanonicalQueryParams(query) { + return objectEntries(query) + .map(([key, value]) => [encodeURI(key, true), encodeURI(value, true)]) + .sort((a, b) => (a[0] < b[0] ? -1 : 1)) + .map(([key, value]) => `${key}=${value}`) + .join('&'); + } + getResourcePath(cname, bucket, file) { + if (cname) { + return '/' + (file || ''); + } + else if (file) { + return `/${bucket}/${file}`; + } + else { + return `/${bucket}`; + } + } + parseExpires(expires, current = new Date()) { + const expiresInMSeconds = new Date(expires).valueOf(); + if (isNaN(expiresInMSeconds)) { + throw new Error(ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expiresInMSeconds < current.valueOf()) { + throw new Error(ExceptionMessages.EXPIRATION_DATE_PAST); + } + return Math.floor(expiresInMSeconds / 1000); // The API expects seconds. + } + parseAccessibleAt(accessibleAt) { + const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); + if (isNaN(accessibleAtInMSeconds)) { + throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); + } + return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. + } +} +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +export class SigningError extends Error { + constructor() { + super(...arguments); + this.name = 'SigningError'; + } +} diff --git a/node_modules/@google-cloud/storage/build/esm/src/storage.d.ts b/node_modules/@google-cloud/storage/build/esm/src/storage.d.ts new file mode 100644 index 0000000..1a0ecb5 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/storage.d.ts @@ -0,0 +1,723 @@ +/// +import { ApiError, Service, ServiceOptions } from './nodejs-common/index.js'; +import { Readable } from 'stream'; +import { Bucket } from './bucket.js'; +import { Channel } from './channel.js'; +import { File } from './file.js'; +import { HmacKey, HmacKeyMetadata, HmacKeyOptions } from './hmacKey.js'; +import { CRC32CValidatorGenerator } from './crc32c.js'; +export interface GetServiceAccountOptions { + userProject?: string; +} +export interface ServiceAccount { + emailAddress?: string; +} +export type GetServiceAccountResponse = [ServiceAccount, unknown]; +export interface GetServiceAccountCallback { + (err: Error | null, serviceAccount?: ServiceAccount, apiResponse?: unknown): void; +} +export interface CreateBucketQuery { + project: string; + userProject: string; + enableObjectRetention: boolean; +} +export declare enum IdempotencyStrategy { + RetryAlways = 0, + RetryConditional = 1, + RetryNever = 2 +} +export interface RetryOptions { + retryDelayMultiplier?: number; + totalTimeout?: number; + maxRetryDelay?: number; + autoRetry?: boolean; + maxRetries?: number; + retryableErrorFn?: (err: ApiError) => boolean; + idempotencyStrategy?: IdempotencyStrategy; +} +export interface PreconditionOptions { + ifGenerationMatch?: number | string; + ifGenerationNotMatch?: number | string; + ifMetagenerationMatch?: number | string; + ifMetagenerationNotMatch?: number | string; +} +export interface StorageOptions extends ServiceOptions { + /** + * The API endpoint of the service used to 
make requests. + * Defaults to `storage.googleapis.com`. + */ + apiEndpoint?: string; + crc32cGenerator?: CRC32CValidatorGenerator; + retryOptions?: RetryOptions; +} +export interface BucketOptions { + crc32cGenerator?: CRC32CValidatorGenerator; + kmsKeyName?: string; + preconditionOpts?: PreconditionOptions; + userProject?: string; +} +export interface Cors { + maxAgeSeconds?: number; + method?: string[]; + origin?: string[]; + responseHeader?: string[]; +} +interface Versioning { + enabled: boolean; +} +/** + * Custom placement configuration. + * Initially used for dual-region buckets. + **/ +export interface CustomPlacementConfig { + dataLocations?: string[]; +} +export interface AutoclassConfig { + enabled?: boolean; + terminalStorageClass?: 'NEARLINE' | 'ARCHIVE'; +} +export interface CreateBucketRequest { + archive?: boolean; + autoclass?: AutoclassConfig; + coldline?: boolean; + cors?: Cors[]; + customPlacementConfig?: CustomPlacementConfig; + dra?: boolean; + enableObjectRetention?: boolean; + location?: string; + multiRegional?: boolean; + nearline?: boolean; + regional?: boolean; + requesterPays?: boolean; + retentionPolicy?: object; + rpo?: string; + standard?: boolean; + storageClass?: string; + userProject?: string; + versioning?: Versioning; +} +export type CreateBucketResponse = [Bucket, unknown]; +export interface BucketCallback { + (err: Error | null, bucket?: Bucket | null, apiResponse?: unknown): void; +} +export type GetBucketsResponse = [Bucket[], {}, unknown]; +export interface GetBucketsCallback { + (err: Error | null, buckets: Bucket[], nextQuery?: {}, apiResponse?: unknown): void; +} +export interface GetBucketsRequest { + prefix?: string; + project?: string; + autoPaginate?: boolean; + maxApiCalls?: number; + maxResults?: number; + pageToken?: string; + userProject?: string; +} +export interface HmacKeyResourceResponse { + metadata: HmacKeyMetadata; + secret: string; +} +export type CreateHmacKeyResponse = [HmacKey, string, HmacKeyResourceResponse]; +export interface CreateHmacKeyOptions { + projectId?: string; + userProject?: string; +} +export interface CreateHmacKeyCallback { + (err: Error | null, hmacKey?: HmacKey | null, secret?: string | null, apiResponse?: HmacKeyResourceResponse): void; +} +export interface GetHmacKeysOptions { + projectId?: string; + serviceAccountEmail?: string; + showDeletedKeys?: boolean; + autoPaginate?: boolean; + maxApiCalls?: number; + maxResults?: number; + pageToken?: string; + userProject?: string; +} +export interface GetHmacKeysCallback { + (err: Error | null, hmacKeys: HmacKey[] | null, nextQuery?: {}, apiResponse?: unknown): void; +} +export declare enum ExceptionMessages { + EXPIRATION_DATE_INVALID = "The expiration date provided was invalid.", + EXPIRATION_DATE_PAST = "An expiration date cannot be in the past." +} +export declare enum StorageExceptionMessages { + BUCKET_NAME_REQUIRED = "A bucket name is needed to use Cloud Storage.", + BUCKET_NAME_REQUIRED_CREATE = "A name is required to create a bucket.", + HMAC_SERVICE_ACCOUNT = "The first argument must be a service account email to create an HMAC key.", + HMAC_ACCESS_ID = "An access ID is needed to create an HmacKey object." +} +export type GetHmacKeysResponse = [HmacKey[]]; +export declare const PROTOCOL_REGEX: RegExp; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +export declare const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. 
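The GetBucketsRequest/GetBucketsResponse declarations above describe the bucket-listing API in its promise form. A short sketch (not from the vendored sources), assuming Application Default Credentials:

```js
// Sketch: GetBucketsResponse is [Bucket[], nextQuery, apiResponse], so the first
// element of the resolved tuple is the page of Bucket objects.
const {Storage} = require('@google-cloud/storage');

async function listBuckets() {
  const storage = new Storage();
  const [buckets] = await storage.getBuckets({
    maxResults: 10,       // at most one page of 10 results because autoPaginate is off
    autoPaginate: false,
  });
  for (const bucket of buckets) {
    console.log(bucket.name);
  }
}

listBuckets().catch(console.error);
```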
+ * + * @const {number} + */ +export declare const MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +export declare const RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +export declare const TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +export declare const MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. + * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ +export declare const RETRYABLE_ERR_FN_DEFAULT: (err?: ApiError) => boolean; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. + * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. 
+ * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *
ACLs
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +export declare class Storage extends Service { + /** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ + static Bucket: typeof Bucket; + /** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ + static Channel: typeof Channel; + /** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ + static File: typeof File; + /** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ + static HmacKey: typeof HmacKey; + static acl: { + OWNER_ROLE: string; + READER_ROLE: string; + WRITER_ROLE: string; + }; + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + acl: typeof Storage.acl; + crc32cGenerator: CRC32CValidatorGenerator; + getBucketsStream(): Readable; + getHmacKeysStream(): Readable; + retryOptions: RetryOptions; + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. 
If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. 
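The retryOptions properties documented above correspond to the RetryOptions interface declared earlier in storage.d.ts. A hedged sketch of overriding the documented defaults, assuming IdempotencyStrategy is re-exported from the package entry point:

```js
// Sketch: every field is optional; anything omitted falls back to the defaults
// documented above (autoRetry=true, maxRetries=3, totalTimeout=600, ...).
const {Storage, IdempotencyStrategy} = require('@google-cloud/storage');

const storage = new Storage({
  retryOptions: {
    autoRetry: true,
    maxRetries: 5,             // default is 3
    retryDelayMultiplier: 2,   // double the delay after each failed attempt
    totalTimeout: 300,         // stop retrying after 300 seconds instead of 600
    maxRetryDelay: 32,         // cap the backoff at 32 seconds
    idempotencyStrategy: IdempotencyStrategy.RetryAlways,
  },
});
```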
+ * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. + * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options?: StorageOptions); + private static sanitizeEndpoint; + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name: string, options?: BucketOptions): Bucket; + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. 
+ * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id: string, resourceId: string): Channel; + createBucket(name: string, metadata?: CreateBucketRequest): Promise; + createBucket(name: string, callback: BucketCallback): void; + createBucket(name: string, metadata: CreateBucketRequest, callback: BucketCallback): void; + createBucket(name: string, metadata: CreateBucketRequest, callback: BucketCallback): void; + createHmacKey(serviceAccountEmail: string, options?: CreateHmacKeyOptions): Promise; + createHmacKey(serviceAccountEmail: string, callback: CreateHmacKeyCallback): void; + createHmacKey(serviceAccountEmail: string, options: CreateHmacKeyOptions, callback: CreateHmacKeyCallback): void; + getBuckets(options?: GetBucketsRequest): Promise; + getBuckets(options: GetBucketsRequest, callback: GetBucketsCallback): void; + getBuckets(callback: GetBucketsCallback): void; + /** + * Query object for listing HMAC keys. + * + * @typedef {object} GetHmacKeysOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [serviceAccountEmail] If present, only HMAC keys for the + * given service account are returned. + * @property {boolean} [showDeletedKeys=false] If true, include keys in the DELETE + * state. Default is false. + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} GetHmacKeysResponse + * @property {HmacKey[]} 0 Array of {@link HmacKey} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetHmacKeysCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey[]} hmacKeys Array of {@link HmacKey} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Retrieves a list of HMAC keys matching the criteria. + * + * The authenticated user must have storage.hmacKeys.list permission for the project in which the key exists. + * + * @param {GetHmacKeysOption} options Configuration options. + * @param {GetHmacKeysCallback} callback Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getHmacKeys(function(err, hmacKeys) { + * if (!err) { + * // hmacKeys is an array of HmacKey objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. 
+ * //- + * const callback = function(err, hmacKeys, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getHmacKeys(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * hmacKeys[0].metadata; + * }; + * + * storage.getHmacKeys({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getHmacKeys().then(function(data) { + * const hmacKeys = data[0]; + * }); + * ``` + */ + getHmacKeys(options?: GetHmacKeysOptions): Promise; + getHmacKeys(callback: GetHmacKeysCallback): void; + getHmacKeys(options: GetHmacKeysOptions, callback: GetHmacKeysCallback): void; + getServiceAccount(options?: GetServiceAccountOptions): Promise; + getServiceAccount(options?: GetServiceAccountOptions): Promise; + getServiceAccount(options: GetServiceAccountOptions, callback: GetServiceAccountCallback): void; + getServiceAccount(callback: GetServiceAccountCallback): void; + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. + * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId: string, options?: HmacKeyOptions): HmacKey; +} +export {}; diff --git a/node_modules/@google-cloud/storage/build/esm/src/storage.js b/node_modules/@google-cloud/storage/build/esm/src/storage.js new file mode 100644 index 0000000..4d37108 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/storage.js @@ -0,0 +1,1144 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
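storage.d.ts above also declares getServiceAccount, whose GetServiceAccountResponse is a [ServiceAccount, apiResponse] pair. A minimal sketch (not from the vendored sources):

```js
// Sketch: prints the email of the service account Cloud Storage uses for the
// project (ServiceAccount#emailAddress in the declarations above).
const {Storage} = require('@google-cloud/storage');

async function showServiceAccount() {
  const storage = new Storage();
  const [serviceAccount] = await storage.getServiceAccount();
  console.log(serviceAccount.emailAddress);
}

showServiceAccount().catch(console.error);
```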
+import { Service } from './nodejs-common/index.js'; +import { paginator } from '@google-cloud/paginator'; +import { promisifyAll } from '@google-cloud/promisify'; +import { Readable } from 'stream'; +import { Bucket } from './bucket.js'; +import { Channel } from './channel.js'; +import { File } from './file.js'; +import { normalize } from './util.js'; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import { getPackageJSON } from './package-json-helper.cjs'; +import { HmacKey } from './hmacKey.js'; +import { CRC32C_DEFAULT_VALIDATOR_GENERATOR, } from './crc32c.js'; +import { DEFAULT_UNIVERSE } from 'google-auth-library'; +export var IdempotencyStrategy; +(function (IdempotencyStrategy) { + IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; + IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; + IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; +})(IdempotencyStrategy || (IdempotencyStrategy = {})); +export var ExceptionMessages; +(function (ExceptionMessages) { + ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; + ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; +})(ExceptionMessages || (ExceptionMessages = {})); +export var StorageExceptionMessages; +(function (StorageExceptionMessages) { + StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; + StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; + StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; + StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an HmacKey object."; +})(StorageExceptionMessages || (StorageExceptionMessages = {})); +export const PROTOCOL_REGEX = /^(\w*):\/\//; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +export const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +export const MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +export const RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +export const TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +export const MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. + * + * @const {enum} + * @private + */ +const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. + * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. 
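RETRYABLE_ERR_FN_DEFAULT, implemented immediately below, retries HTTP 408/429/5xx codes and a handful of connection-level failures. retryOptions.retryableErrorFn lets a caller substitute their own predicate; a hedged sketch mirroring part of that default logic:

```js
// Sketch: a custom predicate passed through retryOptions; the codes checked here
// are the same transient HTTP codes the default function below looks for.
const {Storage} = require('@google-cloud/storage');

const storage = new Storage({
  retryOptions: {
    retryableErrorFn: err =>
      [408, 429, 500, 502, 503, 504].includes(Number(err.code)) ||
      String(err.code).toLowerCase() === 'econnreset',
  },
});
```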
+ */ +export const RETRYABLE_ERR_FN_DEFAULT = function (err) { + var _a; + const isConnectionProblem = (reason) => { + return (reason.includes('eai_again') || // DNS lookup error + reason === 'econnreset' || + reason === 'unexpected connection closure' || + reason === 'epipe' || + reason === 'socket connection timeout'); + }; + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (typeof err.code === 'string') { + if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) { + return true; + } + const reason = err.code.toLowerCase(); + if (isConnectionProblem(reason)) { + return true; + } + } + if (err.errors) { + for (const e of err.errors) { + const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); + if (reason && isConnectionProblem(reason)) { + return true; + } + } + } + } + return false; +}; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. + * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. 
+ * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *
ACLs
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +export class Storage extends Service { + getBucketsStream() { + // placeholder body, overwritten in constructor + return new Readable(); + } + getHmacKeysStream() { + // placeholder body, overwritten in constructor + return new Readable(); + } + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. 
+ * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; + const universe = options.universeDomain || DEFAULT_UNIVERSE; + let apiEndpoint = `https://storage.${universe}`; + let customEndpoint = false; + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; + if (typeof EMULATOR_HOST === 'string') { + apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); + customEndpoint = true; + } + if (options.apiEndpoint) { + apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); + customEndpoint = true; + } + options = Object.assign({}, options, { apiEndpoint }); + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; + const config = { + apiEndpoint: options.apiEndpoint, + retryOptions: { + autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined + ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry + : AUTO_RETRY_DEFAULT, + maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) + ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries + : MAX_RETRY_DEFAULT, + retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) + ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier + : RETRY_DELAY_MULTIPLIER_DEFAULT, + totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) + ? 
(_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout + : TOTAL_TIMEOUT_DEFAULT, + maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) + ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay + : MAX_RETRY_DELAY_DEFAULT, + retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) + ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn + : RETRYABLE_ERR_FN_DEFAULT, + idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined + ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy + : IDEMPOTENCY_STRATEGY_DEFAULT, + }, + baseUrl, + customEndpoint, + useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, + projectIdRequired: false, + scopes: [ + 'https://www.googleapis.com/auth/iam', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/devstorage.full_control', + ], + packageJson: getPackageJSON(), + }; + super(config, options); + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + this.acl = Storage.acl; + this.crc32cGenerator = + options.crc32cGenerator || CRC32C_DEFAULT_VALIDATOR_GENERATOR; + this.retryOptions = config.retryOptions; + this.getBucketsStream = paginator.streamify('getBuckets'); + this.getHmacKeysStream = paginator.streamify('getHmacKeys'); + } + static sanitizeEndpoint(url) { + if (!PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name, options) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); + } + return new Bucket(this, name, options); + } + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id, resourceId) { + return new Channel(this, id, resourceId); + } + /** + * @typedef {array} CreateBucketResponse + * @property {Bucket} 0 The new {@link Bucket}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The new {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Metadata to set for the bucket. 
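The constructor logic above resolves apiEndpoint from the options or from the experimental STORAGE_EMULATOR_HOST variable, and sanitizeEndpoint adds an https:// prefix when no protocol is present and strips trailing slashes. A hedged sketch of pointing the client at a local emulator; the address and port are assumptions:

```js
// Sketch: either route marks the endpoint as custom, as in the constructor above.
const {Storage} = require('@google-cloud/storage');

// Option 1: explicit option (protocol is kept, the trailing slash is removed).
const storage = new Storage({
  apiEndpoint: 'http://127.0.0.1:4443/',  // hypothetical local emulator address
  projectId: 'local-test',                // placeholder project id
});

// Option 2: environment variable, read before the options are applied:
//   STORAGE_EMULATOR_HOST=http://127.0.0.1:4443 node app.js
```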
+ * + * @typedef {object} CreateBucketRequest + * @property {boolean} [archive=false] Specify the storage class as Archive. + * @property {object} [autoclass.enabled=false] Specify whether Autoclass is + * enabled for the bucket. + * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if + * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE. + * @property {boolean} [coldline=false] Specify the storage class as Coldline. + * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. + * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [dra=false] Specify the storage class as Durable Reduced + * Availability. + * @property {boolean} [enableObjectRetention=false] Specifiy whether or not object retention should be enabled on this bucket. + * @property {string} [location] Specify the bucket's location. If specifying + * a dual-region, the `customPlacementConfig` property should be set in conjunction. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [multiRegional=false] Specify the storage class as + * Multi-Regional. + * @property {boolean} [nearline=false] Specify the storage class as Nearline. + * @property {boolean} [regional=false] Specify the storage class as Regional. + * @property {boolean} [requesterPays=false] **Early Access Testers Only** + * Force the use of the User Project metadata field to assign operational + * costs when an operation is made on a Bucket and its objects. + * @property {string} [rpo] For dual-region buckets, controls whether turbo + * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). + * @property {boolean} [standard=true] Specify the storage class as Standard. + * @property {string} [storageClass] The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @property {Versioning} [versioning=undefined] Specify the versioning status. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * Create a bucket. + * + * Cloud Storage uses a flat namespace, so you can't create a bucket with + * a name that is already in use. For more information, see + * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} name Name of the bucket to create. + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a name is not provided. + * @see Bucket#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const callback = function(err, bucket, apiResponse) { + * // `bucket` is a Bucket object. 
+ * }; + * + * storage.createBucket('new-bucket', callback); + * + * //- + * // Create a bucket in a specific location and region. See the + * // Official JSON API docs for complete details on the `location` + * option. + * // + * //- + * const metadata = { + * location: 'US-CENTRAL1', + * regional: true + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Create a bucket with a retention policy of 6 months. + * //- + * const metadata = { + * retentionPolicy: { + * retentionPeriod: 15780000 // 6 months in seconds. + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Enable versioning on a new bucket. + * //- + * const metadata = { + * versioning: { + * enabled: true + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createBucket('new-bucket').then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_create_bucket + * Another example: + */ + createBucket(name, metadataOrCallback, callback) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); + } + let metadata; + if (!callback) { + callback = metadataOrCallback; + metadata = {}; + } + else { + metadata = metadataOrCallback; + } + const body = { + ...metadata, + name, + }; + const storageClasses = { + archive: 'ARCHIVE', + coldline: 'COLDLINE', + dra: 'DURABLE_REDUCED_AVAILABILITY', + multiRegional: 'MULTI_REGIONAL', + nearline: 'NEARLINE', + regional: 'REGIONAL', + standard: 'STANDARD', + }; + const storageClassKeys = Object.keys(storageClasses); + for (const storageClass of storageClassKeys) { + if (body[storageClass]) { + if (metadata.storageClass && metadata.storageClass !== storageClass) { + throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); + } + body.storageClass = storageClasses[storageClass]; + delete body[storageClass]; + } + } + if (body.requesterPays) { + body.billing = { + requesterPays: body.requesterPays, + }; + delete body.requesterPays; + } + const query = { + project: this.projectId, + }; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + if (body.enableObjectRetention) { + query.enableObjectRetention = body.enableObjectRetention; + delete body.enableObjectRetention; + } + this.request({ + method: 'POST', + uri: '/b', + qs: query, + json: body, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const bucket = this.bucket(name); + bucket.metadata = resp; + callback(null, bucket, resp); + }); + } + /** + * @typedef {object} CreateHmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {object} HmacKeyMetadata + * @property {string} accessId The access id identifies which HMAC key was + * used to sign a request when authenticating with HMAC. + * @property {string} etag Used to perform a read-modify-write of the key. + * @property {string} id The resource name of the HMAC key. + * @property {string} projectId The project ID. 
+ * @property {string} serviceAccountEmail The service account's email this + * HMAC key is created for. + * @property {string} state The state of this HMAC key. One of "ACTIVE", + * "INACTIVE" or "DELETED". + * @property {string} timeCreated The creation time of the HMAC key in + * RFC 3339 format. + * @property {string} [updated] The time this HMAC key was last updated in + * RFC 3339 format. + */ + /** + * @typedef {array} CreateHmacKeyResponse + * @property {HmacKey} 0 The HmacKey instance created from API response. + * @property {string} 1 The HMAC key's secret used to access the XML API. + * @property {object} 3 The raw API response. + */ + /** + * @callback CreateHmacKeyCallback Callback function. + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey The HmacKey instance created from API response. + * @param {string} secret The HMAC key's secret used to access the XML API. + * @param {object} apiResponse The raw API response. + */ + /** + * Create an HMAC key associated with an service account to authenticate + * requests to the Cloud Storage XML API. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @param {string} serviceAccountEmail The service account's email address + * with which the HMAC key is created for. + * @param {CreateHmacKeyCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('google-cloud/storage'); + * const storage = new Storage(); + * + * // Replace with your service account's email address + * const serviceAccountEmail = + * 'my-service-account@appspot.gserviceaccount.com'; + * + * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { + * if (!err) { + * // Securely store the secret for use with the XML API. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createHmacKey(serviceAccountEmail) + * .then((response) => { + * const hmacKey = response[0]; + * const secret = response[1]; + * // Securely store the secret for use with the XML API. + * }); + * ``` + */ + createHmacKey(serviceAccountEmail, optionsOrCb, cb) { + if (typeof serviceAccountEmail !== 'string') { + throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); + } + const { options, callback } = normalize(optionsOrCb, cb); + const query = Object.assign({}, options, { serviceAccountEmail }); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + method: 'POST', + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const metadata = resp.metadata; + const hmacKey = this.hmacKey(metadata.accessId, { + projectId: metadata.projectId, + }); + hmacKey.metadata = resp.metadata; + callback(null, hmacKey, resp.secret, resp); + }); + } + /** + * Query object for listing buckets. + * + * @typedef {object} GetBucketsRequest + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. 
+ * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * @typedef {array} GetBucketsResponse + * @property {Bucket[]} 0 Array of {@link Bucket} instances. + * @property {object} 1 nextQuery A query object to receive more results. + * @property {object} 2 The full API response. + */ + /** + * @callback GetBucketsCallback + * @param {?Error} err Request error, if any. + * @param {Bucket[]} buckets Array of {@link Bucket} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Get Bucket objects for all of the buckets in your project. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} + * + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @param {GetBucketsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getBuckets(function(err, buckets) { + * if (!err) { + * // buckets is an array of Bucket objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, buckets, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getBuckets(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * buckets[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); + * }; + * + * storage.getBuckets({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getBuckets().then(function(data) { + * const buckets = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_list_buckets + * Another example: + */ + getBuckets(optionsOrCallback, cb) { + const { options, callback } = normalize(optionsOrCallback, cb); + options.project = options.project || this.projectId; + this.request({ + uri: '/b', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const buckets = itemsArray.map((bucket) => { + const bucketInstance = this.bucket(bucket.id); + bucketInstance.metadata = bucket; + return bucketInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, buckets, nextQuery, resp); + }); + } + getHmacKeys(optionsOrCb, cb) { + const { options, callback } = normalize(optionsOrCb, cb); + const query = Object.assign({}, options); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? 
resp.items : []; + const hmacKeys = itemsArray.map((hmacKey) => { + const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { + projectId: hmacKey.projectId, + }); + hmacKeyInstance.metadata = hmacKey; + return hmacKeyInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, hmacKeys, nextQuery, resp); + }); + } + /** + * @typedef {array} GetServiceAccountResponse + * @property {object} 0 The service account resource. + * @property {object} 1 The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * @callback GetServiceAccountCallback + * @param {?Error} err Request error, if any. + * @param {object} serviceAccount The serviceAccount resource. + * @param {string} serviceAccount.emailAddress The service account email + * address. + * @param {object} apiResponse The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * Get the email address of this project's Google Cloud Storage service + * account. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} + * + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project to be billed for this + * request. + * @param {GetServiceAccountCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { + * if (!err) { + * const serviceAccountEmail = serviceAccount.emailAddress; + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getServiceAccount().then(function(data) { + * const serviceAccountEmail = data[0].emailAddress; + * const apiResponse = data[1]; + * }); + * ``` + */ + getServiceAccount(optionsOrCallback, cb) { + const { options, callback } = normalize(optionsOrCallback, cb); + this.request({ + uri: `/projects/${this.projectId}/serviceAccount`, + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const camelCaseResponse = {}; + for (const prop in resp) { + // eslint-disable-next-line no-prototype-builtins + if (resp.hasOwnProperty(prop)) { + const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); + camelCaseResponse[camelCaseProp] = resp[prop]; + } + } + callback(null, camelCaseResponse, resp); + }); + } + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. + * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. 
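// A minimal usage sketch for the getHmacKeys() method defined earlier in this file,
// which appears here without an example of its own. The simple callback form below and
// the logged metadata fields are assumptions based on the implementation and the
// HmacKeyMetadata typedef above, not an excerpt from the library's documentation.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

storage.getHmacKeys((err, hmacKeys) => {
  if (!err) {
    // Each entry is an HmacKey instance with `metadata` already populated from the listing.
    hmacKeys.forEach(hmacKey => {
      console.log(hmacKey.metadata.accessId, hmacKey.metadata.state);
    });
  }
});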
+ * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId, options) { + if (!accessId) { + throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); + } + return new HmacKey(this, accessId, options); + } +} +/** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ +Storage.Bucket = Bucket; +/** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ +Storage.Channel = Channel; +/** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ +Storage.File = File; +/** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ +Storage.HmacKey = HmacKey; +Storage.acl = { + OWNER_ROLE: 'OWNER', + READER_ROLE: 'READER', + WRITER_ROLE: 'WRITER', +}; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +promisifyAll(Storage, { + exclude: ['bucket', 'channel', 'hmacKey'], +}); diff --git a/node_modules/@google-cloud/storage/build/esm/src/transfer-manager.d.ts b/node_modules/@google-cloud/storage/build/esm/src/transfer-manager.d.ts new file mode 100644 index 0000000..9bf8fd7 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/transfer-manager.d.ts @@ -0,0 +1,246 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/// +import { Bucket, UploadOptions, UploadResponse } from './bucket.js'; +import { DownloadOptions, DownloadResponse, File } from './file.js'; +import { GaxiosResponse } from 'gaxios'; +export interface UploadManyFilesOptions { + concurrencyLimit?: number; + skipIfExists?: boolean; + prefix?: string; + passthroughOptions?: Omit; +} +export interface DownloadManyFilesOptions { + concurrencyLimit?: number; + prefix?: string; + stripPrefix?: string; + passthroughOptions?: DownloadOptions; +} +export interface DownloadFileInChunksOptions { + concurrencyLimit?: number; + chunkSizeBytes?: number; + destination?: string; + validation?: 'crc32c' | false; + noReturnData?: boolean; +} +export interface UploadFileInChunksOptions { + concurrencyLimit?: number; + chunkSizeBytes?: number; + uploadName?: string; + maxQueueSize?: number; + uploadId?: string; + autoAbortFailure?: boolean; + partsMap?: Map; + validation?: 'md5' | false; + headers?: { + [key: string]: string; + }; +} +export interface MultiPartUploadHelper { + bucket: Bucket; + fileName: string; + uploadId?: string; + partsMap?: Map; + initiateUpload(headers?: { + [key: string]: string; + }): Promise; + uploadPart(partNumber: number, chunk: Buffer, validation?: 'md5' | false): Promise; + completeUpload(): Promise; + abortUpload(): Promise; +} +export type MultiPartHelperGenerator = (bucket: Bucket, fileName: string, uploadId?: string, partsMap?: Map) => MultiPartUploadHelper; +export declare class MultiPartUploadError extends Error { + private uploadId; + private partsMap; + constructor(message: string, uploadId: string, partsMap: Map); +} +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * + */ +export declare class TransferManager { + bucket: Bucket; + constructor(bucket: Bucket); + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. 
+ * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * + */ + uploadManyFiles(filePathsOrDirectory: string[] | string, options?: UploadManyFilesOptions): Promise; + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. + * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. + * ``` + * + */ + downloadManyFiles(filesOrFolder: File[] | string[] | string, options?: DownloadManyFilesOptions): Promise; + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. + * + */ + /** + * Download a large file in chunks utilizing parallel download operations. This is a convenience method + * that utilizes {@link File#download} to perform the download. 
+ * + * @param {File | string} fileOrName {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * + */ + downloadFileInChunks(fileOrName: File | string, options?: DownloadFileInChunksOptions): Promise; + /** + * @typedef {object} UploadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded. + * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path. + * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified + * defaults to the specified concurrency limit. + * @property {string} [uploadId] If specified attempts to resume a previous upload. + * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk + * specified in partsMap + * @property {object} [headers] headers to be sent when initiating the multipart upload. + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload} + * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set, + * failures will be automatically aborted. + * + */ + /** + * Upload a large file in chunks utilizing parallel upload opertions. If the upload fails, an uploadId and + * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to + * resume the upload. + * + * @param {string} [filePath] The path of the file to be uploaded + * @param {UploadFileInChunksOptions} [options] Configuration options. + * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. + * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload a large file in chunks utilizing parallel operations. 
+ * //- + * const response = await transferManager.uploadFileInChunks('large-file.txt'); + * // Your bucket now contains: + * // - "large-file.txt" + * ``` + * + * + */ + uploadFileInChunks(filePath: string, options?: UploadFileInChunksOptions, generator?: MultiPartHelperGenerator): Promise; + private getPathsFromDirectory; +} diff --git a/node_modules/@google-cloud/storage/build/esm/src/transfer-manager.js b/node_modules/@google-cloud/storage/build/esm/src/transfer-manager.js new file mode 100644 index 0000000..d712344 --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/transfer-manager.js @@ -0,0 +1,633 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse; +import { FileExceptionMessages, RequestError, } from './file.js'; +import pLimit from 'p-limit'; +import * as path from 'path'; +import { createReadStream, promises as fsp } from 'fs'; +import { CRC32C } from './crc32c.js'; +import { GoogleAuth } from 'google-auth-library'; +import { XMLParser, XMLBuilder } from 'fast-xml-parser'; +import AsyncRetry from 'async-retry'; +import { createHash } from 'crypto'; +import { GCCL_GCS_CMD_KEY } from './nodejs-common/util.js'; +import { getRuntimeTrackingString, getUserAgentString } from './util.js'; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import { getPackageJSON } from './package-json-helper.cjs'; +const packageJson = getPackageJSON(); +/** + * Default number of concurrently executing promises to use when calling uploadManyFiles. + * + */ +const DEFAULT_PARALLEL_UPLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadManyFiles. + * + */ +const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5; +/** + * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling uploadFileInChunks. 
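// A short sketch showing how the defaults defined in this block (32 MiB chunks, a
// concurrency limit of 5) can be overridden per call. The bucket and object names are
// made up for illustration, and importing TransferManager from the package root is an
// assumption; the option names come from the typedefs earlier in this diff.
const {Storage, TransferManager} = require('@google-cloud/storage');
const storage = new Storage();
const transferManager = new TransferManager(storage.bucket('my-bucket'));

// Download a large object with 64 MiB chunks and 10 parallel workers, then verify the
// reassembled file against its CRC32C checksum.
await transferManager.downloadFileInChunks('large-file.txt', {
  chunkSizeBytes: 64 * 1024 * 1024,
  concurrencyLimit: 10,
  validation: 'crc32c',
});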
+ * + */ +const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * Default number of concurrently executing promises to use when calling uploadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5; +const EMPTY_REGEX = '(?:)'; +/** + * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header. + * Example: `gccl-gcs-cmd/tm.upload_many` + * + * @see {@link GCCL_GCS_CMD}. + * @see {@link GCCL_GCS_CMD_KEY}. + */ +const GCCL_GCS_CMD_FEATURE = { + UPLOAD_MANY: 'tm.upload_many', + DOWNLOAD_MANY: 'tm.download_many', + UPLOAD_SHARDED: 'tm.upload_sharded', + DOWNLOAD_SHARDED: 'tm.download_sharded', +}; +const defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => { + return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap); +}; +export class MultiPartUploadError extends Error { + constructor(message, uploadId, partsMap) { + super(message); + this.uploadId = uploadId; + this.partsMap = partsMap; + } +} +/** + * Class representing an implementation of MPU in the XML API. This class is not meant for public usage. + * + * @private + * + */ +class XMLMultiPartUploadHelper { + constructor(bucket, fileName, uploadId, partsMap) { + _XMLMultiPartUploadHelper_instances.add(this); + this.authClient = bucket.storage.authClient || new GoogleAuth(); + this.uploadId = uploadId || ''; + this.bucket = bucket; + this.fileName = fileName; + this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`; + this.xmlBuilder = new XMLBuilder({ arrayNodeName: 'Part' }); + this.xmlParser = new XMLParser(); + this.partsMap = partsMap || new Map(); + this.retryOptions = { + retries: this.bucket.storage.retryOptions.maxRetries, + factor: this.bucket.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000, + }; + } + /** + * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id. + * + * @returns {Promise} + */ + async initiateUpload(headers = {}) { + const url = `${this.baseUrl}?uploads`; + return AsyncRetry(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers), + method: 'POST', + url, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + const parsedXML = this.xmlParser.parse(res.data); + this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Uploads the provided chunk of data to the XML API using the previously created upload id. + * + * @param {number} partNumber the sequence number of this chunk. + * @param {Buffer} chunk the chunk of data to be uploaded. + * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server + * to validate the chunk was not corrupted. 
+ * @returns {Promise} + */ + async uploadPart(partNumber, chunk, validation) { + const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`; + let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this); + if (validation === 'md5') { + const hash = createHash('md5').update(chunk).digest('base64'); + headers = { + 'Content-MD5': hash, + }; + } + return AsyncRetry(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'PUT', + body: chunk, + headers, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + this.partsMap.set(partNumber, res.headers['etag']); + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Sends the final request of the MPU to tell GCS the upload is now complete. + * + * @returns {Promise} + */ + async completeUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0])); + const parts = []; + for (const entry of sortedMap.entries()) { + parts.push({ PartNumber: entry[0], ETag: entry[1] }); + } + const body = `${this.xmlBuilder.build(parts)}`; + return AsyncRetry(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this), + url, + method: 'POST', + body, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } + /** + * Aborts an multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail, + * and future requests using the upload ID fail. 
+ * + * @returns {Promise} + */ + async abortUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + return AsyncRetry(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'DELETE', + }); + if (res.data && res.data.error) { + throw res.data.error; + } + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } +} +_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) { + let headerFound = false; + let userAgentFound = false; + for (const [key, value] of Object.entries(headers)) { + if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') { + headerFound = true; + // Prepend command feature to value, if not already there + if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) { + headers[key] = `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + } + else if (key.toLocaleLowerCase().trim() === 'user-agent') { + userAgentFound = true; + } + } + // If the header isn't present, add it + if (!headerFound) { + headers['x-goog-api-client'] = `${getRuntimeTrackingString()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + // If the User-Agent isn't present, add it + if (!userAgentFound) { + headers['User-Agent'] = getUserAgentString(); + } + return headers; +}, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) { + if (this.bucket.storage.retryOptions.autoRetry && + this.bucket.storage.retryOptions.retryableErrorFn(err)) { + throw err; + } + else { + bail(err); + } +}; +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * + */ +export class TransferManager { + constructor(bucket) { + this.bucket = bucket; + } + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. 
+ * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * + */ + async uploadManyFiles(filePathsOrDirectory, options = {}) { + var _a; + if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { + options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; + } + else if (options.skipIfExists && + options.passthroughOptions === undefined) { + options.passthroughOptions = { + preconditionOpts: { + ifGenerationMatch: 0, + }, + }; + } + const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); + const promises = []; + let allPaths = []; + if (!Array.isArray(filePathsOrDirectory)) { + for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { + allPaths.push(curPath); + } + } + else { + allPaths = filePathsOrDirectory; + } + for (const filePath of allPaths) { + const stat = await fsp.lstat(filePath); + if (stat.isDirectory()) { + continue; + } + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY, + }; + passThroughOptionsCopy.destination = filePath + .split(path.sep) + .join(path.posix.sep); + if (options.prefix) { + passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination); + } + promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. 
+ * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. + * ``` + * + */ + async downloadManyFiles(filesOrFolder, options = {}) { + const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); + const promises = []; + let files = []; + if (!Array.isArray(filesOrFolder)) { + const directoryFiles = await this.bucket.getFiles({ + prefix: filesOrFolder, + }); + files = directoryFiles[0]; + } + else { + files = filesOrFolder.map(curFile => { + if (typeof curFile === 'string') { + return this.bucket.file(curFile); + } + return curFile; + }); + } + const stripRegexString = options.stripPrefix + ? `^${options.stripPrefix}` + : EMPTY_REGEX; + const regex = new RegExp(stripRegexString, 'g'); + for (const file of files) { + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY, + }; + if (options.prefix) { + passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); + } + if (options.stripPrefix) { + passThroughOptionsCopy.destination = file.name.replace(regex, ''); + } + promises.push(limit(() => file.download(passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. + * + */ + /** + * Download a large file in chunks utilizing parallel download operations. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {File | string} fileOrName {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. 
+ * //- + * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * + */ + async downloadFileInChunks(fileOrName, options = {}) { + let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + let limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); + const noReturnData = Boolean(options.noReturnData); + const promises = []; + const file = typeof fileOrName === 'string' + ? this.bucket.file(fileOrName) + : fileOrName; + const fileInfo = await file.get(); + const size = parseInt(fileInfo[0].metadata.size.toString()); + // If the file size does not meet the threshold download it as a single chunk. + if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { + limit = pLimit(1); + chunkSize = size; + } + let start = 0; + const filePath = options.destination || path.basename(file.name); + const fileToWrite = await fsp.open(filePath, 'w'); + while (start < size) { + const chunkStart = start; + let chunkEnd = start + chunkSize - 1; + chunkEnd = chunkEnd > size ? size : chunkEnd; + promises.push(limit(async () => { + const resp = await file.download({ + start: chunkStart, + end: chunkEnd, + [GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED, + }); + const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); + if (noReturnData) + return; + return result.buffer; + })); + start += chunkSize; + } + let chunks; + try { + chunks = await Promise.all(promises); + } + finally { + await fileToWrite.close(); + } + if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) { + const downloadedCrc32C = await CRC32C.fromFile(filePath); + if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) { + const mismatchError = new RequestError(FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + throw mismatchError; + } + } + if (noReturnData) + return; + return [Buffer.concat(chunks, size)]; + } + /** + * @typedef {object} UploadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded. + * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path. + * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified + * defaults to the specified concurrency limit. + * @property {string} [uploadId] If specified attempts to resume a previous upload. + * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk + * specified in partsMap + * @property {object} [headers] headers to be sent when initiating the multipart upload. + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload} + * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set, + * failures will be automatically aborted. + * + */ + /** + * Upload a large file in chunks utilizing parallel upload opertions. If the upload fails, an uploadId and + * map containing all the successfully uploaded parts will be returned to the caller. 
These arguments can be used to + * resume the upload. + * + * @param {string} [filePath] The path of the file to be uploaded + * @param {UploadFileInChunksOptions} [options] Configuration options. + * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. + * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.uploadFileInChunks('large-file.txt'); + * // Your bucket now contains: + * // - "large-file.txt" + * ``` + * + * + */ + async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) { + const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT); + const maxQueueSize = options.maxQueueSize || + options.concurrencyLimit || + DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT; + const fileName = options.uploadName || path.basename(filePath); + const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap); + let partNumber = 1; + let promises = []; + try { + if (options.uploadId === undefined) { + await mpuHelper.initiateUpload(options.headers); + } + const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize; + const readStream = createReadStream(filePath, { + highWaterMark: chunkSize, + start: startOrResumptionByte, + }); + // p-limit only limits the number of running promises. We do not want to hold an entire + // large file in memory at once so promises acts a queue that will hold only maxQueueSize in memory. + for await (const curChunk of readStream) { + if (promises.length >= maxQueueSize) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation))); + } + await Promise.all(promises); + return await mpuHelper.completeUpload(); + } + catch (e) { + if ((options.autoAbortFailure === undefined || options.autoAbortFailure) && + mpuHelper.uploadId) { + try { + await mpuHelper.abortUpload(); + return; + } + catch (e) { + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + async *getPathsFromDirectory(directory) { + const filesAndSubdirectories = await fsp.readdir(directory, { + withFileTypes: true, + }); + for (const curFileOrDirectory of filesAndSubdirectories) { + const fullPath = path.join(directory, curFileOrDirectory.name); + curFileOrDirectory.isDirectory() + ? 
yield* this.getPathsFromDirectory(fullPath) + : yield fullPath; + } + } +} diff --git a/node_modules/@google-cloud/storage/build/esm/src/util.d.ts b/node_modules/@google-cloud/storage/build/esm/src/util.d.ts new file mode 100644 index 0000000..6f3a18b --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/util.d.ts @@ -0,0 +1,88 @@ +/// +/// +/// +import * as querystring from 'querystring'; +import { PassThrough } from 'stream'; +export declare function normalize(optionsOrCallback?: T | U, cb?: U): { + options: T; + callback: U; +}; +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +export declare function objectEntries(obj: { + [key: string]: T; +}): Array<[string, T]>; +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +export declare function fixedEncodeURIComponent(str: string): string; +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. + */ +export declare function encodeURI(uri: string, encodeSlash: boolean): string; +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. + */ +export declare function qsStringify(qs: querystring.ParsedUrlQueryInput): string; +export declare function objectKeyToLowercase(object: { + [key: string]: T; +}): { + [key: string]: T; +}; +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +export declare function unicodeJSONStringify(obj: object): string; +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. + */ +export declare function convertObjKeysToSnakeCase(obj: object): object; +/** + * Formats the provided date object as a UTC ISO string. + * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. + */ +export declare function formatAsUTCISO(dateTimeToFormat: Date, includeTime?: boolean, dateDelimiter?: string, timeDelimiter?: string): string; +/** + * Examines the runtime environment and returns the appropriate tracking string. + * @returns {string} metrics tracking string based on the current runtime environment. + */ +export declare function getRuntimeTrackingString(): string; +/** + * Looks at package.json and creates the user-agent string to be applied to request headers. + * @returns {string} user agent string. 
+ */ +export declare function getUserAgentString(): string; +export declare function getDirName(): string; +export declare function getModuleFormat(): "ESM" | "CJS"; +export declare class PassThroughShim extends PassThrough { + private shouldEmitReading; + private shouldEmitWriting; + _read(size: number): void; + _write(chunk: never, encoding: BufferEncoding, callback: (error?: Error | null | undefined) => void): void; + _final(callback: (error?: Error | null | undefined) => void): void; +} diff --git a/node_modules/@google-cloud/storage/build/esm/src/util.js b/node_modules/@google-cloud/storage/build/esm/src/util.js new file mode 100644 index 0000000..120c7da --- /dev/null +++ b/node_modules/@google-cloud/storage/build/esm/src/util.js @@ -0,0 +1,229 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +import * as path from 'path'; +import * as querystring from 'querystring'; +import { PassThrough } from 'stream'; +import * as url from 'url'; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +import { getPackageJSON } from './package-json-helper.cjs'; +// Done to avoid a problem with mangling of identifiers when using esModuleInterop +const fileURLToPath = url.fileURLToPath; +const isEsm = true; +export function normalize(optionsOrCallback, cb) { + const options = (typeof optionsOrCallback === 'object' ? optionsOrCallback : {}); + const callback = (typeof optionsOrCallback === 'function' ? optionsOrCallback : cb); + return { options, callback }; +} +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +export function objectEntries(obj) { + return Object.keys(obj).map(key => [key, obj[key]]); +} +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +export function fixedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); +} +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. + */ +export function encodeURI(uri, encodeSlash) { + // Split the string by `/`, and conditionally rejoin them with either + // %2F if encodeSlash is `true`, or '/' if `false`. + return uri + .split('/') + .map(fixedEncodeURIComponent) + .join(encodeSlash ? '%2F' : '/'); +} +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. 
+ */ +export function qsStringify(qs) { + return querystring.stringify(qs, '&', '=', { + encodeURIComponent: (component) => encodeURI(component, true), + }); +} +export function objectKeyToLowercase(object) { + const newObj = {}; + for (let key of Object.keys(object)) { + const value = object[key]; + key = key.toLowerCase(); + newObj[key] = value; + } + return newObj; +} +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +export function unicodeJSONStringify(obj) { + return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, (char) => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); +} +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. + */ +export function convertObjKeysToSnakeCase(obj) { + if (obj instanceof Date || obj instanceof RegExp) { + return obj; + } + if (Array.isArray(obj)) { + return obj.map(convertObjKeysToSnakeCase); + } + if (obj instanceof Object) { + return Object.keys(obj).reduce((acc, cur) => { + const s = cur[0].toLocaleLowerCase() + + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { + return `_${p1.toLowerCase()}`; + }); + acc[s] = convertObjKeysToSnakeCase(obj[cur]); + return acc; + }, Object()); + } + return obj; +} +/** + * Formats the provided date object as a UTC ISO string. + * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. + */ +export function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { + const year = dateTimeToFormat.getUTCFullYear(); + const month = dateTimeToFormat.getUTCMonth() + 1; + const day = dateTimeToFormat.getUTCDate(); + const hour = dateTimeToFormat.getUTCHours(); + const minute = dateTimeToFormat.getUTCMinutes(); + const second = dateTimeToFormat.getUTCSeconds(); + let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month + .toString() + .padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; + if (includeTime) { + resultString = `${resultString}T${hour + .toString() + .padStart(2, '0')}${timeDelimiter}${minute + .toString() + .padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; + } + return resultString; +} +/** + * Examines the runtime environment and returns the appropriate tracking string. + * @returns {string} metrics tracking string based on the current runtime environment. + */ +export function getRuntimeTrackingString() { + if ( + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version.deno) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return `gl-deno/${globalThis.Deno.version.deno}`; + } + else { + return `gl-node/${process.versions.node}`; + } +} +/** + * Looks at package.json and creates the user-agent string to be applied to request headers. + * @returns {string} user agent string. 
+ */ +export function getUserAgentString() { + const pkg = getPackageJSON(); + const hyphenatedPackageName = pkg.name + .replace('@google-cloud', 'gcloud-node') // For legacy purposes. + .replace('/', '-'); // For UA spec-compliance purposes. + return hyphenatedPackageName + '/' + pkg.version; +} +export function getDirName() { + let dirToUse = ''; + try { + dirToUse = __dirname; + } + catch (e) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + dirToUse = path.dirname(fileURLToPath(import.meta.url)); + } + return dirToUse; +} +export function getModuleFormat() { + return isEsm ? 'ESM' : 'CJS'; +} +export class PassThroughShim extends PassThrough { + constructor() { + super(...arguments); + this.shouldEmitReading = true; + this.shouldEmitWriting = true; + } + _read(size) { + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + super._read(size); + } + _write(chunk, encoding, callback) { + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + // Per the nodejs documention, callback must be invoked on the next tick + process.nextTick(() => { + super._write(chunk, encoding, callback); + }); + } + _final(callback) { + // If the stream is empty (i.e. empty file) final will be invoked before _read / _write + // and we should still emit the proper events. + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + callback(null); + } +} diff --git a/node_modules/@google-cloud/storage/node_modules/.bin/mime b/node_modules/@google-cloud/storage/node_modules/.bin/mime new file mode 100644 index 0000000..0a62a1b --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/.bin/mime @@ -0,0 +1,12 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../mime/cli.js" "$@" +else + exec node "$basedir/../mime/cli.js" "$@" +fi diff --git a/node_modules/@google-cloud/storage/node_modules/.bin/mime.cmd b/node_modules/@google-cloud/storage/node_modules/.bin/mime.cmd new file mode 100644 index 0000000..54491f1 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/.bin/mime.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mime\cli.js" %* diff --git a/node_modules/@google-cloud/storage/node_modules/.bin/mime.ps1 b/node_modules/@google-cloud/storage/node_modules/.bin/mime.ps1 new file mode 100644 index 0000000..2222f40 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/.bin/mime.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../mime/cli.js" $args + } else { + & "$basedir/node$exe" "$basedir/../mime/cli.js" $args + } 
+ $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../mime/cli.js" $args + } else { + & "node$exe" "$basedir/../mime/cli.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/@google-cloud/storage/node_modules/mime/CHANGELOG.md b/node_modules/@google-cloud/storage/node_modules/mime/CHANGELOG.md new file mode 100644 index 0000000..cdf9be5 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/CHANGELOG.md @@ -0,0 +1,312 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [3.0.0](https://github.com/broofa/mime/compare/v2.6.0...v3.0.0) (2021-11-03) + + +### ⚠ BREAKING CHANGES + +* drop support for node < 10.x + +### Bug Fixes + +* skypack.dev for direct browser import, fixes [#263](https://github.com/broofa/mime/issues/263) ([41db4c0](https://github.com/broofa/mime/commit/41db4c042ccf50ea7baf3d2160ea37dcca37998d)) + + +### update + +* drop support for node < 10.x ([8857363](https://github.com/broofa/mime/commit/8857363ae0446ed0229b17291cf4483cf801f0d0)) + +## [2.6.0](https://github.com/broofa/mime/compare/v2.5.2...v2.6.0) (2021-11-02) + + +### Features + +* mime-db@1.50.0 ([cef0cc4](https://github.com/broofa/mime/commit/cef0cc484ff6d05ff1e12b54ca3e8b856fbc14d8)) + +### [2.5.2](https://github.com/broofa/mime/compare/v2.5.0...v2.5.2) (2021-02-17) + + +### Bug Fixes + +* update to mime-db@1.46.0, fixes [#253](https://github.com/broofa/mime/issues/253) ([f10e6aa](https://github.com/broofa/mime/commit/f10e6aa62e1356de7e2491d7fb4374c8dac65800)) + +## [2.5.0](https://github.com/broofa/mime/compare/v2.4.7...v2.5.0) (2021-01-16) + + +### Features + +* improved CLI ([#244](https://github.com/broofa/mime/issues/244)) ([c8a8356](https://github.com/broofa/mime/commit/c8a8356e3b27f3ef46b64b89b428fdb547b14d5f)) + +### [2.4.7](https://github.com/broofa/mime/compare/v2.4.6...v2.4.7) (2020-12-16) + + +### Bug Fixes + +* update to latest mime-db ([43b09ef](https://github.com/broofa/mime/commit/43b09eff0233eacc449af2b1f99a19ba9e104a44)) + +### [2.4.6](https://github.com/broofa/mime/compare/v2.4.5...v2.4.6) (2020-05-27) + + +### Bug Fixes + +* add cli.js to package.json files ([#237](https://github.com/broofa/mime/issues/237)) ([6c070bc](https://github.com/broofa/mime/commit/6c070bc298fa12a48e2ed126fbb9de641a1e7ebc)) + +### [2.4.5](https://github.com/broofa/mime/compare/v2.4.4...v2.4.5) (2020-05-01) + + +### Bug Fixes + +* fix [#236](https://github.com/broofa/mime/issues/236) ([7f4ecd0](https://github.com/broofa/mime/commit/7f4ecd0d850ed22c9e3bfda2c11fc74e4dde12a7)) +* update to latest mime-db ([c5cb3f2](https://github.com/broofa/mime/commit/c5cb3f2ab8b07642a066efbde1142af1b90c927b)) + +### [2.4.4](https://github.com/broofa/mime/compare/v2.4.3...v2.4.4) (2019-06-07) + + + +### [2.4.3](https://github.com/broofa/mime/compare/v2.4.2...v2.4.3) (2019-05-15) + + + +### [2.4.2](https://github.com/broofa/mime/compare/v2.4.1...v2.4.2) (2019-04-07) + + +### Bug Fixes + +* don't use arrow function introduced in 2.4.1 ([2e00b5c](https://github.com/broofa/mime/commit/2e00b5c)) + + + +### [2.4.1](https://github.com/broofa/mime/compare/v2.4.0...v2.4.1) (2019-04-03) + + +### Bug Fixes + +* update MDN and mime-db types ([3e567a9](https://github.com/broofa/mime/commit/3e567a9)) + + + +# [2.4.0](https://github.com/broofa/mime/compare/v2.3.1...v2.4.0) (2018-11-26) + + 
+### Features + +* Bind exported methods ([9d2a7b8](https://github.com/broofa/mime/commit/9d2a7b8)) +* update to mime-db@1.37.0 ([49e6e41](https://github.com/broofa/mime/commit/49e6e41)) + + + +### [2.3.1](https://github.com/broofa/mime/compare/v2.3.0...v2.3.1) (2018-04-11) + + +### Bug Fixes + +* fix [#198](https://github.com/broofa/mime/issues/198) ([25ca180](https://github.com/broofa/mime/commit/25ca180)) + + + +# [2.3.0](https://github.com/broofa/mime/compare/v2.2.2...v2.3.0) (2018-04-11) + + +### Bug Fixes + +* fix [#192](https://github.com/broofa/mime/issues/192) ([5c35df6](https://github.com/broofa/mime/commit/5c35df6)) + + +### Features + +* add travis-ci testing ([d64160f](https://github.com/broofa/mime/commit/d64160f)) + + + +### [2.2.2](https://github.com/broofa/mime/compare/v2.2.1...v2.2.2) (2018-03-30) + + +### Bug Fixes + +* update types files to mime-db@1.32.0 ([85aac16](https://github.com/broofa/mime/commit/85aac16)) + + +### [2.2.1](https://github.com/broofa/mime/compare/v2.2.0...v2.2.1) (2018-03-30) + + +### Bug Fixes + +* Retain type->extension mappings for non-default types. Fixes [#180](https://github.com/broofa/mime/issues/180) ([b5c83fb](https://github.com/broofa/mime/commit/b5c83fb)) + + + +# [2.2.0](https://github.com/broofa/mime/compare/v2.1.0...v2.2.0) (2018-01-04) + + +### Features + +* Retain type->extension mappings for non-default types. Fixes [#180](https://github.com/broofa/mime/issues/180) ([10f82ac](https://github.com/broofa/mime/commit/10f82ac)) + + + +# [2.1.0](https://github.com/broofa/mime/compare/v2.0.5...v2.1.0) (2017-12-22) + + +### Features + +* Upgrade to mime-db@1.32.0. Fixes [#185](https://github.com/broofa/mime/issues/185) ([3f775ba](https://github.com/broofa/mime/commit/3f775ba)) + + + +### [2.0.5](https://github.com/broofa/mime/compare/v2.0.1...v2.0.5) (2017-12-22) + + +### Bug Fixes + +* ES5 support (back to node v0.4) ([f14ccb6](https://github.com/broofa/mime/commit/f14ccb6)) + + + +# Changelog + +### v2.0.4 (24/11/2017) +- [**closed**] Switch to mime-score module for resolving extension contention issues. [#182](https://github.com/broofa/mime/issues/182) +- [**closed**] Update mime-db to 1.31.0 in v1.x branch [#181](https://github.com/broofa/mime/issues/181) + +--- + +## v1.5.0 (22/11/2017) +- [**closed**] need ES5 version ready in npm package [#179](https://github.com/broofa/mime/issues/179) +- [**closed**] mime-db no trace of iWork - pages / numbers / etc. [#178](https://github.com/broofa/mime/issues/178) +- [**closed**] How it works in brownser ? [#176](https://github.com/broofa/mime/issues/176) +- [**closed**] Missing `./Mime` [#175](https://github.com/broofa/mime/issues/175) +- [**closed**] Vulnerable Regular Expression [#167](https://github.com/broofa/mime/issues/167) + +--- + +### v2.0.3 (25/09/2017) +*No changelog for this release.* + +--- + +### v1.4.1 (25/09/2017) +- [**closed**] Issue when bundling with webpack [#172](https://github.com/broofa/mime/issues/172) + +--- + +### v2.0.2 (15/09/2017) +- [**V2**] fs.readFileSync is not a function [#165](https://github.com/broofa/mime/issues/165) +- [**closed**] The extension for video/quicktime should map to .mov, not .qt [#164](https://github.com/broofa/mime/issues/164) +- [**V2**] [v2 Feedback request] Mime class API [#163](https://github.com/broofa/mime/issues/163) +- [**V2**] [v2 Feedback request] Resolving conflicts over extensions [#162](https://github.com/broofa/mime/issues/162) +- [**V2**] Allow callers to load module with official, full, or no defined types. 
[#161](https://github.com/broofa/mime/issues/161) +- [**V2**] Use "facets" to resolve extension conflicts [#160](https://github.com/broofa/mime/issues/160) +- [**V2**] Remove fs and path dependencies [#152](https://github.com/broofa/mime/issues/152) +- [**V2**] Default content-type should not be application/octet-stream [#139](https://github.com/broofa/mime/issues/139) +- [**V2**] reset mime-types [#124](https://github.com/broofa/mime/issues/124) +- [**V2**] Extensionless paths should return null or false [#113](https://github.com/broofa/mime/issues/113) + +--- + +### v2.0.1 (14/09/2017) +- [**closed**] Changelog for v2.0 does not mention breaking changes [#171](https://github.com/broofa/mime/issues/171) +- [**closed**] MIME breaking with 'class' declaration as it is without 'use strict mode' [#170](https://github.com/broofa/mime/issues/170) + +--- + +## v2.0.0 (12/09/2017) +- [**closed**] woff and woff2 [#168](https://github.com/broofa/mime/issues/168) + +--- + +## v1.4.0 (28/08/2017) +- [**closed**] support for ac3 voc files [#159](https://github.com/broofa/mime/issues/159) +- [**closed**] Help understanding change from application/xml to text/xml [#158](https://github.com/broofa/mime/issues/158) +- [**closed**] no longer able to override mimetype [#157](https://github.com/broofa/mime/issues/157) +- [**closed**] application/vnd.adobe.photoshop [#147](https://github.com/broofa/mime/issues/147) +- [**closed**] Directories should appear as something other than application/octet-stream [#135](https://github.com/broofa/mime/issues/135) +- [**closed**] requested features [#131](https://github.com/broofa/mime/issues/131) +- [**closed**] Make types.json loading optional? [#129](https://github.com/broofa/mime/issues/129) +- [**closed**] Cannot find module './types.json' [#120](https://github.com/broofa/mime/issues/120) +- [**V2**] .wav files show up as "audio/x-wav" instead of "audio/x-wave" [#118](https://github.com/broofa/mime/issues/118) +- [**closed**] Don't be a pain in the ass for node community [#108](https://github.com/broofa/mime/issues/108) +- [**closed**] don't make default_type global [#78](https://github.com/broofa/mime/issues/78) +- [**closed**] mime.extension() fails if the content-type is parameterized [#74](https://github.com/broofa/mime/issues/74) + +--- + +### v1.3.6 (11/05/2017) +- [**closed**] .md should be text/markdown as of March 2016 [#154](https://github.com/broofa/mime/issues/154) +- [**closed**] Error while installing mime [#153](https://github.com/broofa/mime/issues/153) +- [**closed**] application/manifest+json [#149](https://github.com/broofa/mime/issues/149) +- [**closed**] Dynamic adaptive streaming over HTTP (DASH) file extension typo [#141](https://github.com/broofa/mime/issues/141) +- [**closed**] charsets image/png undefined [#140](https://github.com/broofa/mime/issues/140) +- [**closed**] Mime-db dependency out of date [#130](https://github.com/broofa/mime/issues/130) +- [**closed**] how to support plist? [#126](https://github.com/broofa/mime/issues/126) +- [**closed**] how does .types file format look like? 
[#123](https://github.com/broofa/mime/issues/123) +- [**closed**] Feature: support for expanding MIME patterns [#121](https://github.com/broofa/mime/issues/121) +- [**closed**] DEBUG_MIME doesn't work [#117](https://github.com/broofa/mime/issues/117) + +--- + +### v1.3.4 (06/02/2015) +*No changelog for this release.* + +--- + +### v1.3.3 (06/02/2015) +*No changelog for this release.* + +--- + +### v1.3.1 (05/02/2015) +- [**closed**] Consider adding support for Handlebars .hbs file ending [#111](https://github.com/broofa/mime/issues/111) +- [**closed**] Consider adding support for hjson. [#110](https://github.com/broofa/mime/issues/110) +- [**closed**] Add mime type for Opus audio files [#94](https://github.com/broofa/mime/issues/94) +- [**closed**] Consider making the `Requesting New Types` information more visible [#77](https://github.com/broofa/mime/issues/77) + +--- + +## v1.3.0 (05/02/2015) +- [**closed**] Add common name? [#114](https://github.com/broofa/mime/issues/114) +- [**closed**] application/x-yaml [#104](https://github.com/broofa/mime/issues/104) +- [**closed**] Add mime type for WOFF file format 2.0 [#102](https://github.com/broofa/mime/issues/102) +- [**closed**] application/x-msi for .msi [#99](https://github.com/broofa/mime/issues/99) +- [**closed**] Add mimetype for gettext translation files [#98](https://github.com/broofa/mime/issues/98) +- [**closed**] collaborators [#88](https://github.com/broofa/mime/issues/88) +- [**closed**] getting errot in installation of mime module...any1 can help? [#87](https://github.com/broofa/mime/issues/87) +- [**closed**] should application/json's charset be utf8? [#86](https://github.com/broofa/mime/issues/86) +- [**closed**] Add "license" and "licenses" to package.json [#81](https://github.com/broofa/mime/issues/81) +- [**closed**] lookup with extension-less file on Windows returns wrong type [#68](https://github.com/broofa/mime/issues/68) + +--- + +### v1.2.11 (15/08/2013) +- [**closed**] Update mime.types [#65](https://github.com/broofa/mime/issues/65) +- [**closed**] Publish a new version [#63](https://github.com/broofa/mime/issues/63) +- [**closed**] README should state upfront that "application/octet-stream" is default for unknown extension [#55](https://github.com/broofa/mime/issues/55) +- [**closed**] Suggested improvement to the charset API [#52](https://github.com/broofa/mime/issues/52) + +--- + +### v1.2.10 (25/07/2013) +- [**closed**] Mime type for woff files should be application/font-woff and not application/x-font-woff [#62](https://github.com/broofa/mime/issues/62) +- [**closed**] node.types in conflict with mime.types [#51](https://github.com/broofa/mime/issues/51) + +--- + +### v1.2.9 (17/01/2013) +- [**closed**] Please update "mime" NPM [#49](https://github.com/broofa/mime/issues/49) +- [**closed**] Please add semicolon [#46](https://github.com/broofa/mime/issues/46) +- [**closed**] parse full mime types [#43](https://github.com/broofa/mime/issues/43) + +--- + +### v1.2.8 (10/01/2013) +- [**closed**] /js directory mime is application/javascript. Is it correct? [#47](https://github.com/broofa/mime/issues/47) +- [**closed**] Add mime types for lua code. 
[#45](https://github.com/broofa/mime/issues/45) + +--- + +### v1.2.7 (19/10/2012) +- [**closed**] cannot install 1.2.7 via npm [#41](https://github.com/broofa/mime/issues/41) +- [**closed**] Transfer ownership to @broofa [#36](https://github.com/broofa/mime/issues/36) +- [**closed**] it's wrong to set charset to UTF-8 for text [#30](https://github.com/broofa/mime/issues/30) +- [**closed**] Allow multiple instances of MIME types container [#27](https://github.com/broofa/mime/issues/27) diff --git a/node_modules/@google-cloud/storage/node_modules/mime/LICENSE b/node_modules/@google-cloud/storage/node_modules/mime/LICENSE new file mode 100644 index 0000000..d3f46f7 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010 Benjamin Thomas, Robert Kieffer + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/@google-cloud/storage/node_modules/mime/Mime.js b/node_modules/@google-cloud/storage/node_modules/mime/Mime.js new file mode 100644 index 0000000..969a66e --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/Mime.js @@ -0,0 +1,97 @@ +'use strict'; + +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); + + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); + } + + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} + +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. 
mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); + + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; + + // '*' prefix = not the preferred type for this extension. So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } + + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' + ); + } + + this._types[ext] = type; + } + + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1); + } + } +}; + +/** + * Lookup a mime type based on extension + */ +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); + + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; + + return (hasDot || !hasPath) && this._types[ext] || null; +}; + +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; + +module.exports = Mime; diff --git a/node_modules/@google-cloud/storage/node_modules/mime/README.md b/node_modules/@google-cloud/storage/node_modules/mime/README.md new file mode 100644 index 0000000..fc816cb --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/README.md @@ -0,0 +1,178 @@ + +# Mime + +A comprehensive, compact MIME type module. + +[![Build Status](https://travis-ci.org/broofa/mime.svg?branch=master)](https://travis-ci.org/broofa/mime) + +## Install + +### NPM +``` +npm install mime +``` + +### Browser + +It is recommended that you use a bundler such as +[webpack](https://webpack.github.io/) or [browserify](http://browserify.org/) to +package your code. However, browser-ready versions are available via +skypack.dev as follows: +``` +// Full version + +``` + +``` +// "lite" version + +``` + +## Quick Start + +For the full version (800+ MIME types, 1,000+ extensions): + +```javascript +const mime = require('mime'); + +mime.getType('txt'); // ⇨ 'text/plain' +mime.getExtension('text/plain'); // ⇨ 'txt' +``` + +See [Mime API](#mime-api) below for API details. + +## Lite Version + +The "lite" version of this module omits vendor-specific (`*/vnd.*`) and +experimental (`*/x-*`) types. It weighs in at ~2.5KB, compared to 8KB for the +full version. To load the lite version: + +```javascript +const mime = require('mime/lite'); +``` + +## Mime .vs. mime-types .vs. mime-db modules + +For those of you wondering about the difference between these [popular] NPM modules, +here's a brief rundown ... + +[`mime-db`](https://github.com/jshttp/mime-db) is "the source of +truth" for MIME type information. It is not an API. 
Rather, it is a canonical +dataset of mime type definitions pulled from IANA, Apache, NGINX, and custom mappings +submitted by the Node.js community. + +[`mime-types`](https://github.com/jshttp/mime-types) is a thin +wrapper around mime-db that provides an API drop-in compatible(ish) with `mime @ < v1.3.6` API. + +`mime` is, as of v2, a self-contained module bundled with a pre-optimized version +of the `mime-db` dataset. It provides a simplified API with the following characteristics: + +* Intelligently resolved type conflicts (See [mime-score](https://github.com/broofa/mime-score) for details) +* Method naming consistent with industry best-practices +* Compact footprint. E.g. The minified+compressed sizes of the various modules: + +Module | Size +--- | --- +`mime-db` | 18 KB +`mime-types` | same as mime-db +`mime` | 8 KB +`mime/lite` | 2 KB + +## Mime API + +Both `require('mime')` and `require('mime/lite')` return instances of the MIME +class, documented below. + +Note: Inputs to this API are case-insensitive. Outputs (returned values) will +be lowercase. + +### new Mime(typeMap, ... more maps) + +Most users of this module will not need to create Mime instances directly. +However if you would like to create custom mappings, you may do so as follows +... + +```javascript +// Require Mime class +const Mime = require('mime/Mime'); + +// Define mime type -> extensions map +const typeMap = { + 'text/abc': ['abc', 'alpha', 'bet'], + 'text/def': ['leppard'] +}; + +// Create and use Mime instance +const myMime = new Mime(typeMap); +myMime.getType('abc'); // ⇨ 'text/abc' +myMime.getExtension('text/def'); // ⇨ 'leppard' +``` + +If more than one map argument is provided, each map is `define()`ed (see below), in order. + +### mime.getType(pathOrExtension) + +Get mime type for the given path or extension. E.g. + +```javascript +mime.getType('js'); // ⇨ 'application/javascript' +mime.getType('json'); // ⇨ 'application/json' + +mime.getType('txt'); // ⇨ 'text/plain' +mime.getType('dir/text.txt'); // ⇨ 'text/plain' +mime.getType('dir\\text.txt'); // ⇨ 'text/plain' +mime.getType('.text.txt'); // ⇨ 'text/plain' +mime.getType('.txt'); // ⇨ 'text/plain' +``` + +`null` is returned in cases where an extension is not detected or recognized + +```javascript +mime.getType('foo/txt'); // ⇨ null +mime.getType('bogus_type'); // ⇨ null +``` + +### mime.getExtension(type) +Get extension for the given mime type. Charset options (often included in +Content-Type headers) are ignored. + +```javascript +mime.getExtension('text/plain'); // ⇨ 'txt' +mime.getExtension('application/json'); // ⇨ 'json' +mime.getExtension('text/html; charset=utf8'); // ⇨ 'html' +``` + +### mime.define(typeMap[, force = false]) + +Define [more] type mappings. + +`typeMap` is a map of type -> extensions, as documented in `new Mime`, above. + +By default this method will throw an error if you try to map a type to an +extension that is already assigned to another type. Passing `true` for the +`force` argument will suppress this behavior (overriding any previous mapping). + +```javascript +mime.define({'text/x-abc': ['abc', 'abcd']}); + +mime.getType('abcd'); // ⇨ 'text/x-abc' +mime.getExtension('text/x-abc') // ⇨ 'abc' +``` + +## Command Line + + mime [path_or_extension] + +E.g. 
+ + > mime scripts/jquery.js + application/javascript + +---- +Markdown generated from [src/README_js.md](src/README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/node_modules/mime/cli.js b/node_modules/@google-cloud/storage/node_modules/mime/cli.js new file mode 100644 index 0000000..ab70a49 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/cli.js @@ -0,0 +1,46 @@ +#!/usr/bin/env node + +'use strict'; + +process.title = 'mime'; +let mime = require('.'); +let pkg = require('./package.json'); +let args = process.argv.splice(2); + +if (args.includes('--version') || args.includes('-v') || args.includes('--v')) { + console.log(pkg.version); + process.exit(0); +} else if (args.includes('--name') || args.includes('-n') || args.includes('--n')) { + console.log(pkg.name); + process.exit(0); +} else if (args.includes('--help') || args.includes('-h') || args.includes('--h')) { + console.log(pkg.name + ' - ' + pkg.description + '\n'); + console.log(`Usage: + + mime [flags] [path_or_extension] + + Flags: + --help, -h Show this message + --version, -v Display the version + --name, -n Print the name of the program + + Note: the command will exit after it executes if a command is specified + The path_or_extension is the path to the file or the extension of the file. + + Examples: + mime --help + mime --version + mime --name + mime -v + mime src/log.js + mime new.py + mime foo.sh + `); + process.exit(0); +} + +let file = args[0]; +let type = mime.getType(file); + +process.stdout.write(type + '\n'); + diff --git a/node_modules/@google-cloud/storage/node_modules/mime/index.js b/node_modules/@google-cloud/storage/node_modules/mime/index.js new file mode 100644 index 0000000..fadcf8d --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/index.js @@ -0,0 +1,4 @@ +'use strict'; + +let Mime = require('./Mime'); +module.exports = new Mime(require('./types/standard'), require('./types/other')); diff --git a/node_modules/@google-cloud/storage/node_modules/mime/lite.js b/node_modules/@google-cloud/storage/node_modules/mime/lite.js new file mode 100644 index 0000000..835cffb --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/lite.js @@ -0,0 +1,4 @@ +'use strict'; + +let Mime = require('./Mime'); +module.exports = new Mime(require('./types/standard')); diff --git a/node_modules/@google-cloud/storage/node_modules/mime/package.json b/node_modules/@google-cloud/storage/node_modules/mime/package.json new file mode 100644 index 0000000..84f5132 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/package.json @@ -0,0 +1,52 @@ +{ + "author": { + "name": "Robert Kieffer", + "url": "http://github.com/broofa", + "email": "robert@broofa.com" + }, + "engines": { + "node": ">=10.0.0" + }, + "bin": { + "mime": "cli.js" + }, + "contributors": [], + "description": "A comprehensive library for mime-type mapping", + "license": "MIT", + "dependencies": {}, + "devDependencies": { + "benchmark": "*", + "chalk": "4.1.2", + "eslint": "8.1.0", + "mime-db": "1.50.0", + "mime-score": "1.2.0", + "mime-types": "2.1.33", + "mocha": "9.1.3", + "runmd": "*", + "standard-version": "9.3.2" + }, + "files": [ + "index.js", + "lite.js", + "Mime.js", + "cli.js", + "/types" + ], + "scripts": { + "prepare": "node src/build.js && runmd --output README.md src/README_js.md", + "release": "standard-version", + "benchmark": "node src/benchmark.js", + "md": 
"runmd --watch --output README.md src/README_js.md", + "test": "mocha src/test.js" + }, + "keywords": [ + "util", + "mime" + ], + "name": "mime", + "repository": { + "url": "https://github.com/broofa/mime", + "type": "git" + }, + "version": "3.0.0" +} diff --git a/node_modules/@google-cloud/storage/node_modules/mime/types/other.js b/node_modules/@google-cloud/storage/node_modules/mime/types/other.js new file mode 100644 index 0000000..bb6a035 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/types/other.js @@ -0,0 +1 @@ +module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/v
nd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.ja
m":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"
application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["if
m"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","
cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgi
f":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":
["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/node_modules/mime/types/standard.js b/node_modules/@google-cloud/storage/node_modules/mime/types/standard.js new file mode 100644 index 0000000..5ee9937 --- /dev/null +++ b/node_modules/@google-cloud/storage/node_modules/mime/types/standard.js @@ -0,0 +1 @@ +module.exports = 
{"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"appli
cation/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","
jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; \ No newline at end of file diff --git a/node_modules/@google-cloud/storage/package.json b/node_modules/@google-cloud/storage/package.json new file mode 100644 index 0000000..01462e9 --- /dev/null +++ b/node_modules/@google-cloud/storage/package.json @@ -0,0 +1,133 @@ +{ + "name": "@google-cloud/storage", + "description": "Cloud Storage Client Library for Node.js", + "version": "7.9.0", + "license": "Apache-2.0", + "author": "Google Inc.", + "engines": { + "node": ">=14" + }, + "repository": "googleapis/nodejs-storage", + "main": "./build/cjs/src/index.js", + "types": "./build/cjs/src/index.d.ts", + "type": "module", + "exports": { + ".": { + "import": { + "types": "./build/esm/src/index.d.ts", + "default": "./build/esm/src/index.js" + }, + "require": { + "types": "./build/cjs/src/index.d.ts", + "default": "./build/cjs/src/index.js" + } + } + }, + "files": [ + "build/cjs/src", + "build/cjs/package.json", + "!build/cjs/src/**/*.map", + "build/esm/src", + "!build/esm/src/**/*.map" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google 
cloud", + "cloud", + "google storage", + "storage" + ], + "scripts": { + "all-test": "npm test && npm run system-test && npm run samples-test", + "benchwrapper": "node bin/benchwrapper.js", + "check": "gts check", + "clean": "rm -rf build/", + "compile:cjs": "tsc -p ./tsconfig.cjs.json", + "compile:esm": "tsc -p .", + "compile": "npm run compile:cjs && npm run compile:esm", + "conformance-test": "mocha --parallel build/cjs/conformance-test/ --require build/cjs/conformance-test/globalHooks.js", + "docs-test": "linkinator docs", + "docs": "jsdoc -c .jsdoc.json", + "fix": "gts fix", + "lint": "gts check", + "postcompile": "cp ./src/package-json-helper.cjs ./build/cjs/src && cp ./src/package-json-helper.cjs ./build/esm/src", + "postcompile:cjs": "babel --plugins gapic-tools/build/src/replaceImportMetaUrl,gapic-tools/build/src/toggleESMFlagVariable build/cjs/src/util.js -o build/cjs/src/util.js && cp internal-tooling/helpers/package.cjs.json build/cjs/package.json", + "precompile": "rm -rf build/", + "preconformance-test": "npm run compile:cjs -- --sourceMap", + "predocs-test": "npm run docs", + "predocs": "npm run compile:cjs -- --sourceMap", + "prelint": "cd samples; npm link ../; npm install", + "prepare": "npm run compile", + "presystem-test:esm": "npm run compile:esm", + "presystem-test": "npm run compile -- --sourceMap", + "pretest": "npm run compile -- --sourceMap", + "samples-test": "npm link && cd samples/ && npm link ../ && npm test && cd ../", + "system-test:esm": "mocha build/esm/system-test --timeout 600000 --exit", + "system-test": "mocha build/cjs/system-test --timeout 600000 --exit", + "test": "c8 mocha build/cjs/test" + }, + "dependencies": { + "@google-cloud/paginator": "^5.0.0", + "@google-cloud/projectify": "^4.0.0", + "@google-cloud/promisify": "^4.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "compressible": "^2.0.12", + "duplexify": "^4.1.3", + "ent": "^2.2.0", + "fast-xml-parser": "^4.3.0", + "gaxios": "^6.0.2", + "google-auth-library": "^9.6.3", + "mime": "^3.0.0", + "mime-types": "^2.0.8", + "p-limit": "^3.0.1", + "retry-request": "^7.0.0", + "teeny-request": "^9.0.0", + "uuid": "^8.0.0" + }, + "devDependencies": { + "@babel/cli": "^7.22.10", + "@babel/core": "^7.22.11", + "@google-cloud/pubsub": "^4.0.0", + "@grpc/grpc-js": "^1.0.3", + "@grpc/proto-loader": "^0.7.0", + "@types/async-retry": "^1.4.3", + "@types/compressible": "^2.0.0", + "@types/ent": "^2.2.1", + "@types/mime": "^3.0.0", + "@types/mime-types": "^2.1.0", + "@types/mocha": "^9.1.1", + "@types/mockery": "^1.4.29", + "@types/node": "^20.4.4", + "@types/node-fetch": "^2.1.3", + "@types/proxyquire": "^1.3.28", + "@types/request": "^2.48.4", + "@types/sinon": "^17.0.0", + "@types/tmp": "0.2.6", + "@types/uuid": "^8.0.0", + "@types/yargs": "^17.0.10", + "c8": "^9.0.0", + "form-data": "^4.0.0", + "gapic-tools": "^0.4.0", + "gts": "^5.0.0", + "jsdoc": "^4.0.0", + "jsdoc-fresh": "^3.0.0", + "jsdoc-region-tag": "^3.0.0", + "linkinator": "^4.0.0", + "mocha": "^9.2.2", + "mockery": "^2.1.0", + "nock": "~13.5.0", + "node-fetch": "^2.6.7", + "pack-n-play": "^2.0.0", + "proxyquire": "^2.1.3", + "sinon": "^17.0.0", + "tmp": "^0.2.0", + "typescript": "^5.1.6", + "yargs": "^17.3.1" + } +} diff --git a/node_modules/@tootallnate/once/LICENSE b/node_modules/@tootallnate/once/LICENSE new file mode 100644 index 0000000..c4c56a2 --- /dev/null +++ b/node_modules/@tootallnate/once/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Nathan Rajlich + +Permission is hereby granted, free of charge, to any 
person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@tootallnate/once/README.md b/node_modules/@tootallnate/once/README.md new file mode 100644 index 0000000..bc980fd --- /dev/null +++ b/node_modules/@tootallnate/once/README.md @@ -0,0 +1,93 @@ +# @tootallnate/once + +### Creates a Promise that waits for a single event + +## Installation + +Install with `npm`: + +```bash +$ npm install @tootallnate/once +``` + +## API + +### once(emitter: EventEmitter, name: string, opts?: OnceOptions): Promise<[...Args]> + +Creates a Promise that waits for event `name` to occur on `emitter`, and resolves +the promise with an array of the values provided to the event handler. If an +`error` event occurs before the event specified by `name`, then the Promise is +rejected with the error argument. + +```typescript +import once from '@tootallnate/once'; +import { EventEmitter } from 'events'; + +const emitter = new EventEmitter(); + +setTimeout(() => { + emitter.emit('foo', 'bar'); +}, 100); + +const [result] = await once(emitter, 'foo'); +console.log({ result }); +// { result: 'bar' } +``` + +#### Promise Strong Typing + +The main feature that this module provides over other "once" implementations is that +the Promise that is returned is _**strongly typed**_ based on the type of `emitter` +and the `name` of the event. Some examples are shown below. + +_The process "exit" event contains a single number for exit code:_ + +```typescript +const [code] = await once(process, 'exit'); +// ^ number +``` +_A child process "exit" event contains either an exit code or a signal:_ + +```typescript +const child = spawn('echo', []); +const [code, signal] = await once(child, 'exit'); +// ^ number | null +// ^ string | null +``` + +_A forked child process "message" event is type `any`, so you can cast the Promise directly:_ + +```typescript +const child = fork('file.js'); + +// With `await` +const [message, _]: [WorkerPayload, unknown] = await once(child, 'message'); + +// With Promise +const messagePromise: Promise<[WorkerPayload, unknown]> = once(child, 'message'); + +// Better yet would be to leave it as `any`, and validate the payload +// at runtime with i.e. 
`ajv` + `json-schema-to-typescript` +``` + +_If the TypeScript definition does not contain an overload for the specified event name, then the Promise will have type `unknown[]` and your code will need to narrow the result manually:_ + +```typescript +interface CustomEmitter extends EventEmitter { + on(name: 'foo', listener: (a: string, b: number) => void): this; +} + +const emitter: CustomEmitter = new EventEmitter(); + +// "foo" event is a defined overload, so it's properly typed +const fooPromise = once(emitter, 'foo'); +// ^ Promise<[a: string, b: number]> + +// "bar" event in not a defined overload, so it gets `unknown[]` +const barPromise = once(emitter, 'bar'); +// ^ Promise +``` + +### OnceOptions + +- `signal` - `AbortSignal` instance to unbind event handlers before the Promise has been fulfilled. diff --git a/node_modules/@tootallnate/once/dist/index.d.ts b/node_modules/@tootallnate/once/dist/index.d.ts new file mode 100644 index 0000000..93d02a9 --- /dev/null +++ b/node_modules/@tootallnate/once/dist/index.d.ts @@ -0,0 +1,7 @@ +/// +import { EventEmitter } from 'events'; +import { EventNames, EventListenerParameters, AbortSignal } from './types'; +export interface OnceOptions { + signal?: AbortSignal; +} +export default function once>(emitter: Emitter, name: Event, { signal }?: OnceOptions): Promise>; diff --git a/node_modules/@tootallnate/once/dist/index.js b/node_modules/@tootallnate/once/dist/index.js new file mode 100644 index 0000000..ca6385b --- /dev/null +++ b/node_modules/@tootallnate/once/dist/index.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function once(emitter, name, { signal } = {}) { + return new Promise((resolve, reject) => { + function cleanup() { + signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + } + function onEvent(...args) { + cleanup(); + resolve(args); + } + function onError(err) { + cleanup(); + reject(err); + } + signal === null || signal === void 0 ? 
void 0 : signal.addEventListener('abort', cleanup); + emitter.on(name, onEvent); + emitter.on('error', onError); + }); +} +exports.default = once; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@tootallnate/once/dist/index.js.map b/node_modules/@tootallnate/once/dist/index.js.map new file mode 100644 index 0000000..61708ca --- /dev/null +++ b/node_modules/@tootallnate/once/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAOA,SAAwB,IAAI,CAI3B,OAAgB,EAChB,IAAW,EACX,EAAE,MAAM,KAAkB,EAAE;IAE5B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,SAAS,OAAO;YACf,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YAC9C,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC1C,CAAC;QACD,SAAS,OAAO,CAAC,GAAG,IAAW;YAC9B,OAAO,EAAE,CAAC;YACV,OAAO,CAAC,IAA+C,CAAC,CAAC;QAC1D,CAAC;QACD,SAAS,OAAO,CAAC,GAAU;YAC1B,OAAO,EAAE,CAAC;YACV,MAAM,CAAC,GAAG,CAAC,CAAC;QACb,CAAC;QACD,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC3C,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACJ,CAAC;AA1BD,uBA0BC"} \ No newline at end of file diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts b/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts new file mode 100644 index 0000000..eb2bbc6 --- /dev/null +++ b/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts @@ -0,0 +1,231 @@ +export declare type OverloadedParameters = T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; + (...args: infer A19): any; + (...args: infer A20): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 | A20 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; + (...args: infer A19): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; +} ? 
A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; +} ? 
A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; +} ? A1 | A2 | A3 | A4 | A5 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; +} ? A1 | A2 | A3 | A4 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; +} ? A1 | A2 | A3 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; +} ? A1 | A2 : T extends { + (...args: infer A1): any; +} ? A1 : any; diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.js b/node_modules/@tootallnate/once/dist/overloaded-parameters.js new file mode 100644 index 0000000..207186d --- /dev/null +++ b/node_modules/@tootallnate/once/dist/overloaded-parameters.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=overloaded-parameters.js.map \ No newline at end of file diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map b/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map new file mode 100644 index 0000000..863f146 --- /dev/null +++ b/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map @@ -0,0 +1 @@ +{"version":3,"file":"overloaded-parameters.js","sourceRoot":"","sources":["../src/overloaded-parameters.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@tootallnate/once/dist/types.d.ts b/node_modules/@tootallnate/once/dist/types.d.ts new file mode 100644 index 0000000..58be828 --- /dev/null +++ b/node_modules/@tootallnate/once/dist/types.d.ts @@ -0,0 +1,17 @@ +/// +import { EventEmitter } from 'events'; +import { OverloadedParameters } from './overloaded-parameters'; +export declare type FirstParameter = T extends [infer R, ...any[]] ? R : never; +export declare type EventListener = F extends [ + T, + infer R, + ...any[] +] ? 
R : never; +export declare type EventParameters = OverloadedParameters; +export declare type EventNames = FirstParameter>; +export declare type EventListenerParameters> = WithDefault, Event>>, unknown[]>; +export declare type WithDefault = [T] extends [never] ? D : T; +export interface AbortSignal { + addEventListener: (name: string, listener: (...args: any[]) => any) => void; + removeEventListener: (name: string, listener: (...args: any[]) => any) => void; +} diff --git a/node_modules/@tootallnate/once/dist/types.js b/node_modules/@tootallnate/once/dist/types.js new file mode 100644 index 0000000..11e638d --- /dev/null +++ b/node_modules/@tootallnate/once/dist/types.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@tootallnate/once/dist/types.js.map b/node_modules/@tootallnate/once/dist/types.js.map new file mode 100644 index 0000000..c768b79 --- /dev/null +++ b/node_modules/@tootallnate/once/dist/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@tootallnate/once/package.json b/node_modules/@tootallnate/once/package.json new file mode 100644 index 0000000..69ce947 --- /dev/null +++ b/node_modules/@tootallnate/once/package.json @@ -0,0 +1,52 @@ +{ + "name": "@tootallnate/once", + "version": "2.0.0", + "description": "Creates a Promise that waits for a single event", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "jest", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/once.git" + }, + "keywords": [], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/once/issues" + }, + "devDependencies": { + "@types/jest": "^27.0.2", + "@types/node": "^12.12.11", + "abort-controller": "^3.0.0", + "jest": "^27.2.1", + "rimraf": "^3.0.0", + "ts-jest": "^27.0.5", + "typescript": "^4.4.3" + }, + "engines": { + "node": ">= 10" + }, + "jest": { + "preset": "ts-jest", + "globals": { + "ts-jest": { + "diagnostics": false, + "isolatedModules": true + } + }, + "verbose": false, + "testEnvironment": "node", + "testMatch": [ + "/test/**/*.test.ts" + ] + } +} diff --git a/node_modules/@types/caseless/LICENSE b/node_modules/@types/caseless/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/caseless/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/caseless/README.md b/node_modules/@types/caseless/README.md new file mode 100644 index 0000000..b7e6d16 --- /dev/null +++ b/node_modules/@types/caseless/README.md @@ -0,0 +1,48 @@ +# Installation +> `npm install --save @types/caseless` + +# Summary +This package contains type definitions for caseless (https://github.com/mikeal/caseless). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/caseless. +## [index.d.ts](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/caseless/index.d.ts) +````ts +type KeyType = string; +type ValueType = any; +type RawDict = object; + +declare function caseless(dict?: RawDict): caseless.Caseless; + +declare namespace caseless { + function httpify(resp: object, headers: RawDict): Caseless; + + interface Caseless { + set(name: KeyType, value: ValueType, clobber?: boolean): KeyType | false; + set(dict: RawDict): void; + has(name: KeyType): KeyType | false; + get(name: KeyType): ValueType | undefined; + swap(name: KeyType): void; + del(name: KeyType): boolean; + } + + interface Httpified { + headers: RawDict; + setHeader(name: KeyType, value: ValueType, clobber?: boolean): KeyType | false; + setHeader(dict: RawDict): void; + hasHeader(name: KeyType): KeyType | false; + getHeader(name: KeyType): ValueType | undefined; + removeHeader(name: KeyType): boolean; + } +} + +export = caseless; + +```` + +### Additional Details + * Last updated: Mon, 06 Nov 2023 22:41:05 GMT + * Dependencies: none + +# Credits +These definitions were written by [downace](https://github.com/downace), [Matt R. Wilson](https://github.com/mastermatt), and [Emily Klassen](https://github.com/forivall). 
diff --git a/node_modules/@types/caseless/index.d.ts b/node_modules/@types/caseless/index.d.ts new file mode 100644 index 0000000..c0fa44c --- /dev/null +++ b/node_modules/@types/caseless/index.d.ts @@ -0,0 +1,29 @@ +type KeyType = string; +type ValueType = any; +type RawDict = object; + +declare function caseless(dict?: RawDict): caseless.Caseless; + +declare namespace caseless { + function httpify(resp: object, headers: RawDict): Caseless; + + interface Caseless { + set(name: KeyType, value: ValueType, clobber?: boolean): KeyType | false; + set(dict: RawDict): void; + has(name: KeyType): KeyType | false; + get(name: KeyType): ValueType | undefined; + swap(name: KeyType): void; + del(name: KeyType): boolean; + } + + interface Httpified { + headers: RawDict; + setHeader(name: KeyType, value: ValueType, clobber?: boolean): KeyType | false; + setHeader(dict: RawDict): void; + hasHeader(name: KeyType): KeyType | false; + getHeader(name: KeyType): ValueType | undefined; + removeHeader(name: KeyType): boolean; + } +} + +export = caseless; diff --git a/node_modules/@types/caseless/package.json b/node_modules/@types/caseless/package.json new file mode 100644 index 0000000..b50c59f --- /dev/null +++ b/node_modules/@types/caseless/package.json @@ -0,0 +1,35 @@ +{ + "name": "@types/caseless", + "version": "0.12.5", + "description": "TypeScript definitions for caseless", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/caseless", + "license": "MIT", + "contributors": [ + { + "name": "downace", + "githubUsername": "downace", + "url": "https://github.com/downace" + }, + { + "name": "Matt R. Wilson", + "githubUsername": "mastermatt", + "url": "https://github.com/mastermatt" + }, + { + "name": "Emily Klassen", + "githubUsername": "forivall", + "url": "https://github.com/forivall" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/caseless" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "89139c26e367c4adc78c0db606bf3e7b208f60498430b2c167551f7169d0b81c", + "typeScriptVersion": "4.5" +} \ No newline at end of file diff --git a/node_modules/@types/node/LICENSE b/node_modules/@types/node/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/node/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/node/README.md b/node_modules/@types/node/README.md new file mode 100644 index 0000000..b67d126 --- /dev/null +++ b/node_modules/@types/node/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/node` + +# Summary +This package contains type definitions for node (https://nodejs.org/). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node. + +### Additional Details + * Last updated: Sat, 30 Mar 2024 05:35:18 GMT + * Dependencies: [undici-types](https://npmjs.com/package/undici-types) + +# Credits +These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Victor Perin](https://github.com/victorperin), [Yongsheng Zhang](https://github.com/ZYSzys), [NodeJS Contributors](https://github.com/NodeJS), [Linus Unnebäck](https://github.com/LinusU), [wafuwafu13](https://github.com/wafuwafu13), [Matteo Collina](https://github.com/mcollina), and [Dmitry Semigradsky](https://github.com/Semigradsky). diff --git a/node_modules/@types/node/assert.d.ts b/node_modules/@types/node/assert.d.ts new file mode 100644 index 0000000..cd82143 --- /dev/null +++ b/node_modules/@types/node/assert.d.ts @@ -0,0 +1,996 @@ +/** + * The `node:assert` module provides a set of assertion functions for verifying + * invariants. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/assert.js) + */ +declare module "assert" { + /** + * An alias of {@link ok}. + * @since v0.5.9 + * @param value The input that is checked for being truthy. + */ + function assert(value: unknown, message?: string | Error): asserts value; + namespace assert { + /** + * Indicates the failure of an assertion. All errors thrown by the `node:assert`module will be instances of the `AssertionError` class. 
+ */ + class AssertionError extends Error { + /** + * Set to the `actual` argument for methods such as {@link assert.strictEqual()}. + */ + actual: unknown; + /** + * Set to the `expected` argument for methods such as {@link assert.strictEqual()}. + */ + expected: unknown; + /** + * Set to the passed in operator value. + */ + operator: string; + /** + * Indicates if the message was auto-generated (`true`) or not. + */ + generatedMessage: boolean; + /** + * Value is always `ERR_ASSERTION` to show that the error is an assertion error. + */ + code: "ERR_ASSERTION"; + constructor(options?: { + /** If provided, the error message is set to this value. */ + message?: string | undefined; + /** The `actual` property on the error instance. */ + actual?: unknown | undefined; + /** The `expected` property on the error instance. */ + expected?: unknown | undefined; + /** The `operator` property on the error instance. */ + operator?: string | undefined; + /** If provided, the generated stack trace omits frames before this function. */ + // eslint-disable-next-line @typescript-eslint/ban-types + stackStartFn?: Function | undefined; + }); + } + /** + * This feature is deprecated and will be removed in a future version. + * Please consider using alternatives such as the `mock` helper function. + * @since v14.2.0, v12.19.0 + * @deprecated Deprecated + */ + class CallTracker { + /** + * The wrapper function is expected to be called exactly `exact` times. If the + * function has not been called exactly `exact` times when `tracker.verify()` is called, then `tracker.verify()` will throw an + * error. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func); + * ``` + * @since v14.2.0, v12.19.0 + * @param [fn='A no-op function'] + * @param [exact=1] + * @return that wraps `fn`. + */ + calls(exact?: number): () => void; + calls any>(fn?: Func, exact?: number): Func; + /** + * Example: + * + * ```js + * import assert from 'node:assert'; + * + * const tracker = new assert.CallTracker(); + * + * function func() {} + * const callsfunc = tracker.calls(func); + * callsfunc(1, 2, 3); + * + * assert.deepStrictEqual(tracker.getCalls(callsfunc), + * [{ thisArg: undefined, arguments: [1, 2, 3] }]); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn + * @return An Array with all the calls to a tracked function. + */ + getCalls(fn: Function): CallTrackerCall[]; + /** + * The arrays contains information about the expected and actual number of calls of + * the functions that have not been called the expected number of times. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). 
+ * const callsfunc = tracker.calls(func, 2); + * + * // Returns an array containing information on callsfunc() + * console.log(tracker.report()); + * // [ + * // { + * // message: 'Expected the func function to be executed 2 time(s) but was + * // executed 0 time(s).', + * // actual: 0, + * // expected: 2, + * // operator: 'func', + * // stack: stack trace + * // } + * // ] + * ``` + * @since v14.2.0, v12.19.0 + * @return An Array of objects containing information about the wrapper functions returned by `calls`. + */ + report(): CallTrackerReportInformation[]; + /** + * Reset calls of the call tracker. + * If a tracked function is passed as an argument, the calls will be reset for it. + * If no arguments are passed, all tracked functions will be reset. + * + * ```js + * import assert from 'node:assert'; + * + * const tracker = new assert.CallTracker(); + * + * function func() {} + * const callsfunc = tracker.calls(func); + * + * callsfunc(); + * // Tracker was called once + * assert.strictEqual(tracker.getCalls(callsfunc).length, 1); + * + * tracker.reset(callsfunc); + * assert.strictEqual(tracker.getCalls(callsfunc).length, 0); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn a tracked function to reset. + */ + reset(fn?: Function): void; + /** + * Iterates through the list of functions passed to `tracker.calls()` and will throw an error for functions that + * have not been called the expected number of times. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func, 2); + * + * callsfunc(); + * + * // Will throw an error since callsfunc() was only called once. + * tracker.verify(); + * ``` + * @since v14.2.0, v12.19.0 + */ + verify(): void; + } + interface CallTrackerCall { + thisArg: object; + arguments: unknown[]; + } + interface CallTrackerReportInformation { + message: string; + /** The actual number of times the function was called. */ + actual: number; + /** The number of times the function was expected to be called. */ + expected: number; + /** The name of the function that is wrapped. */ + operator: string; + /** A stack trace of the function. */ + stack: object; + } + type AssertPredicate = RegExp | (new() => object) | ((thrown: unknown) => boolean) | object | Error; + /** + * Throws an `AssertionError` with the provided error message or a default + * error message. If the `message` parameter is an instance of an `Error` then + * it will be thrown instead of the `AssertionError`. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.fail(); + * // AssertionError [ERR_ASSERTION]: Failed + * + * assert.fail('boom'); + * // AssertionError [ERR_ASSERTION]: boom + * + * assert.fail(new TypeError('need array')); + * // TypeError: need array + * ``` + * + * Using `assert.fail()` with more than two arguments is possible but deprecated. + * See below for further details. + * @since v0.1.21 + * @param [message='Failed'] + */ + function fail(message?: string | Error): never; + /** @deprecated since v10.0.0 - use fail([message]) or other assert functions instead. */ + function fail( + actual: unknown, + expected: unknown, + message?: string | Error, + operator?: string, + // eslint-disable-next-line @typescript-eslint/ban-types + stackStartFn?: Function, + ): never; + /** + * Tests if `value` is truthy. 
It is equivalent to`assert.equal(!!value, true, message)`. + * + * If `value` is not truthy, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is `undefined`, a default + * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * If no arguments are passed in at all `message` will be set to the string:`` 'No value argument passed to `assert.ok()`' ``. + * + * Be aware that in the `repl` the error message will be different to the one + * thrown in a file! See below for further details. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.ok(true); + * // OK + * assert.ok(1); + * // OK + * + * assert.ok(); + * // AssertionError: No value argument passed to `assert.ok()` + * + * assert.ok(false, 'it\'s false'); + * // AssertionError: it's false + * + * // In the repl: + * assert.ok(typeof 123 === 'string'); + * // AssertionError: false == true + * + * // In a file (e.g. test.js): + * assert.ok(typeof 123 === 'string'); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(typeof 123 === 'string') + * + * assert.ok(false); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(false) + * + * assert.ok(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(0) + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * // Using `assert()` works the same: + * assert(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert(0) + * ``` + * @since v0.1.21 + */ + function ok(value: unknown, message?: string | Error): asserts value; + /** + * **Strict assertion mode** + * + * An alias of {@link strictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link strictEqual} instead. + * + * Tests shallow, coercive equality between the `actual` and `expected` parameters + * using the [`==` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Equality). `NaN` is specially handled + * and treated as being identical if both sides are `NaN`. + * + * ```js + * import assert from 'node:assert'; + * + * assert.equal(1, 1); + * // OK, 1 == 1 + * assert.equal(1, '1'); + * // OK, 1 == '1' + * assert.equal(NaN, NaN); + * // OK + * + * assert.equal(1, 2); + * // AssertionError: 1 == 2 + * assert.equal({ a: { b: 1 } }, { a: { b: 1 } }); + * // AssertionError: { a: { b: 1 } } == { a: { b: 1 } } + * ``` + * + * If the values are not equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default + * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * @since v0.1.21 + */ + function equal(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notStrictEqual} instead. + * + * Tests shallow, coercive inequality with the [`!=` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Inequality). `NaN` is + * specially handled and treated as being identical if both sides are `NaN`. 
+ * + * ```js + * import assert from 'node:assert'; + * + * assert.notEqual(1, 2); + * // OK + * + * assert.notEqual(1, 1); + * // AssertionError: 1 != 1 + * + * assert.notEqual(1, '1'); + * // AssertionError: 1 != '1' + * ``` + * + * If the values are equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default error + * message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * @since v0.1.21 + */ + function notEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link deepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link deepStrictEqual} instead. + * + * Tests for deep equality between the `actual` and `expected` parameters. Consider + * using {@link deepStrictEqual} instead. {@link deepEqual} can have + * surprising results. + * + * _Deep equality_ means that the enumerable "own" properties of child objects + * are also recursively evaluated by the following rules. + * @since v0.1.21 + */ + function deepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notDeepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notDeepStrictEqual} instead. + * + * Tests for any deep inequality. Opposite of {@link deepEqual}. + * + * ```js + * import assert from 'node:assert'; + * + * const obj1 = { + * a: { + * b: 1, + * }, + * }; + * const obj2 = { + * a: { + * b: 2, + * }, + * }; + * const obj3 = { + * a: { + * b: 1, + * }, + * }; + * const obj4 = { __proto__: obj1 }; + * + * assert.notDeepEqual(obj1, obj1); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj2); + * // OK + * + * assert.notDeepEqual(obj1, obj3); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj4); + * // OK + * ``` + * + * If the values are deeply equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a default + * error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notDeepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests strict equality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.strictEqual(1, 2); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + * // 1 !== 2 + * + * assert.strictEqual(1, 1); + * // OK + * + * assert.strictEqual('Hello foobar', 'Hello World!'); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + actual - expected + * // + * // + 'Hello foobar' + * // - 'Hello World!' 
+ * // ^ + * + * const apples = 1; + * const oranges = 2; + * assert.strictEqual(apples, oranges, `apples ${apples} !== oranges ${oranges}`); + * // AssertionError [ERR_ASSERTION]: apples 1 !== oranges 2 + * + * assert.strictEqual(1, '1', new TypeError('Inputs are not identical')); + * // TypeError: Inputs are not identical + * ``` + * + * If the values are not strictly equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a + * default error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function strictEqual(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests strict inequality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.notStrictEqual(1, 2); + * // OK + * + * assert.notStrictEqual(1, 1); + * // AssertionError [ERR_ASSERTION]: Expected "actual" to be strictly unequal to: + * // + * // 1 + * + * assert.notStrictEqual(1, '1'); + * // OK + * ``` + * + * If the values are strictly equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a + * default error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests for deep equality between the `actual` and `expected` parameters. + * "Deep" equality means that the enumerable "own" properties of child objects + * are recursively evaluated also by the following rules. + * @since v1.2.0 + */ + function deepStrictEqual(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests for deep strict inequality. Opposite of {@link deepStrictEqual}. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.notDeepStrictEqual({ a: 1 }, { a: '1' }); + * // OK + * ``` + * + * If the values are deeply and strictly equal, an `AssertionError` is thrown + * with a `message` property set equal to the value of the `message` parameter. If + * the `message` parameter is undefined, a default error message is assigned. If + * the `message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v1.2.0 + */ + function notDeepStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Expects the function `fn` to throw an error. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * a validation object where each property will be tested for strict deep equality, + * or an instance of error where each property will be tested for strict deep + * equality including the non-enumerable `message` and `name` properties. When + * using an object, it is also possible to use a regular expression, when + * validating against a string property. See below for examples. 
+ * + * If specified, `message` will be appended to the message provided by the`AssertionError` if the `fn` call fails to throw or in case the error validation + * fails. + * + * Custom validation object/error instance: + * + * ```js + * import assert from 'node:assert/strict'; + * + * const err = new TypeError('Wrong value'); + * err.code = 404; + * err.foo = 'bar'; + * err.info = { + * nested: true, + * baz: 'text', + * }; + * err.reg = /abc/i; + * + * assert.throws( + * () => { + * throw err; + * }, + * { + * name: 'TypeError', + * message: 'Wrong value', + * info: { + * nested: true, + * baz: 'text', + * }, + * // Only properties on the validation object will be tested for. + * // Using nested objects requires all properties to be present. Otherwise + * // the validation is going to fail. + * }, + * ); + * + * // Using regular expressions to validate error properties: + * assert.throws( + * () => { + * throw err; + * }, + * { + * // The `name` and `message` properties are strings and using regular + * // expressions on those will match against the string. If they fail, an + * // error is thrown. + * name: /^TypeError$/, + * message: /Wrong/, + * foo: 'bar', + * info: { + * nested: true, + * // It is not possible to use regular expressions for nested properties! + * baz: 'text', + * }, + * // The `reg` property contains a regular expression and only if the + * // validation object contains an identical regular expression, it is going + * // to pass. + * reg: /abc/i, + * }, + * ); + * + * // Fails due to the different `message` and `name` properties: + * assert.throws( + * () => { + * const otherErr = new Error('Not found'); + * // Copy all enumerable properties from `err` to `otherErr`. + * for (const [key, value] of Object.entries(err)) { + * otherErr[key] = value; + * } + * throw otherErr; + * }, + * // The error's `message` and `name` properties will also be checked when using + * // an error as validation object. + * err, + * ); + * ``` + * + * Validate instanceof using constructor: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * Error, + * ); + * ``` + * + * Validate error message using [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions): + * + * Using a regular expression runs `.toString` on the error object, and will + * therefore also include the error name. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * /^Error: Wrong value$/, + * ); + * ``` + * + * Custom error validation: + * + * The function must return `true` to indicate all internal validations passed. + * It will otherwise fail with an `AssertionError`. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * (err) => { + * assert(err instanceof Error); + * assert(/value/.test(err)); + * // Avoid returning anything from validation functions besides `true`. + * // Otherwise, it's not clear what part of the validation failed. Instead, + * // throw an error about the specific validation that failed (as done in this + * // example) and add as much helpful debugging information to that error as + * // possible. + * return true; + * }, + * 'unexpected error', + * ); + * ``` + * + * `error` cannot be a string. 
If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for`message` instead. This can lead to easy-to-miss mistakes. Using the same + * message as the thrown error message is going to result in an`ERR_AMBIGUOUS_ARGUMENT` error. Please read the example below carefully if using + * a string as the second argument gets considered: + * + * ```js + * import assert from 'node:assert/strict'; + * + * function throwingFirst() { + * throw new Error('First'); + * } + * + * function throwingSecond() { + * throw new Error('Second'); + * } + * + * function notThrowing() {} + * + * // The second argument is a string and the input function threw an Error. + * // The first case will not throw as it does not match for the error message + * // thrown by the input function! + * assert.throws(throwingFirst, 'Second'); + * // In the next example the message has no benefit over the message from the + * // error and since it is not clear if the user intended to actually match + * // against the error message, Node.js throws an `ERR_AMBIGUOUS_ARGUMENT` error. + * assert.throws(throwingSecond, 'Second'); + * // TypeError [ERR_AMBIGUOUS_ARGUMENT] + * + * // The string is only used (as message) in case the function does not throw: + * assert.throws(notThrowing, 'Second'); + * // AssertionError [ERR_ASSERTION]: Missing expected exception: Second + * + * // If it was intended to match for the error message do this instead: + * // It does not throw because the error messages match. + * assert.throws(throwingSecond, /Second$/); + * + * // If the error message does not match, an AssertionError is thrown. + * assert.throws(throwingFirst, /Second$/); + * // AssertionError [ERR_ASSERTION] + * ``` + * + * Due to the confusing error-prone notation, avoid a string as the second + * argument. + * @since v0.1.21 + */ + function throws(block: () => unknown, message?: string | Error): void; + function throws(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Asserts that the function `fn` does not throw an error. + * + * Using `assert.doesNotThrow()` is actually not useful because there + * is no benefit in catching an error and then rethrowing it. Instead, consider + * adding a comment next to the specific code path that should not throw and keep + * error messages as expressive as possible. + * + * When `assert.doesNotThrow()` is called, it will immediately call the `fn`function. + * + * If an error is thrown and it is the same type as that specified by the `error`parameter, then an `AssertionError` is thrown. If the error is of a + * different type, or if the `error` parameter is undefined, the error is + * propagated back to the caller. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), or a validation + * function. See {@link throws} for more details. 
+ * + * The following, for instance, will throw the `TypeError` because there is no + * matching error type in the assertion: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError, + * ); + * ``` + * + * However, the following will result in an `AssertionError` with the message + * 'Got unwanted exception...': + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * TypeError, + * ); + * ``` + * + * If an `AssertionError` is thrown and a value is provided for the `message`parameter, the value of `message` will be appended to the `AssertionError` message: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * /Wrong value/, + * 'Whoops', + * ); + * // Throws: AssertionError: Got unwanted exception: Whoops + * ``` + * @since v0.1.21 + */ + function doesNotThrow(block: () => unknown, message?: string | Error): void; + function doesNotThrow(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Throws `value` if `value` is not `undefined` or `null`. This is useful when + * testing the `error` argument in callbacks. The stack trace contains all frames + * from the error passed to `ifError()` including the potential new frames for`ifError()` itself. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.ifError(null); + * // OK + * assert.ifError(0); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 0 + * assert.ifError('error'); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 'error' + * assert.ifError(new Error()); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: Error + * + * // Create some random error frames. + * let err; + * (function errorFrame() { + * err = new Error('test error'); + * })(); + * + * (function ifErrorFrame() { + * assert.ifError(err); + * })(); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: test error + * // at ifErrorFrame + * // at errorFrame + * ``` + * @since v0.1.97 + */ + function ifError(value: unknown): asserts value is null | undefined; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is rejected. + * + * If `asyncFn` is a function and it throws an error synchronously,`assert.rejects()` will return a rejected `Promise` with that error. If the + * function does not return a promise, `assert.rejects()` will return a rejected`Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases the error + * handler is skipped. + * + * Besides the async nature to await the completion behaves identically to {@link throws}. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * an object where each property will be tested for, or an instance of error where + * each property will be tested for including the non-enumerable `message` and`name` properties. + * + * If specified, `message` will be the message provided by the `AssertionError` if the `asyncFn` fails to reject. 
+ * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * { + * name: 'TypeError', + * message: 'Wrong value', + * }, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * (err) => { + * assert.strictEqual(err.name, 'TypeError'); + * assert.strictEqual(err.message, 'Wrong value'); + * return true; + * }, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.rejects( + * Promise.reject(new Error('Wrong value')), + * Error, + * ).then(() => { + * // ... + * }); + * ``` + * + * `error` cannot be a string. If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for`message` instead. This can lead to easy-to-miss mistakes. Please read the + * example in {@link throws} carefully if using a string as the second + * argument gets considered. + * @since v10.0.0 + */ + function rejects(block: (() => Promise) | Promise, message?: string | Error): Promise; + function rejects( + block: (() => Promise) | Promise, + error: AssertPredicate, + message?: string | Error, + ): Promise; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is not rejected. + * + * If `asyncFn` is a function and it throws an error synchronously,`assert.doesNotReject()` will return a rejected `Promise` with that error. If + * the function does not return a promise, `assert.doesNotReject()` will return a + * rejected `Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases + * the error handler is skipped. + * + * Using `assert.doesNotReject()` is actually not useful because there is little + * benefit in catching a rejection and then rejecting it again. Instead, consider + * adding a comment next to the specific code path that should not reject and keep + * error messages as expressive as possible. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), or a validation + * function. See {@link throws} for more details. + * + * Besides the async nature to await the completion behaves identically to {@link doesNotThrow}. + * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.doesNotReject( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotReject(Promise.reject(new TypeError('Wrong value'))) + * .then(() => { + * // ... + * }); + * ``` + * @since v10.0.0 + */ + function doesNotReject( + block: (() => Promise) | Promise, + message?: string | Error, + ): Promise; + function doesNotReject( + block: (() => Promise) | Promise, + error: AssertPredicate, + message?: string | Error, + ): Promise; + /** + * Expects the `string` input to match the regular expression. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.match('I will fail', /pass/); + * // AssertionError [ERR_ASSERTION]: The input did not match the regular ... + * + * assert.match(123, /pass/); + * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string. 
+ * + * assert.match('I will pass', /pass/); + * // OK + * ``` + * + * If the values do not match, or if the `string` argument is of another type than`string`, an `AssertionError` is thrown with a `message` property set equal + * to the value of the `message` parameter. If the `message` parameter is + * undefined, a default error message is assigned. If the `message` parameter is an + * instance of an `Error` then it will be thrown instead of the `AssertionError`. + * @since v13.6.0, v12.16.0 + */ + function match(value: string, regExp: RegExp, message?: string | Error): void; + /** + * Expects the `string` input not to match the regular expression. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotMatch('I will fail', /fail/); + * // AssertionError [ERR_ASSERTION]: The input was expected to not match the ... + * + * assert.doesNotMatch(123, /pass/); + * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string. + * + * assert.doesNotMatch('I will pass', /different/); + * // OK + * ``` + * + * If the values do match, or if the `string` argument is of another type than`string`, an `AssertionError` is thrown with a `message` property set equal + * to the value of the `message` parameter. If the `message` parameter is + * undefined, a default error message is assigned. If the `message` parameter is an + * instance of an `Error` then it will be thrown instead of the `AssertionError`. + * @since v13.6.0, v12.16.0 + */ + function doesNotMatch(value: string, regExp: RegExp, message?: string | Error): void; + const strict: + & Omit< + typeof assert, + | "equal" + | "notEqual" + | "deepEqual" + | "notDeepEqual" + | "ok" + | "strictEqual" + | "deepStrictEqual" + | "ifError" + | "strict" + > + & { + (value: unknown, message?: string | Error): asserts value; + equal: typeof strictEqual; + notEqual: typeof notStrictEqual; + deepEqual: typeof deepStrictEqual; + notDeepEqual: typeof notDeepStrictEqual; + // Mapped types and assertion functions are incompatible? + // TS2775: Assertions require every name in the call target + // to be declared with an explicit type annotation. + ok: typeof ok; + strictEqual: typeof strictEqual; + deepStrictEqual: typeof deepStrictEqual; + ifError: typeof ifError; + strict: typeof strict; + }; + } + export = assert; +} +declare module "node:assert" { + import assert = require("assert"); + export = assert; +} diff --git a/node_modules/@types/node/assert/strict.d.ts b/node_modules/@types/node/assert/strict.d.ts new file mode 100644 index 0000000..f333913 --- /dev/null +++ b/node_modules/@types/node/assert/strict.d.ts @@ -0,0 +1,8 @@ +declare module "assert/strict" { + import { strict } from "node:assert"; + export = strict; +} +declare module "node:assert/strict" { + import { strict } from "node:assert"; + export = strict; +} diff --git a/node_modules/@types/node/async_hooks.d.ts b/node_modules/@types/node/async_hooks.d.ts new file mode 100644 index 0000000..0667a61 --- /dev/null +++ b/node_modules/@types/node/async_hooks.d.ts @@ -0,0 +1,539 @@ +/** + * We strongly discourage the use of the `async_hooks` API. + * Other APIs that can cover most of its use cases include: + * + * * `AsyncLocalStorage` tracks async context + * * `process.getActiveResourcesInfo()` tracks active resources + * + * The `node:async_hooks` module provides an API to track asynchronous resources. 
+ * It can be accessed using: + * + * ```js + * import async_hooks from 'node:async_hooks'; + * ``` + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/async_hooks.js) + */ +declare module "async_hooks" { + /** + * ```js + * import { executionAsyncId } from 'node:async_hooks'; + * import fs from 'node:fs'; + * + * console.log(executionAsyncId()); // 1 - bootstrap + * const path = '.'; + * fs.open(path, 'r', (err, fd) => { + * console.log(executionAsyncId()); // 6 - open() + * }); + * ``` + * + * The ID returned from `executionAsyncId()` is related to execution timing, not + * causality (which is covered by `triggerAsyncId()`): + * + * ```js + * const server = net.createServer((conn) => { + * // Returns the ID of the server, not of the new connection, because the + * // callback runs in the execution scope of the server's MakeCallback(). + * async_hooks.executionAsyncId(); + * + * }).listen(port, () => { + * // Returns the ID of a TickObject (process.nextTick()) because all + * // callbacks passed to .listen() are wrapped in a nextTick(). + * async_hooks.executionAsyncId(); + * }); + * ``` + * + * Promise contexts may not get precise `executionAsyncIds` by default. + * See the section on `promise execution tracking`. + * @since v8.1.0 + * @return The `asyncId` of the current execution context. Useful to track when something calls. + */ + function executionAsyncId(): number; + /** + * Resource objects returned by `executionAsyncResource()` are most often internal + * Node.js handle objects with undocumented APIs. Using any functions or properties + * on the object is likely to crash your application and should be avoided. + * + * Using `executionAsyncResource()` in the top-level execution context will + * return an empty object as there is no handle or request object to use, + * but having an object representing the top-level can be helpful. + * + * ```js + * import { open } from 'node:fs'; + * import { executionAsyncId, executionAsyncResource } from 'node:async_hooks'; + * + * console.log(executionAsyncId(), executionAsyncResource()); // 1 {} + * open(new URL(import.meta.url), 'r', (err, fd) => { + * console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap + * }); + * ``` + * + * This can be used to implement continuation local storage without the + * use of a tracking `Map` to store the metadata: + * + * ```js + * import { createServer } from 'node:http'; + * import { + * executionAsyncId, + * executionAsyncResource, + * createHook, + * } from 'async_hooks'; + * const sym = Symbol('state'); // Private symbol to avoid pollution + * + * createHook({ + * init(asyncId, type, triggerAsyncId, resource) { + * const cr = executionAsyncResource(); + * if (cr) { + * resource[sym] = cr[sym]; + * } + * }, + * }).enable(); + * + * const server = createServer((req, res) => { + * executionAsyncResource()[sym] = { state: req.url }; + * setTimeout(function() { + * res.end(JSON.stringify(executionAsyncResource()[sym])); + * }, 100); + * }).listen(3000); + * ``` + * @since v13.9.0, v12.17.0 + * @return The resource representing the current execution. Useful to store data within the resource. + */ + function executionAsyncResource(): object; + /** + * ```js + * const server = net.createServer((conn) => { + * // The resource that caused (or triggered) this callback to be called + * // was that of the new connection. Thus the return value of triggerAsyncId() + * // is the asyncId of "conn". 
+ * async_hooks.triggerAsyncId(); + * + * }).listen(port, () => { + * // Even though all callbacks passed to .listen() are wrapped in a nextTick() + * // the callback itself exists because the call to the server's .listen() + * // was made. So the return value would be the ID of the server. + * async_hooks.triggerAsyncId(); + * }); + * ``` + * + * Promise contexts may not get valid `triggerAsyncId`s by default. See + * the section on `promise execution tracking`. + * @return The ID of the resource responsible for calling the callback that is currently being executed. + */ + function triggerAsyncId(): number; + interface HookCallbacks { + /** + * Called when a class is constructed that has the possibility to emit an asynchronous event. + * @param asyncId a unique ID for the async resource + * @param type the type of the async resource + * @param triggerAsyncId the unique ID of the async resource in whose execution context this async resource was created + * @param resource reference to the resource representing the async operation, needs to be released during destroy + */ + init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void; + /** + * When an asynchronous operation is initiated or completes a callback is called to notify the user. + * The before callback is called just before said callback is executed. + * @param asyncId the unique identifier assigned to the resource about to execute the callback. + */ + before?(asyncId: number): void; + /** + * Called immediately after the callback specified in before is completed. + * @param asyncId the unique identifier assigned to the resource which has executed the callback. + */ + after?(asyncId: number): void; + /** + * Called when a promise has resolve() called. This may not be in the same execution id + * as the promise itself. + * @param asyncId the unique id for the promise that was resolve()d. + */ + promiseResolve?(asyncId: number): void; + /** + * Called after the resource corresponding to asyncId is destroyed + * @param asyncId a unique ID for the async resource + */ + destroy?(asyncId: number): void; + } + interface AsyncHook { + /** + * Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop. + */ + enable(): this; + /** + * Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled. + */ + disable(): this; + } + /** + * Registers functions to be called for different lifetime events of each async + * operation. + * + * The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the + * respective asynchronous event during a resource's lifetime. + * + * All callbacks are optional. For example, if only resource cleanup needs to + * be tracked, then only the `destroy` callback needs to be passed. The + * specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section. 
+ * + * ```js + * import { createHook } from 'node:async_hooks'; + * + * const asyncHook = createHook({ + * init(asyncId, type, triggerAsyncId, resource) { }, + * destroy(asyncId) { }, + * }); + * ``` + * + * The callbacks will be inherited via the prototype chain: + * + * ```js + * class MyAsyncCallbacks { + * init(asyncId, type, triggerAsyncId, resource) { } + * destroy(asyncId) {} + * } + * + * class MyAddedCallbacks extends MyAsyncCallbacks { + * before(asyncId) { } + * after(asyncId) { } + * } + * + * const asyncHook = async_hooks.createHook(new MyAddedCallbacks()); + * ``` + * + * Because promises are asynchronous resources whose lifecycle is tracked + * via the async hooks mechanism, the `init()`, `before()`, `after()`, and`destroy()` callbacks _must not_ be async functions that return promises. + * @since v8.1.0 + * @param callbacks The `Hook Callbacks` to register + * @return Instance used for disabling and enabling hooks + */ + function createHook(callbacks: HookCallbacks): AsyncHook; + interface AsyncResourceOptions { + /** + * The ID of the execution context that created this async event. + * @default executionAsyncId() + */ + triggerAsyncId?: number | undefined; + /** + * Disables automatic `emitDestroy` when the object is garbage collected. + * This usually does not need to be set (even if `emitDestroy` is called + * manually), unless the resource's `asyncId` is retrieved and the + * sensitive API's `emitDestroy` is called with it. + * @default false + */ + requireManualDestroy?: boolean | undefined; + } + /** + * The class `AsyncResource` is designed to be extended by the embedder's async + * resources. Using this, users can easily trigger the lifetime events of their + * own resources. + * + * The `init` hook will trigger when an `AsyncResource` is instantiated. + * + * The following is an overview of the `AsyncResource` API. + * + * ```js + * import { AsyncResource, executionAsyncId } from 'node:async_hooks'; + * + * // AsyncResource() is meant to be extended. Instantiating a + * // new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * // async_hook.executionAsyncId() is used. + * const asyncResource = new AsyncResource( + * type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false }, + * ); + * + * // Run a function in the execution context of the resource. This will + * // * establish the context of the resource + * // * trigger the AsyncHooks before callbacks + * // * call the provided function `fn` with the supplied arguments + * // * trigger the AsyncHooks after callbacks + * // * restore the original execution context + * asyncResource.runInAsyncScope(fn, thisArg, ...args); + * + * // Call AsyncHooks destroy callbacks. + * asyncResource.emitDestroy(); + * + * // Return the unique ID assigned to the AsyncResource instance. + * asyncResource.asyncId(); + * + * // Return the trigger ID for the AsyncResource instance. + * asyncResource.triggerAsyncId(); + * ``` + */ + class AsyncResource { + /** + * AsyncResource() is meant to be extended. Instantiating a + * new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * async_hook.executionAsyncId() is used. + * @param type The type of async event. 
+ * @param triggerAsyncId The ID of the execution context that created + * this async event (default: `executionAsyncId()`), or an + * AsyncResourceOptions object (since v9.3.0) + */ + constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions); + /** + * Binds the given function to the current execution context. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current execution context. + * @param type An optional name to associate with the underlying `AsyncResource`. + */ + static bind<Func extends (this: ThisArg, ...args: any[]) => any, ThisArg>( + fn: Func, + type?: string, + thisArg?: ThisArg, + ): Func; + /** + * Binds the given function to execute to this `AsyncResource`'s scope. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current `AsyncResource`. + */ + bind<Func extends (...args: any[]) => any>(fn: Func): Func; + /** + * Call the provided function with the provided arguments in the execution context + * of the async resource. This will establish the context, trigger the AsyncHooks + * before callbacks, call the function, trigger the AsyncHooks after callbacks, and + * then restore the original execution context. + * @since v9.6.0 + * @param fn The function to call in the execution context of this async resource. + * @param thisArg The receiver to be used for the function call. + * @param args Optional arguments to pass to the function. + */ + runInAsyncScope<This, Result>( + fn: (this: This, ...args: any[]) => Result, + thisArg?: This, + ...args: any[] + ): Result; + /** + * Call all `destroy` hooks. This should only ever be called once. An error will + * be thrown if it is called more than once. This **must** be manually called. If + * the resource is left to be collected by the GC then the `destroy` hooks will + * never be called. + * @return A reference to `asyncResource`. + */ + emitDestroy(): this; + /** + * @return The unique `asyncId` assigned to the resource. + */ + asyncId(): number; + /** + * @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor. + */ + triggerAsyncId(): number; + } + /** + * This class creates stores that stay coherent through asynchronous operations. + * + * While you can create your own implementation on top of the `node:async_hooks`module, `AsyncLocalStorage` should be preferred as it is a performant and memory + * safe implementation that involves significant optimizations that are non-obvious + * to implement. + * + * The following example uses `AsyncLocalStorage` to build a simple logger + * that assigns IDs to incoming HTTP requests and includes them in messages + * logged within each request. + * + * ```js + * import http from 'node:http'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const asyncLocalStorage = new AsyncLocalStorage(); + * + * function logWithId(msg) { + * const id = asyncLocalStorage.getStore(); + * console.log(`${id !== undefined ? id : '-'}:`, msg); + * } + * + * let idSeq = 0; + * http.createServer((req, res) => { + * asyncLocalStorage.run(idSeq++, () => { + * logWithId('start'); + * // Imagine any chain of async operations here + * setImmediate(() => { + * logWithId('finish'); + * res.end(); + * }); + * }); + * }).listen(8080); + * + * http.get('http://localhost:8080'); + * http.get('http://localhost:8080'); + * // Prints: + * // 0: start + * // 1: start + * // 0: finish + * // 1: finish + * ``` + * + * Each instance of `AsyncLocalStorage` maintains an independent storage context. + * Multiple instances can safely exist simultaneously without risk of interfering + * with each other's data.
+ * @since v13.10.0, v12.17.0 + */ + class AsyncLocalStorage<T> { + /** + * Binds the given function to the current execution context. + * @since v19.8.0 + * @experimental + * @param fn The function to bind to the current execution context. + * @return A new function that calls `fn` within the captured execution context. + */ + static bind<Func extends (...args: any[]) => any>(fn: Func): Func; + /** + * Captures the current execution context and returns a function that accepts a + * function as an argument. Whenever the returned function is called, it + * calls the function passed to it within the captured context. + * + * ```js + * const asyncLocalStorage = new AsyncLocalStorage(); + * const runInAsyncScope = asyncLocalStorage.run(123, () => AsyncLocalStorage.snapshot()); + * const result = asyncLocalStorage.run(321, () => runInAsyncScope(() => asyncLocalStorage.getStore())); + * console.log(result); // returns 123 + * ``` + * + * AsyncLocalStorage.snapshot() can replace the use of AsyncResource for simple + * async context tracking purposes, for example: + * + * ```js + * class Foo { + * #runInAsyncScope = AsyncLocalStorage.snapshot(); + * + * get() { return this.#runInAsyncScope(() => asyncLocalStorage.getStore()); } + * } + * + * const foo = asyncLocalStorage.run(123, () => new Foo()); + * console.log(asyncLocalStorage.run(321, () => foo.get())); // returns 123 + * ``` + * @since v19.8.0 + * @experimental + * @return A new function with the signature `(fn: (...args) : R, ...args) : R`. + */ + static snapshot(): <R, TArgs extends any[]>(fn: (...args: TArgs) => R, ...args: TArgs) => R; + /** + * Disables the instance of `AsyncLocalStorage`. All subsequent calls + * to `asyncLocalStorage.getStore()` will return `undefined` until`asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again. + * + * When calling `asyncLocalStorage.disable()`, all current contexts linked to the + * instance will be exited. + * + * Calling `asyncLocalStorage.disable()` is required before the`asyncLocalStorage` can be garbage collected. This does not apply to stores + * provided by the `asyncLocalStorage`, as those objects are garbage collected + * along with the corresponding async resources. + * + * Use this method when the `asyncLocalStorage` is not in use anymore + * in the current process. + * @since v13.10.0, v12.17.0 + * @experimental + */ + disable(): void; + /** + * Returns the current store. + * If called outside of an asynchronous context initialized by + * calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it + * returns `undefined`. + * @since v13.10.0, v12.17.0 + */ + getStore(): T | undefined; + /** + * Runs a function synchronously within a context and returns its + * return value. The store is not accessible outside of the callback function. + * The store is accessible to any asynchronous operations created within the + * callback. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `run()` too. + * The stacktrace is not impacted by this call and the context is exited.
+ * + * Example: + * + * ```js + * const store = { id: 2 }; + * try { + * asyncLocalStorage.run(store, () => { + * asyncLocalStorage.getStore(); // Returns the store object + * setTimeout(() => { + * asyncLocalStorage.getStore(); // Returns the store object + * }, 200); + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns undefined + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + */ + run<R>(store: T, callback: () => R): R; + run<R, TArgs extends any[]>(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Runs a function synchronously outside of a context and returns its + * return value. The store is not accessible within the callback function or + * the asynchronous operations created within the callback. Any `getStore()`call done within the callback function will always return `undefined`. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `exit()` too. + * The stacktrace is not impacted by this call and the context is re-entered. + * + * Example: + * + * ```js + * // Within a call to run + * try { + * asyncLocalStorage.getStore(); // Returns the store object or value + * asyncLocalStorage.exit(() => { + * asyncLocalStorage.getStore(); // Returns undefined + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns the same object or value + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + * @experimental + */ + exit<R, TArgs extends any[]>(callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Transitions into the context for the remainder of the current + * synchronous execution and then persists the store through any following + * asynchronous calls. + * + * Example: + * + * ```js + * const store = { id: 1 }; + * // Replaces previous store with the given store object + * asyncLocalStorage.enterWith(store); + * asyncLocalStorage.getStore(); // Returns the store object + * someAsyncOperation(() => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * ``` + * + * This transition will continue for the _entire_ synchronous execution. + * This means that if, for example, the context is entered within an event + * handler subsequent event handlers will also run within that context unless + * specifically bound to another context with an `AsyncResource`. That is why`run()` should be preferred over `enterWith()` unless there are strong reasons + * to use the latter method. + * + * ```js + * const store = { id: 1 }; + * + * emitter.on('my-event', () => { + * asyncLocalStorage.enterWith(store); + * }); + * emitter.on('my-event', () => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * + * asyncLocalStorage.getStore(); // Returns undefined + * emitter.emit('my-event'); + * asyncLocalStorage.getStore(); // Returns the same object + * ``` + * @since v13.11.0, v12.17.0 + * @experimental + */ + enterWith(store: T): void; + } +} +declare module "node:async_hooks" { + export * from "async_hooks"; +} diff --git a/node_modules/@types/node/buffer.d.ts b/node_modules/@types/node/buffer.d.ts new file mode 100644 index 0000000..255e268 --- /dev/null +++ b/node_modules/@types/node/buffer.d.ts @@ -0,0 +1,2363 @@ +/** + * `Buffer` objects are used to represent a fixed-length sequence of bytes. Many + * Node.js APIs support `Buffer`s.
+ * + * The `Buffer` class is a subclass of JavaScript's [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) class and + * extends it with methods that cover additional use cases. Node.js APIs accept + * plain [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) s wherever `Buffer`s are supported as well. + * + * While the `Buffer` class is available within the global scope, it is still + * recommended to explicitly reference it via an import or require statement. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Creates a zero-filled Buffer of length 10. + * const buf1 = Buffer.alloc(10); + * + * // Creates a Buffer of length 10, + * // filled with bytes which all have the value `1`. + * const buf2 = Buffer.alloc(10, 1); + * + * // Creates an uninitialized buffer of length 10. + * // This is faster than calling Buffer.alloc() but the returned + * // Buffer instance might contain old data that needs to be + * // overwritten using fill(), write(), or other functions that fill the Buffer's + * // contents. + * const buf3 = Buffer.allocUnsafe(10); + * + * // Creates a Buffer containing the bytes [1, 2, 3]. + * const buf4 = Buffer.from([1, 2, 3]); + * + * // Creates a Buffer containing the bytes [1, 1, 1, 1] – the entries + * // are all truncated using `(value & 255)` to fit into the range 0–255. + * const buf5 = Buffer.from([257, 257.5, -255, '1']); + * + * // Creates a Buffer containing the UTF-8-encoded bytes for the string 'tést': + * // [0x74, 0xc3, 0xa9, 0x73, 0x74] (in hexadecimal notation) + * // [116, 195, 169, 115, 116] (in decimal notation) + * const buf6 = Buffer.from('tést'); + * + * // Creates a Buffer containing the Latin-1 bytes [0x74, 0xe9, 0x73, 0x74]. + * const buf7 = Buffer.from('tést', 'latin1'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/buffer.js) + */ +declare module "buffer" { + import { BinaryLike } from "node:crypto"; + import { ReadableStream as WebReadableStream } from "node:stream/web"; + /** + * This function returns `true` if `input` contains only valid UTF-8-encoded data, + * including the case in which `input` is empty. + * + * Throws if the `input` is a detached array buffer. + * @since v19.4.0, v18.14.0 + * @param input The input to validate. + */ + export function isUtf8(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean; + /** + * This function returns `true` if `input` contains only valid ASCII-encoded data, + * including the case in which `input` is empty. + * + * Throws if the `input` is a detached array buffer. + * @since v19.6.0, v18.15.0 + * @param input The input to validate. + */ + export function isAscii(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean; + export const INSPECT_MAX_BYTES: number; + export const kMaxLength: number; + export const kStringMaxLength: number; + export const constants: { + MAX_LENGTH: number; + MAX_STRING_LENGTH: number; + }; + export type TranscodeEncoding = + | "ascii" + | "utf8" + | "utf-8" + | "utf16le" + | "utf-16le" + | "ucs2" + | "ucs-2" + | "latin1" + | "binary"; + /** + * Re-encodes the given `Buffer` or `Uint8Array` instance from one character + * encoding to another. Returns a new `Buffer` instance. + * + * Throws if the `fromEnc` or `toEnc` specify invalid character encodings or if + * conversion from `fromEnc` to `toEnc` is not permitted. 
+ * + * Encodings supported by `buffer.transcode()` are: `'ascii'`, `'utf8'`,`'utf16le'`, `'ucs2'`, `'latin1'`, and `'binary'`. + * + * The transcoding process will use substitution characters if a given byte + * sequence cannot be adequately represented in the target encoding. For instance: + * + * ```js + * import { Buffer, transcode } from 'node:buffer'; + * + * const newBuf = transcode(Buffer.from('€'), 'utf8', 'ascii'); + * console.log(newBuf.toString('ascii')); + * // Prints: '?' + * ``` + * + * Because the Euro (`€`) sign is not representable in US-ASCII, it is replaced + * with `?` in the transcoded `Buffer`. + * @since v7.1.0 + * @param source A `Buffer` or `Uint8Array` instance. + * @param fromEnc The current encoding. + * @param toEnc To target encoding. + */ + export function transcode(source: Uint8Array, fromEnc: TranscodeEncoding, toEnc: TranscodeEncoding): Buffer; + export const SlowBuffer: { + /** @deprecated since v6.0.0, use `Buffer.allocUnsafeSlow()` */ + new(size: number): Buffer; + prototype: Buffer; + }; + /** + * Resolves a `'blob:nodedata:...'` an associated `Blob` object registered using + * a prior call to `URL.createObjectURL()`. + * @since v16.7.0 + * @experimental + * @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`. + */ + export function resolveObjectURL(id: string): Blob | undefined; + export { Buffer }; + /** + * @experimental + */ + export interface BlobOptions { + /** + * One of either `'transparent'` or `'native'`. When set to `'native'`, line endings in string source parts + * will be converted to the platform native line-ending as specified by `require('node:os').EOL`. + */ + endings?: "transparent" | "native"; + /** + * The Blob content-type. The intent is for `type` to convey + * the MIME media type of the data, however no validation of the type format + * is performed. + */ + type?: string | undefined; + } + /** + * A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) encapsulates immutable, raw data that can be safely shared across + * multiple worker threads. + * @since v15.7.0, v14.18.0 + */ + export class Blob { + /** + * The total size of the `Blob` in bytes. + * @since v15.7.0, v14.18.0 + */ + readonly size: number; + /** + * The content-type of the `Blob`. + * @since v15.7.0, v14.18.0 + */ + readonly type: string; + /** + * Creates a new `Blob` object containing a concatenation of the given sources. + * + * {ArrayBuffer}, {TypedArray}, {DataView}, and {Buffer} sources are copied into + * the 'Blob' and can therefore be safely modified after the 'Blob' is created. + * + * String sources are also copied into the `Blob`. + */ + constructor(sources: Array, options?: BlobOptions); + /** + * Returns a promise that fulfills with an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) containing a copy of + * the `Blob` data. + * @since v15.7.0, v14.18.0 + */ + arrayBuffer(): Promise; + /** + * Creates and returns a new `Blob` containing a subset of this `Blob` objects + * data. The original `Blob` is not altered. + * @since v15.7.0, v14.18.0 + * @param start The starting index. + * @param end The ending index. + * @param type The content-type for the new `Blob` + */ + slice(start?: number, end?: number, type?: string): Blob; + /** + * Returns a promise that fulfills with the contents of the `Blob` decoded as a + * UTF-8 string. 
+ * @since v15.7.0, v14.18.0 + */ + text(): Promise; + /** + * Returns a new `ReadableStream` that allows the content of the `Blob` to be read. + * @since v16.7.0 + */ + stream(): WebReadableStream; + } + export interface FileOptions { + /** + * One of either `'transparent'` or `'native'`. When set to `'native'`, line endings in string source parts will be + * converted to the platform native line-ending as specified by `require('node:os').EOL`. + */ + endings?: "native" | "transparent"; + /** The File content-type. */ + type?: string; + /** The last modified date of the file. `Default`: Date.now(). */ + lastModified?: number; + } + /** + * A [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) provides information about files. + * @since v19.2.0, v18.13.0 + */ + export class File extends Blob { + constructor(sources: Array, fileName: string, options?: FileOptions); + /** + * The name of the `File`. + * @since v19.2.0, v18.13.0 + */ + readonly name: string; + /** + * The last modified date of the `File`. + * @since v19.2.0, v18.13.0 + */ + readonly lastModified: number; + } + export import atob = globalThis.atob; + export import btoa = globalThis.btoa; + import { Blob as NodeBlob } from "buffer"; + // This conditional type will be the existing global Blob in a browser, or + // the copy below in a Node environment. + type __Blob = typeof globalThis extends { onmessage: any; Blob: any } ? {} : NodeBlob; + global { + namespace NodeJS { + export { BufferEncoding }; + } + // Buffer class + type BufferEncoding = + | "ascii" + | "utf8" + | "utf-8" + | "utf16le" + | "utf-16le" + | "ucs2" + | "ucs-2" + | "base64" + | "base64url" + | "latin1" + | "binary" + | "hex"; + type WithImplicitCoercion = + | T + | { + valueOf(): T; + }; + /** + * Raw data is stored in instances of the Buffer class. + * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized. + * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'base64url'|'binary'(deprecated)|'hex' + */ + interface BufferConstructor { + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead. + */ + new(str: string, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`). + */ + new(size: number): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new(array: Uint8Array): Buffer; + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}/{SharedArrayBuffer}. + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead. + */ + new(arrayBuffer: ArrayBuffer | SharedArrayBuffer): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new(array: readonly any[]): Buffer; + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. 
+ * + * @param buffer The buffer to copy. + * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead. + */ + new(buffer: Buffer): Buffer; + /** + * Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`. + * Array entries outside that range will be truncated to fit into it. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'. + * const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]); + * ``` + * + * If `array` is an `Array`\-like object (that is, one with a `length` property of + * type `number`), it is treated as if it is an array, unless it is a `Buffer` or + * a `Uint8Array`. This means all other `TypedArray` variants get treated as an`Array`. To create a `Buffer` from the bytes backing a `TypedArray`, use `Buffer.copyBytesFrom()`. + * + * A `TypeError` will be thrown if `array` is not an `Array` or another type + * appropriate for `Buffer.from()` variants. + * + * `Buffer.from(array)` and `Buffer.from(string)` may also use the internal`Buffer` pool like `Buffer.allocUnsafe()` does. + * @since v5.10.0 + */ + from( + arrayBuffer: WithImplicitCoercion, + byteOffset?: number, + length?: number, + ): Buffer; + /** + * Creates a new Buffer using the passed {data} + * @param data data to create a new Buffer + */ + from(data: Uint8Array | readonly number[]): Buffer; + from(data: WithImplicitCoercion): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + */ + from( + str: + | WithImplicitCoercion + | { + [Symbol.toPrimitive](hint: "string"): string; + }, + encoding?: BufferEncoding, + ): Buffer; + /** + * Creates a new Buffer using the passed {data} + * @param values to create a new Buffer + */ + of(...items: number[]): Buffer; + /** + * Returns `true` if `obj` is a `Buffer`, `false` otherwise. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * Buffer.isBuffer(Buffer.alloc(10)); // true + * Buffer.isBuffer(Buffer.from('foo')); // true + * Buffer.isBuffer('a string'); // false + * Buffer.isBuffer([]); // false + * Buffer.isBuffer(new Uint8Array(1024)); // false + * ``` + * @since v0.1.101 + */ + isBuffer(obj: any): obj is Buffer; + /** + * Returns `true` if `encoding` is the name of a supported character encoding, + * or `false` otherwise. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * console.log(Buffer.isEncoding('utf8')); + * // Prints: true + * + * console.log(Buffer.isEncoding('hex')); + * // Prints: true + * + * console.log(Buffer.isEncoding('utf/8')); + * // Prints: false + * + * console.log(Buffer.isEncoding('')); + * // Prints: false + * ``` + * @since v0.9.1 + * @param encoding A character encoding name to check. + */ + isEncoding(encoding: string): encoding is BufferEncoding; + /** + * Returns the byte length of a string when encoded using `encoding`. + * This is not the same as [`String.prototype.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), which does not account + * for the encoding that is used to convert the string into bytes. + * + * For `'base64'`, `'base64url'`, and `'hex'`, this function assumes valid input. + * For strings that contain non-base64/hex-encoded data (e.g. whitespace), the + * return value might be greater than the length of a `Buffer` created from the + * string. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const str = '\u00bd + \u00bc = \u00be'; + * + * console.log(`${str}: ${str.length} characters, ` + + * `${Buffer.byteLength(str, 'utf8')} bytes`); + * // Prints: ½ + ¼ = ¾: 9 characters, 12 bytes + * ``` + * + * When `string` is a + * `Buffer`/[`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView)/[`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/- + * Reference/Global_Objects/TypedArray)/[`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer)/[`SharedArrayBuffer`](https://develop- + * er.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer), the byte length as reported by `.byteLength`is returned. + * @since v0.1.90 + * @param string A value to calculate the length of. + * @param [encoding='utf8'] If `string` is a string, this is its encoding. + * @return The number of bytes contained within `string`. + */ + byteLength( + string: string | Buffer | NodeJS.ArrayBufferView | ArrayBuffer | SharedArrayBuffer, + encoding?: BufferEncoding, + ): number; + /** + * Returns a new `Buffer` which is the result of concatenating all the `Buffer`instances in the `list` together. + * + * If the list has no items, or if the `totalLength` is 0, then a new zero-length`Buffer` is returned. + * + * If `totalLength` is not provided, it is calculated from the `Buffer` instances + * in `list` by adding their lengths. + * + * If `totalLength` is provided, it is coerced to an unsigned integer. If the + * combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is + * truncated to `totalLength`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a single `Buffer` from a list of three `Buffer` instances. + * + * const buf1 = Buffer.alloc(10); + * const buf2 = Buffer.alloc(14); + * const buf3 = Buffer.alloc(18); + * const totalLength = buf1.length + buf2.length + buf3.length; + * + * console.log(totalLength); + * // Prints: 42 + * + * const bufA = Buffer.concat([buf1, buf2, buf3], totalLength); + * + * console.log(bufA); + * // Prints: + * console.log(bufA.length); + * // Prints: 42 + * ``` + * + * `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does. + * @since v0.7.11 + * @param list List of `Buffer` or {@link Uint8Array} instances to concatenate. + * @param totalLength Total length of the `Buffer` instances in `list` when concatenated. + */ + concat(list: readonly Uint8Array[], totalLength?: number): Buffer; + /** + * Copies the underlying memory of `view` into a new `Buffer`. + * + * ```js + * const u16 = new Uint16Array([0, 0xffff]); + * const buf = Buffer.copyBytesFrom(u16, 1, 1); + * u16[1] = 0; + * console.log(buf.length); // 2 + * console.log(buf[0]); // 255 + * console.log(buf[1]); // 255 + * ``` + * @since v19.8.0 + * @param view The {TypedArray} to copy. + * @param [offset=0] The starting offset within `view`. + * @param [length=view.length - offset] The number of elements from `view` to copy. + */ + copyBytesFrom(view: NodeJS.TypedArray, offset?: number, length?: number): Buffer; + /** + * Compares `buf1` to `buf2`, typically for the purpose of sorting arrays of`Buffer` instances. This is equivalent to calling `buf1.compare(buf2)`. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('1234'); + * const buf2 = Buffer.from('0123'); + * const arr = [buf1, buf2]; + * + * console.log(arr.sort(Buffer.compare)); + * // Prints: [ , ] + * // (This result is equal to: [buf2, buf1].) + * ``` + * @since v0.11.13 + * @return Either `-1`, `0`, or `1`, depending on the result of the comparison. See `compare` for details. + */ + compare(buf1: Uint8Array, buf2: Uint8Array): -1 | 0 | 1; + /** + * Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the`Buffer` will be zero-filled. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(5); + * + * console.log(buf); + * // Prints: + * ``` + * + * If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. + * + * If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(5, 'a'); + * + * console.log(buf); + * // Prints: + * ``` + * + * If both `fill` and `encoding` are specified, the allocated `Buffer` will be + * initialized by calling `buf.fill(fill, encoding)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); + * + * console.log(buf); + * // Prints: + * ``` + * + * Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance + * contents will never contain sensitive data from previous allocations, including + * data that might not have been allocated for `Buffer`s. + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + * @param [fill=0] A value to pre-fill the new `Buffer` with. + * @param [encoding='utf8'] If `fill` is a string, this is its encoding. + */ + alloc(size: number, fill?: string | Uint8Array | number, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize`Buffer` instances with zeroes. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(10); + * + * console.log(buf); + * // Prints (contents may vary): + * + * buf.fill(0); + * + * console.log(buf); + * // Prints: + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * + * The `Buffer` module pre-allocates an internal `Buffer` instance of + * size `Buffer.poolSize` that is used as a pool for the fast allocation of new `Buffer` instances created using `Buffer.allocUnsafe()`, `Buffer.from(array)`, + * and `Buffer.concat()` only when `size` is less than `Buffer.poolSize >>> 1` (floor of `Buffer.poolSize` divided by two). + * + * Use of this pre-allocated internal memory pool is a key difference between + * calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. + * Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer`pool, while `Buffer.allocUnsafe(size).fill(fill)`_will_ use the internal`Buffer` pool if `size` is less + * than or equal to half `Buffer.poolSize`. 
The + * difference is subtle but can be important when an application requires the + * additional performance that `Buffer.allocUnsafe()` provides. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafe(size: number): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. A zero-length `Buffer` is created if + * `size` is 0. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize + * such `Buffer` instances with zeroes. + * + * When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, + * allocations under 4 KiB are sliced from a single pre-allocated `Buffer`. This + * allows applications to avoid the garbage collection overhead of creating many + * individually allocated `Buffer` instances. This approach improves both + * performance and memory usage by eliminating the need to track and clean up as + * many individual `ArrayBuffer` objects. + * + * However, in the case where a developer may need to retain a small chunk of + * memory from a pool for an indeterminate amount of time, it may be appropriate + * to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and + * then copying out the relevant bits. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Need to keep around a few small chunks of memory. + * const store = []; + * + * socket.on('readable', () => { + * let data; + * while (null !== (data = readable.read())) { + * // Allocate for retained data. + * const sb = Buffer.allocUnsafeSlow(10); + * + * // Copy the data into the new allocation. + * data.copy(sb, 0, 0, 10); + * + * store.push(sb); + * } + * }); + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.12.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafeSlow(size: number): Buffer; + /** + * This is the size (in bytes) of pre-allocated internal `Buffer` instances used + * for pooling. This value may be modified. + * @since v0.11.3 + */ + poolSize: number; + } + interface Buffer extends Uint8Array { + /** + * Writes `string` to `buf` at `offset` according to the character encoding in`encoding`. The `length` parameter is the number of bytes to write. If `buf` did + * not contain enough space to fit the entire string, only part of `string` will be + * written. However, partially encoded characters will not be written. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(256); + * + * const len = buf.write('\u00bd + \u00bc = \u00be', 0); + * + * console.log(`${len} bytes: ${buf.toString('utf8', 0, len)}`); + * // Prints: 12 bytes: ½ + ¼ = ¾ + * + * const buffer = Buffer.alloc(10); + * + * const length = buffer.write('abcd', 8); + * + * console.log(`${length} bytes: ${buffer.toString('utf8', 8, 10)}`); + * // Prints: 2 bytes : ab + * ``` + * @since v0.1.90 + * @param string String to write to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write `string`. + * @param [length=buf.length - offset] Maximum number of bytes to write (written bytes will not exceed `buf.length - offset`). + * @param [encoding='utf8'] The character encoding of `string`. + * @return Number of bytes written. 
+ */ + write(string: string, encoding?: BufferEncoding): number; + write(string: string, offset: number, encoding?: BufferEncoding): number; + write(string: string, offset: number, length: number, encoding?: BufferEncoding): number; + /** + * Decodes `buf` to a string according to the specified character encoding in`encoding`. `start` and `end` may be passed to decode only a subset of `buf`. + * + * If `encoding` is `'utf8'` and a byte sequence in the input is not valid UTF-8, + * then each invalid byte is replaced with the replacement character `U+FFFD`. + * + * The maximum length of a string instance (in UTF-16 code units) is available + * as {@link constants.MAX_STRING_LENGTH}. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * console.log(buf1.toString('utf8')); + * // Prints: abcdefghijklmnopqrstuvwxyz + * console.log(buf1.toString('utf8', 0, 5)); + * // Prints: abcde + * + * const buf2 = Buffer.from('tést'); + * + * console.log(buf2.toString('hex')); + * // Prints: 74c3a97374 + * console.log(buf2.toString('utf8', 0, 3)); + * // Prints: té + * console.log(buf2.toString(undefined, 0, 3)); + * // Prints: té + * ``` + * @since v0.1.90 + * @param [encoding='utf8'] The character encoding to use. + * @param [start=0] The byte offset to start decoding at. + * @param [end=buf.length] The byte offset to stop decoding at (not inclusive). + */ + toString(encoding?: BufferEncoding, start?: number, end?: number): string; + /** + * Returns a JSON representation of `buf`. [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) implicitly calls + * this function when stringifying a `Buffer` instance. + * + * `Buffer.from()` accepts objects in the format returned from this method. + * In particular, `Buffer.from(buf.toJSON())` works like `Buffer.from(buf)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]); + * const json = JSON.stringify(buf); + * + * console.log(json); + * // Prints: {"type":"Buffer","data":[1,2,3,4,5]} + * + * const copy = JSON.parse(json, (key, value) => { + * return value && value.type === 'Buffer' ? + * Buffer.from(value) : + * value; + * }); + * + * console.log(copy); + * // Prints: + * ``` + * @since v0.9.2 + */ + toJSON(): { + type: "Buffer"; + data: number[]; + }; + /** + * Returns `true` if both `buf` and `otherBuffer` have exactly the same bytes,`false` otherwise. Equivalent to `buf.compare(otherBuffer) === 0`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('414243', 'hex'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.equals(buf2)); + * // Prints: true + * console.log(buf1.equals(buf3)); + * // Prints: false + * ``` + * @since v0.11.13 + * @param otherBuffer A `Buffer` or {@link Uint8Array} with which to compare `buf`. + */ + equals(otherBuffer: Uint8Array): boolean; + /** + * Compares `buf` with `target` and returns a number indicating whether `buf`comes before, after, or is the same as `target` in sort order. + * Comparison is based on the actual sequence of bytes in each `Buffer`. + * + * * `0` is returned if `target` is the same as `buf` + * * `1` is returned if `target` should come _before_`buf` when sorted. + * * `-1` is returned if `target` should come _after_`buf` when sorted. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('BCD'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.compare(buf1)); + * // Prints: 0 + * console.log(buf1.compare(buf2)); + * // Prints: -1 + * console.log(buf1.compare(buf3)); + * // Prints: -1 + * console.log(buf2.compare(buf1)); + * // Prints: 1 + * console.log(buf2.compare(buf3)); + * // Prints: 1 + * console.log([buf1, buf2, buf3].sort(Buffer.compare)); + * // Prints: [ , , ] + * // (This result is equal to: [buf1, buf3, buf2].) + * ``` + * + * The optional `targetStart`, `targetEnd`, `sourceStart`, and `sourceEnd`arguments can be used to limit the comparison to specific ranges within `target`and `buf` respectively. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9]); + * const buf2 = Buffer.from([5, 6, 7, 8, 9, 1, 2, 3, 4]); + * + * console.log(buf1.compare(buf2, 5, 9, 0, 4)); + * // Prints: 0 + * console.log(buf1.compare(buf2, 0, 6, 4)); + * // Prints: -1 + * console.log(buf1.compare(buf2, 5, 6, 5)); + * // Prints: 1 + * ``` + * + * `ERR_OUT_OF_RANGE` is thrown if `targetStart < 0`, `sourceStart < 0`,`targetEnd > target.byteLength`, or `sourceEnd > source.byteLength`. + * @since v0.11.13 + * @param target A `Buffer` or {@link Uint8Array} with which to compare `buf`. + * @param [targetStart=0] The offset within `target` at which to begin comparison. + * @param [targetEnd=target.length] The offset within `target` at which to end comparison (not inclusive). + * @param [sourceStart=0] The offset within `buf` at which to begin comparison. + * @param [sourceEnd=buf.length] The offset within `buf` at which to end comparison (not inclusive). + */ + compare( + target: Uint8Array, + targetStart?: number, + targetEnd?: number, + sourceStart?: number, + sourceEnd?: number, + ): -1 | 0 | 1; + /** + * Copies data from a region of `buf` to a region in `target`, even if the `target`memory region overlaps with `buf`. + * + * [`TypedArray.prototype.set()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/set) performs the same operation, and is available + * for all TypedArrays, including Node.js `Buffer`s, although it takes + * different function arguments. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create two `Buffer` instances. + * const buf1 = Buffer.allocUnsafe(26); + * const buf2 = Buffer.allocUnsafe(26).fill('!'); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * // Copy `buf1` bytes 16 through 19 into `buf2` starting at byte 8 of `buf2`. + * buf1.copy(buf2, 8, 16, 20); + * // This is equivalent to: + * // buf2.set(buf1.subarray(16, 20), 8); + * + * console.log(buf2.toString('ascii', 0, 25)); + * // Prints: !!!!!!!!qrst!!!!!!!!!!!!! + * ``` + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a `Buffer` and copy data from one region to an overlapping region + * // within the same `Buffer`. + * + * const buf = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf[i] = i + 97; + * } + * + * buf.copy(buf, 0, 4, 10); + * + * console.log(buf.toString()); + * // Prints: efghijghijklmnopqrstuvwxyz + * ``` + * @since v0.1.90 + * @param target A `Buffer` or {@link Uint8Array} to copy into. + * @param [targetStart=0] The offset within `target` at which to begin writing. 
+ * @param [sourceStart=0] The offset within `buf` from which to begin copying. + * @param [sourceEnd=buf.length] The offset within `buf` at which to stop copying (not inclusive). + * @return The number of bytes copied. + */ + copy(target: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * This method is not compatible with the `Uint8Array.prototype.slice()`, + * which is a superclass of `Buffer`. To copy the slice, use`Uint8Array.prototype.slice()`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * const copiedBuf = Uint8Array.prototype.slice.call(buf); + * copiedBuf[0]++; + * console.log(copiedBuf.toString()); + * // Prints: cuffer + * + * console.log(buf.toString()); + * // Prints: buffer + * + * // With buf.slice(), the original buffer is modified. + * const notReallyCopiedBuf = buf.slice(); + * notReallyCopiedBuf[0]++; + * console.log(notReallyCopiedBuf.toString()); + * // Prints: cuffer + * console.log(buf.toString()); + * // Also prints: cuffer (!) + * ``` + * @since v0.3.0 + * @deprecated Use `subarray` instead. + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + slice(start?: number, end?: number): Buffer; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * Specifying `end` greater than `buf.length` will return the same result as + * that of `end` equal to `buf.length`. + * + * This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). + * + * Modifying the new `Buffer` slice will modify the memory in the original `Buffer`because the allocated memory of the two objects overlap. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte + * // from the original `Buffer`. + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * const buf2 = buf1.subarray(0, 3); + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: abc + * + * buf1[0] = 33; + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: !bc + * ``` + * + * Specifying negative indexes causes the slice to be generated relative to the + * end of `buf` rather than the beginning. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * console.log(buf.subarray(-6, -1).toString()); + * // Prints: buffe + * // (Equivalent to buf.subarray(0, 5).) + * + * console.log(buf.subarray(-6, -2).toString()); + * // Prints: buff + * // (Equivalent to buf.subarray(0, 4).) + * + * console.log(buf.subarray(-5, -2).toString()); + * // Prints: uff + * // (Equivalent to buf.subarray(1, 4).) + * ``` + * @since v3.0.0 + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + subarray(start?: number, end?: number): Buffer; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. 
+ * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64BE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigInt64BE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64LE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigInt64LE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. + * + * This function is also available under the `writeBigUint64BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigUInt64BE(0xdecafafecacefaden, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigUInt64BE(value: bigint, offset?: number): number; + /** + * @alias Buffer.writeBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + writeBigUint64BE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigUInt64LE(0xdecafafecacefaden, 0); + * + * console.log(buf); + * // Prints: + * ``` + * + * This function is also available under the `writeBigUint64LE` alias. + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigUInt64LE(value: bigint, offset?: number): number; + /** + * @alias Buffer.writeBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + writeBigUint64LE(value: bigint, offset?: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintLE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. 
Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeUIntLE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntLE + * @since v14.9.0, v12.19.0 + */ + writeUintLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintBE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeUIntBE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntBE + * @since v14.9.0, v12.19.0 + */ + writeUintBE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than a signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined when`value` is anything other than a + * signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntBE(value: number, offset: number, byteLength: number): number; + /** + * Reads an unsigned, big-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64BE` alias. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64BE(0)); + * // Prints: 4294967295n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigUInt64BE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + readBigUint64BE(offset?: number): bigint; + /** + * Reads an unsigned, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64LE(0)); + * // Prints: 18446744069414584320n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigUInt64LE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + readBigUint64LE(offset?: number): bigint; + /** + * Reads a signed, big-endian 64-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64BE(offset?: number): bigint; + /** + * Reads a signed, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64LE(offset?: number): bigint; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned, little-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintLE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntLE(0, 6).toString(16)); + * // Prints: ab9078563412 + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readUIntLE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntLE + * @since v14.9.0, v12.19.0 + */ + readUintLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned big-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintBE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readUIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. 
Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readUIntBE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntBE + * @since v14.9.0, v12.19.0 + */ + readUintBE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a little-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntLE(0, 6).toString(16)); + * // Prints: -546f87a9cbee + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a big-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * console.log(buf.readIntBE(1, 0).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntBE(offset: number, byteLength: number): number; + /** + * Reads an unsigned 8-bit integer from `buf` at the specified `offset`. + * + * This function is also available under the `readUint8` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, -2]); + * + * console.log(buf.readUInt8(0)); + * // Prints: 1 + * console.log(buf.readUInt8(1)); + * // Prints: 254 + * console.log(buf.readUInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readUInt8(offset?: number): number; + /** + * @alias Buffer.readUInt8 + * @since v14.9.0, v12.19.0 + */ + readUint8(offset?: number): number; + /** + * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint16LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16LE(0).toString(16)); + * // Prints: 3412 + * console.log(buf.readUInt16LE(1).toString(16)); + * // Prints: 5634 + * console.log(buf.readUInt16LE(2).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16LE(offset?: number): number; + /** + * @alias Buffer.readUInt16LE + * @since v14.9.0, v12.19.0 + */ + readUint16LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified`offset`. 
+ * + * This function is also available under the `readUint16BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16BE(0).toString(16)); + * // Prints: 1234 + * console.log(buf.readUInt16BE(1).toString(16)); + * // Prints: 3456 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16BE(offset?: number): number; + /** + * @alias Buffer.readUInt16BE + * @since v14.9.0, v12.19.0 + */ + readUint16BE(offset?: number): number; + /** + * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32LE(0).toString(16)); + * // Prints: 78563412 + * console.log(buf.readUInt32LE(1).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32LE(offset?: number): number; + /** + * @alias Buffer.readUInt32LE + * @since v14.9.0, v12.19.0 + */ + readUint32LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32BE(0).toString(16)); + * // Prints: 12345678 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32BE(offset?: number): number; + /** + * @alias Buffer.readUInt32BE + * @since v14.9.0, v12.19.0 + */ + readUint32BE(offset?: number): number; + /** + * Reads a signed 8-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([-1, 5]); + * + * console.log(buf.readInt8(0)); + * // Prints: -1 + * console.log(buf.readInt8(1)); + * // Prints: 5 + * console.log(buf.readInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readInt8(offset?: number): number; + /** + * Reads a signed, little-endian 16-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16LE(0)); + * // Prints: 1280 + * console.log(buf.readInt16LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16LE(offset?: number): number; + /** + * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16BE(offset?: number): number; + /** + * Reads a signed, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32LE(0)); + * // Prints: 83886080 + * console.log(buf.readInt32LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32LE(offset?: number): number; + /** + * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32BE(offset?: number): number; + /** + * Reads a 32-bit, little-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatLE(0)); + * // Prints: 1.539989614439558e-36 + * console.log(buf.readFloatLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatLE(offset?: number): number; + /** + * Reads a 32-bit, big-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatBE(0)); + * // Prints: 2.387939260590663e-38 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatBE(offset?: number): number; + /** + * Reads a 64-bit, little-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleLE(0)); + * // Prints: 5.447603722011605e-270 + * console.log(buf.readDoubleLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. + */ + readDoubleLE(offset?: number): number; + /** + * Reads a 64-bit, big-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleBE(0)); + * // Prints: 8.20788039913184e-304 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. 
+ */ + readDoubleBE(offset?: number): number; + reverse(): this; + /** + * Interprets `buf` as an array of unsigned 16-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 2. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap16(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap16(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * + * One convenient use of `buf.swap16()` is to perform a fast in-place conversion + * between UTF-16 little-endian and UTF-16 big-endian: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('This is little-endian UTF-16', 'utf16le'); + * buf.swap16(); // Convert to big-endian UTF-16 text. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap16(): Buffer; + /** + * Interprets `buf` as an array of unsigned 32-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 4. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap32(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap32(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap32(): Buffer; + /** + * Interprets `buf` as an array of 64-bit numbers and swaps byte order _in-place_. + * Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 8. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap64(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap64(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v6.3.0 + * @return A reference to `buf`. + */ + swap64(): Buffer; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a + * valid unsigned 8-bit integer. Behavior is undefined when `value` is anything + * other than an unsigned 8-bit integer. + * + * This function is also available under the `writeUint8` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt8(0x3, 0); + * buf.writeUInt8(0x4, 1); + * buf.writeUInt8(0x23, 2); + * buf.writeUInt8(0x42, 3); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeUInt8(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt8 + * @since v14.9.0, v12.19.0 + */ + writeUint8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 16-bit integer. + * + * This function is also available under the `writeUint16LE` alias. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16LE(0xdead, 0); + * buf.writeUInt16LE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16LE + * @since v14.9.0, v12.19.0 + */ + writeUint16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 16-bit integer. + * + * This function is also available under the `writeUint16BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16BE(0xdead, 0); + * buf.writeUInt16BE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16BE + * @since v14.9.0, v12.19.0 + */ + writeUint16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 32-bit integer. + * + * This function is also available under the `writeUint32LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32LE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32LE + * @since v14.9.0, v12.19.0 + */ + writeUint32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 32-bit integer. + * + * This function is also available under the `writeUint32BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32BE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32BE + * @since v14.9.0, v12.19.0 + */ + writeUint32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a valid + * signed 8-bit integer. 
Behavior is undefined when `value` is anything other than + * a signed 8-bit integer. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt8(2, 0); + * buf.writeInt8(-2, 1); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeInt8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16LE(0x0304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16BE(0x0102, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is + * anything other than a signed 32-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeInt32LE(0x05060708, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeInt32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is + * anything other than a signed 32-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeInt32BE(0x01020304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeInt32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. Behavior is + * undefined when `value` is anything other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeFloatLE(0xcafebabe, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeFloatLE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. Behavior is + * undefined when `value` is anything other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeFloatBE(0xcafebabe, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeFloatBE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything + * other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeDoubleLE(123.456, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeDoubleLE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything + * other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeDoubleBE(123.456, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeDoubleBE(value: number, offset?: number): number; + /** + * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, + * the entire `buf` will be filled: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Fill a `Buffer` with the ASCII character 'h'. 
+ * + * const b = Buffer.allocUnsafe(50).fill('h'); + * + * console.log(b.toString()); + * // Prints: hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh + * + * // Fill a buffer with empty string + * const c = Buffer.allocUnsafe(5).fill(''); + * + * console.log(c.fill('')); + * // Prints: + * ``` + * + * `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or + * integer. If the resulting integer is greater than `255` (decimal), `buf` will be + * filled with `value & 255`. + * + * If the final write of a `fill()` operation falls on a multi-byte character, + * then only the bytes of that character that fit into `buf` are written: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Fill a `Buffer` with character that takes up two bytes in UTF-8. + * + * console.log(Buffer.allocUnsafe(5).fill('\u0222')); + * // Prints: + * ``` + * + * If `value` contains invalid characters, it is truncated; if no valid + * fill data remains, an exception is thrown: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(5); + * + * console.log(buf.fill('a')); + * // Prints: + * console.log(buf.fill('aazz', 'hex')); + * // Prints: + * console.log(buf.fill('zz', 'hex')); + * // Throws an exception. + * ``` + * @since v0.5.0 + * @param value The value with which to fill `buf`. Empty value (string, Uint8Array, Buffer) is coerced to `0`. + * @param [offset=0] Number of bytes to skip before starting to fill `buf`. + * @param [end=buf.length] Where to stop filling `buf` (not inclusive). + * @param [encoding='utf8'] The encoding for `value` if `value` is a string. + * @return A reference to `buf`. + */ + fill(value: string | Uint8Array | number, offset?: number, end?: number, encoding?: BufferEncoding): this; + /** + * If `value` is: + * + * * a string, `value` is interpreted according to the character encoding in`encoding`. + * * a `Buffer` or [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array), `value` will be used in its entirety. + * To compare a partial `Buffer`, use `buf.subarray`. + * * a number, `value` will be interpreted as an unsigned 8-bit integer + * value between `0` and `255`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.indexOf('this')); + * // Prints: 0 + * console.log(buf.indexOf('is')); + * // Prints: 2 + * console.log(buf.indexOf(Buffer.from('a buffer'))); + * // Prints: 8 + * console.log(buf.indexOf(97)); + * // Prints: 8 (97 is the decimal ASCII value for 'a') + * console.log(buf.indexOf(Buffer.from('a buffer example'))); + * // Prints: -1 + * console.log(buf.indexOf(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: 8 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.indexOf('\u03a3', 0, 'utf16le')); + * // Prints: 4 + * console.log(utf16Buffer.indexOf('\u03a3', -4, 'utf16le')); + * // Prints: 6 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a`TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. If the result + * of coercion is `NaN` or `0`, then the entire buffer will be searched. This + * behavior matches [`String.prototype.indexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/indexOf). 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.indexOf(99.9)); + * console.log(b.indexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN or 0. + * // Prints: 1, searching the whole buffer. + * console.log(b.indexOf('b', undefined)); + * console.log(b.indexOf('b', {})); + * console.log(b.indexOf('b', null)); + * console.log(b.indexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer` and `byteOffset` is less + * than `buf.length`, `byteOffset` will be returned. If `value` is empty and`byteOffset` is at least `buf.length`, `buf.length` will be returned. + * @since v1.5.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the first occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Identical to `buf.indexOf()`, except the last occurrence of `value` is found + * rather than the first occurrence. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this buffer is a buffer'); + * + * console.log(buf.lastIndexOf('this')); + * // Prints: 0 + * console.log(buf.lastIndexOf('buffer')); + * // Prints: 17 + * console.log(buf.lastIndexOf(Buffer.from('buffer'))); + * // Prints: 17 + * console.log(buf.lastIndexOf(97)); + * // Prints: 15 (97 is the decimal ASCII value for 'a') + * console.log(buf.lastIndexOf(Buffer.from('yolo'))); + * // Prints: -1 + * console.log(buf.lastIndexOf('buffer', 5)); + * // Prints: 5 + * console.log(buf.lastIndexOf('buffer', 4)); + * // Prints: -1 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.lastIndexOf('\u03a3', undefined, 'utf16le')); + * // Prints: 6 + * console.log(utf16Buffer.lastIndexOf('\u03a3', -5, 'utf16le')); + * // Prints: 4 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a`TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. Any arguments + * that coerce to `NaN`, like `{}` or `undefined`, will search the whole buffer. + * This behavior matches [`String.prototype.lastIndexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/lastIndexOf). + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.lastIndexOf(99.9)); + * console.log(b.lastIndexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN. + * // Prints: 1, searching the whole buffer. + * console.log(b.lastIndexOf('b', undefined)); + * console.log(b.lastIndexOf('b', {})); + * + * // Passing a byteOffset that coerces to 0. + * // Prints: -1, equivalent to passing 0. 
+ * console.log(b.lastIndexOf('b', null)); + * console.log(b.lastIndexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer`, `byteOffset` will be returned. + * @since v6.0.0 + * @param value What to search for. + * @param [byteOffset=buf.length - 1] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the last occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `[index, byte]` pairs from the contents + * of `buf`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Log the entire contents of a `Buffer`. + * + * const buf = Buffer.from('buffer'); + * + * for (const pair of buf.entries()) { + * console.log(pair); + * } + * // Prints: + * // [0, 98] + * // [1, 117] + * // [2, 102] + * // [3, 102] + * // [4, 101] + * // [5, 114] + * ``` + * @since v1.1.0 + */ + entries(): IterableIterator<[number, number]>; + /** + * Equivalent to `buf.indexOf() !== -1`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.includes('this')); + * // Prints: true + * console.log(buf.includes('is')); + * // Prints: true + * console.log(buf.includes(Buffer.from('a buffer'))); + * // Prints: true + * console.log(buf.includes(97)); + * // Prints: true (97 is the decimal ASCII value for 'a') + * console.log(buf.includes(Buffer.from('a buffer example'))); + * // Prints: false + * console.log(buf.includes(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: true + * console.log(buf.includes('this', 4)); + * // Prints: false + * ``` + * @since v5.3.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is its encoding. + * @return `true` if `value` was found in `buf`, `false` otherwise. + */ + includes(value: string | number | Buffer, byteOffset?: number, encoding?: BufferEncoding): boolean; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `buf` keys (indices). + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * for (const key of buf.keys()) { + * console.log(key); + * } + * // Prints: + * // 0 + * // 1 + * // 2 + * // 3 + * // 4 + * // 5 + * ``` + * @since v1.1.0 + */ + keys(): IterableIterator; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) for `buf` values (bytes). This function is + * called automatically when a `Buffer` is used in a `for..of` statement. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * for (const value of buf.values()) { + * console.log(value); + * } + * // Prints: + * // 98 + * // 117 + * // 102 + * // 102 + * // 101 + * // 114 + * + * for (const value of buf) { + * console.log(value); + * } + * // Prints: + * // 98 + * // 117 + * // 102 + * // 102 + * // 101 + * // 114 + * ``` + * @since v1.1.0 + */ + values(): IterableIterator; + } + var Buffer: BufferConstructor; + /** + * Decodes a string of Base64-encoded data into bytes, and encodes those bytes + * into a string using Latin-1 (ISO-8859-1). + * + * The `data` may be any JavaScript-value that can be coerced into a string. + * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and`buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @legacy Use `Buffer.from(data, 'base64')` instead. + * @param data The Base64-encoded input string. + */ + function atob(data: string): string; + /** + * Decodes a string into bytes using Latin-1 (ISO-8859), and encodes those bytes + * into a string using Base64. + * + * The `data` may be any JavaScript-value that can be coerced into a string. + * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and`buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @legacy Use `buf.toString('base64')` instead. + * @param data An ASCII (Latin1) string. + */ + function btoa(data: string): string; + interface Blob extends __Blob {} + /** + * `Blob` class is a global reference for `require('node:buffer').Blob` + * https://nodejs.org/api/buffer.html#class-blob + * @since v18.0.0 + */ + var Blob: typeof globalThis extends { + onmessage: any; + Blob: infer T; + } ? T + : typeof NodeBlob; + } +} +declare module "node:buffer" { + export * from "buffer"; +} diff --git a/node_modules/@types/node/child_process.d.ts b/node_modules/@types/node/child_process.d.ts new file mode 100644 index 0000000..e5078b8 --- /dev/null +++ b/node_modules/@types/node/child_process.d.ts @@ -0,0 +1,1542 @@ +/** + * The `node:child_process` module provides the ability to spawn subprocesses in + * a manner that is similar, but not identical, to [`popen(3)`](http://man7.org/linux/man-pages/man3/popen.3.html). 
This capability + * is primarily provided by the {@link spawn} function: + * + * ```js + * const { spawn } = require('node:child_process'); + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * By default, pipes for `stdin`, `stdout`, and `stderr` are established between + * the parent Node.js process and the spawned subprocess. These pipes have + * limited (and platform-specific) capacity. If the subprocess writes to + * stdout in excess of that limit without the output being captured, the + * subprocess blocks waiting for the pipe buffer to accept more data. This is + * identical to the behavior of pipes in the shell. Use the `{ stdio: 'ignore' }`option if the output will not be consumed. + * + * The command lookup is performed using the `options.env.PATH` environment + * variable if `env` is in the `options` object. Otherwise, `process.env.PATH` is + * used. If `options.env` is set without `PATH`, lookup on Unix is performed + * on a default search path search of `/usr/bin:/bin` (see your operating system's + * manual for execvpe/execvp), on Windows the current processes environment + * variable `PATH` is used. + * + * On Windows, environment variables are case-insensitive. Node.js + * lexicographically sorts the `env` keys and uses the first one that + * case-insensitively matches. Only first (in lexicographic order) entry will be + * passed to the subprocess. This might lead to issues on Windows when passing + * objects to the `env` option that have multiple variants of the same key, such as`PATH` and `Path`. + * + * The {@link spawn} method spawns the child process asynchronously, + * without blocking the Node.js event loop. The {@link spawnSync} function provides equivalent functionality in a synchronous manner that blocks + * the event loop until the spawned process either exits or is terminated. + * + * For convenience, the `node:child_process` module provides a handful of + * synchronous and asynchronous alternatives to {@link spawn} and {@link spawnSync}. Each of these alternatives are implemented on + * top of {@link spawn} or {@link spawnSync}. + * + * * {@link exec}: spawns a shell and runs a command within that + * shell, passing the `stdout` and `stderr` to a callback function when + * complete. + * * {@link execFile}: similar to {@link exec} except + * that it spawns the command directly without first spawning a shell by + * default. + * * {@link fork}: spawns a new Node.js process and invokes a + * specified module with an IPC communication channel established that allows + * sending messages between parent and child. + * * {@link execSync}: a synchronous version of {@link exec} that will block the Node.js event loop. + * * {@link execFileSync}: a synchronous version of {@link execFile} that will block the Node.js event loop. + * + * For certain use cases, such as automating shell scripts, the `synchronous counterparts` may be more convenient. In many cases, however, + * the synchronous methods can have significant impact on performance due to + * stalling the event loop while spawned processes complete. 
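+ *
+ * As a rough illustration of that trade-off (the `ls` invocation below is only an
+ * example and assumes a Unix-like host), the asynchronous and synchronous forms
+ * might be used like this:
+ *
+ * ```js
+ * const { exec, execSync } = require('node:child_process');
+ *
+ * // Asynchronous: the event loop keeps running while `ls` executes.
+ * exec('ls -lh /usr', (error, stdout, stderr) => {
+ *   if (error) {
+ *     console.error(`exec error: ${error}`);
+ *     return;
+ *   }
+ *   console.log(stdout);
+ * });
+ *
+ * // Synchronous: blocks the event loop until `ls` has exited.
+ * const output = execSync('ls -lh /usr', { encoding: 'utf8' });
+ * console.log(output);
+ * ```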
+ * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/child_process.js) + */ +declare module "child_process" { + import { ObjectEncodingOptions } from "node:fs"; + import { Abortable, EventEmitter } from "node:events"; + import * as net from "node:net"; + import { Pipe, Readable, Stream, Writable } from "node:stream"; + import { URL } from "node:url"; + type Serializable = string | object | number | boolean | bigint; + type SendHandle = net.Socket | net.Server; + /** + * Instances of the `ChildProcess` represent spawned child processes. + * + * Instances of `ChildProcess` are not intended to be created directly. Rather, + * use the {@link spawn}, {@link exec},{@link execFile}, or {@link fork} methods to create + * instances of `ChildProcess`. + * @since v2.2.0 + */ + class ChildProcess extends EventEmitter { + /** + * A `Writable Stream` that represents the child process's `stdin`. + * + * If a child process waits to read all of its input, the child will not continue + * until this stream has been closed via `end()`. + * + * If the child was spawned with `stdio[0]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdin` is an alias for `subprocess.stdio[0]`. Both properties will + * refer to the same value. + * + * The `subprocess.stdin` property can be `null` or `undefined`if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stdin: Writable | null; + /** + * A `Readable Stream` that represents the child process's `stdout`. + * + * If the child was spawned with `stdio[1]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdout` is an alias for `subprocess.stdio[1]`. Both properties will + * refer to the same value. + * + * ```js + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn('ls'); + * + * subprocess.stdout.on('data', (data) => { + * console.log(`Received chunk ${data}`); + * }); + * ``` + * + * The `subprocess.stdout` property can be `null` or `undefined`if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stdout: Readable | null; + /** + * A `Readable Stream` that represents the child process's `stderr`. + * + * If the child was spawned with `stdio[2]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stderr` is an alias for `subprocess.stdio[2]`. Both properties will + * refer to the same value. + * + * The `subprocess.stderr` property can be `null` or `undefined`if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stderr: Readable | null; + /** + * The `subprocess.channel` property is a reference to the child's IPC channel. If + * no IPC channel exists, this property is `undefined`. + * @since v7.1.0 + */ + readonly channel?: Pipe | null | undefined; + /** + * A sparse array of pipes to the child process, corresponding with positions in + * the `stdio` option passed to {@link spawn} that have been set + * to the value `'pipe'`. `subprocess.stdio[0]`, `subprocess.stdio[1]`, and`subprocess.stdio[2]` are also available as `subprocess.stdin`,`subprocess.stdout`, and `subprocess.stderr`, + * respectively. + * + * In the following example, only the child's fd `1` (stdout) is configured as a + * pipe, so only the parent's `subprocess.stdio[1]` is a stream, all other values + * in the array are `null`. 
+ * + * ```js + * const assert = require('node:assert'); + * const fs = require('node:fs'); + * const child_process = require('node:child_process'); + * + * const subprocess = child_process.spawn('ls', { + * stdio: [ + * 0, // Use parent's stdin for child. + * 'pipe', // Pipe child's stdout to parent. + * fs.openSync('err.out', 'w'), // Direct child's stderr to a file. + * ], + * }); + * + * assert.strictEqual(subprocess.stdio[0], null); + * assert.strictEqual(subprocess.stdio[0], subprocess.stdin); + * + * assert(subprocess.stdout); + * assert.strictEqual(subprocess.stdio[1], subprocess.stdout); + * + * assert.strictEqual(subprocess.stdio[2], null); + * assert.strictEqual(subprocess.stdio[2], subprocess.stderr); + * ``` + * + * The `subprocess.stdio` property can be `undefined` if the child process could + * not be successfully spawned. + * @since v0.7.10 + */ + readonly stdio: [ + Writable | null, + // stdin + Readable | null, + // stdout + Readable | null, + // stderr + Readable | Writable | null | undefined, + // extra + Readable | Writable | null | undefined, // extra + ]; + /** + * The `subprocess.killed` property indicates whether the child process + * successfully received a signal from `subprocess.kill()`. The `killed` property + * does not indicate that the child process has been terminated. + * @since v0.5.10 + */ + readonly killed: boolean; + /** + * Returns the process identifier (PID) of the child process. If the child process + * fails to spawn due to errors, then the value is `undefined` and `error` is + * emitted. + * + * ```js + * const { spawn } = require('node:child_process'); + * const grep = spawn('grep', ['ssh']); + * + * console.log(`Spawned child pid: ${grep.pid}`); + * grep.stdin.end(); + * ``` + * @since v0.1.90 + */ + readonly pid?: number | undefined; + /** + * The `subprocess.connected` property indicates whether it is still possible to + * send and receive messages from a child process. When `subprocess.connected` is`false`, it is no longer possible to send or receive messages. + * @since v0.7.2 + */ + readonly connected: boolean; + /** + * The `subprocess.exitCode` property indicates the exit code of the child process. + * If the child process is still running, the field will be `null`. + */ + readonly exitCode: number | null; + /** + * The `subprocess.signalCode` property indicates the signal received by + * the child process if any, else `null`. + */ + readonly signalCode: NodeJS.Signals | null; + /** + * The `subprocess.spawnargs` property represents the full list of command-line + * arguments the child process was launched with. + */ + readonly spawnargs: string[]; + /** + * The `subprocess.spawnfile` property indicates the executable file name of + * the child process that is launched. + * + * For {@link fork}, its value will be equal to `process.execPath`. + * For {@link spawn}, its value will be the name of + * the executable file. + * For {@link exec}, its value will be the name of the shell + * in which the child process is launched. + */ + readonly spawnfile: string; + /** + * The `subprocess.kill()` method sends a signal to the child process. If no + * argument is given, the process will be sent the `'SIGTERM'` signal. See [`signal(7)`](http://man7.org/linux/man-pages/man7/signal.7.html) for a list of available signals. This function + * returns `true` if [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) succeeds, and `false` otherwise. 
+ * + * ```js + * const { spawn } = require('node:child_process'); + * const grep = spawn('grep', ['ssh']); + * + * grep.on('close', (code, signal) => { + * console.log( + * `child process terminated due to receipt of signal ${signal}`); + * }); + * + * // Send SIGHUP to process. + * grep.kill('SIGHUP'); + * ``` + * + * The `ChildProcess` object may emit an `'error'` event if the signal + * cannot be delivered. Sending a signal to a child process that has already exited + * is not an error but may have unforeseen consequences. Specifically, if the + * process identifier (PID) has been reassigned to another process, the signal will + * be delivered to that process instead which can have unexpected results. + * + * While the function is called `kill`, the signal delivered to the child process + * may not actually terminate the process. + * + * See [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for reference. + * + * On Windows, where POSIX signals do not exist, the `signal` argument will be + * ignored, and the process will be killed forcefully and abruptly (similar to`'SIGKILL'`). + * See `Signal Events` for more details. + * + * On Linux, child processes of child processes will not be terminated + * when attempting to kill their parent. This is likely to happen when running a + * new process in a shell or with the use of the `shell` option of `ChildProcess`: + * + * ```js + * 'use strict'; + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn( + * 'sh', + * [ + * '-c', + * `node -e "setInterval(() => { + * console.log(process.pid, 'is alive') + * }, 500);"`, + * ], { + * stdio: ['inherit', 'inherit', 'inherit'], + * }, + * ); + * + * setTimeout(() => { + * subprocess.kill(); // Does not terminate the Node.js process in the shell. + * }, 2000); + * ``` + * @since v0.1.90 + */ + kill(signal?: NodeJS.Signals | number): boolean; + /** + * Calls {@link ChildProcess.kill} with `'SIGTERM'`. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + /** + * When an IPC channel has been established between the parent and child ( + * i.e. when using {@link fork}), the `subprocess.send()` method can + * be used to send messages to the child process. When the child process is a + * Node.js instance, these messages can be received via the `'message'` event. + * + * The message goes through serialization and parsing. The resulting + * message might not be the same as what is originally sent. + * + * For example, in the parent script: + * + * ```js + * const cp = require('node:child_process'); + * const n = cp.fork(`${__dirname}/sub.js`); + * + * n.on('message', (m) => { + * console.log('PARENT got message:', m); + * }); + * + * // Causes the child to print: CHILD got message: { hello: 'world' } + * n.send({ hello: 'world' }); + * ``` + * + * And then the child script, `'sub.js'` might look like this: + * + * ```js + * process.on('message', (m) => { + * console.log('CHILD got message:', m); + * }); + * + * // Causes the parent to print: PARENT got message: { foo: 'bar', baz: null } + * process.send({ foo: 'bar', baz: NaN }); + * ``` + * + * Child Node.js processes will have a `process.send()` method of their own + * that allows the child to send messages back to the parent. + * + * There is a special case when sending a `{cmd: 'NODE_foo'}` message. Messages + * containing a `NODE_` prefix in the `cmd` property are reserved for use within + * Node.js core and will not be emitted in the child's `'message'` event. 
Rather, such messages are emitted using the`'internalMessage'` event and are consumed internally by Node.js. + * Applications should avoid using such messages or listening for`'internalMessage'` events as it is subject to change without notice. + * + * The optional `sendHandle` argument that may be passed to `subprocess.send()` is + * for passing a TCP server or socket object to the child process. The child will + * receive the object as the second argument passed to the callback function + * registered on the `'message'` event. Any data that is received + * and buffered in the socket will not be sent to the child. + * + * The optional `callback` is a function that is invoked after the message is + * sent but before the child may have received it. The function is called with a + * single argument: `null` on success, or an `Error` object on failure. + * + * If no `callback` function is provided and the message cannot be sent, an`'error'` event will be emitted by the `ChildProcess` object. This can + * happen, for instance, when the child process has already exited. + * + * `subprocess.send()` will return `false` if the channel has closed or when the + * backlog of unsent messages exceeds a threshold that makes it unwise to send + * more. Otherwise, the method returns `true`. The `callback` function can be + * used to implement flow control. + * + * #### Example: sending a server object + * + * The `sendHandle` argument can be used, for instance, to pass the handle of + * a TCP server object to the child process as illustrated in the example below: + * + * ```js + * const subprocess = require('node:child_process').fork('subprocess.js'); + * + * // Open up the server object and send the handle. + * const server = require('node:net').createServer(); + * server.on('connection', (socket) => { + * socket.end('handled by parent'); + * }); + * server.listen(1337, () => { + * subprocess.send('server', server); + * }); + * ``` + * + * The child would then receive the server object as: + * + * ```js + * process.on('message', (m, server) => { + * if (m === 'server') { + * server.on('connection', (socket) => { + * socket.end('handled by child'); + * }); + * } + * }); + * ``` + * + * Once the server is now shared between the parent and child, some connections + * can be handled by the parent and some by the child. + * + * While the example above uses a server created using the `node:net` module,`node:dgram` module servers use exactly the same workflow with the exceptions of + * listening on a `'message'` event instead of `'connection'` and using`server.bind()` instead of `server.listen()`. This is, however, only + * supported on Unix platforms. + * + * #### Example: sending a socket object + * + * Similarly, the `sendHandler` argument can be used to pass the handle of a + * socket to the child process. The example below spawns two children that each + * handle connections with "normal" or "special" priority: + * + * ```js + * const { fork } = require('node:child_process'); + * const normal = fork('subprocess.js', ['normal']); + * const special = fork('subprocess.js', ['special']); + * + * // Open up the server and send sockets to child. Use pauseOnConnect to prevent + * // the sockets from being read before they are sent to the child process. + * const server = require('node:net').createServer({ pauseOnConnect: true }); + * server.on('connection', (socket) => { + * + * // If this is special priority... 
+ * if (socket.remoteAddress === '74.125.127.100') { + * special.send('socket', socket); + * return; + * } + * // This is normal priority. + * normal.send('socket', socket); + * }); + * server.listen(1337); + * ``` + * + * The `subprocess.js` would receive the socket handle as the second argument + * passed to the event callback function: + * + * ```js + * process.on('message', (m, socket) => { + * if (m === 'socket') { + * if (socket) { + * // Check that the client socket exists. + * // It is possible for the socket to be closed between the time it is + * // sent and the time it is received in the child process. + * socket.end(`Request handled with ${process.argv[2]} priority`); + * } + * } + * }); + * ``` + * + * Do not use `.maxConnections` on a socket that has been passed to a subprocess. + * The parent cannot track when the socket is destroyed. + * + * Any `'message'` handlers in the subprocess should verify that `socket` exists, + * as the connection may have been closed during the time it takes to send the + * connection to the child. + * @since v0.5.9 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties: + */ + send(message: Serializable, callback?: (error: Error | null) => void): boolean; + send(message: Serializable, sendHandle?: SendHandle, callback?: (error: Error | null) => void): boolean; + send( + message: Serializable, + sendHandle?: SendHandle, + options?: MessageOptions, + callback?: (error: Error | null) => void, + ): boolean; + /** + * Closes the IPC channel between parent and child, allowing the child to exit + * gracefully once there are no other connections keeping it alive. After calling + * this method the `subprocess.connected` and `process.connected` properties in + * both the parent and child (respectively) will be set to `false`, and it will be + * no longer possible to pass messages between the processes. + * + * The `'disconnect'` event will be emitted when there are no messages in the + * process of being received. This will most often be triggered immediately after + * calling `subprocess.disconnect()`. + * + * When the child process is a Node.js instance (e.g. spawned using {@link fork}), the `process.disconnect()` method can be invoked + * within the child process to close the IPC channel as well. + * @since v0.7.2 + */ + disconnect(): void; + /** + * By default, the parent will wait for the detached child to exit. To prevent the + * parent from waiting for a given `subprocess` to exit, use the`subprocess.unref()` method. Doing so will cause the parent's event loop to not + * include the child in its reference count, allowing the parent to exit + * independently of the child, unless there is an established IPC channel between + * the child and the parent. + * + * ```js + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore', + * }); + * + * subprocess.unref(); + * ``` + * @since v0.7.10 + */ + unref(): void; + /** + * Calling `subprocess.ref()` after making a call to `subprocess.unref()` will + * restore the removed reference count for the child process, forcing the parent + * to wait for the child to exit before exiting itself. 
+ * + * ```js + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore', + * }); + * + * subprocess.unref(); + * subprocess.ref(); + * ``` + * @since v0.7.10 + */ + ref(): void; + /** + * events.EventEmitter + * 1. close + * 2. disconnect + * 3. error + * 4. exit + * 5. message + * 6. spawn + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: "disconnect", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + addListener(event: "spawn", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close", code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: "disconnect"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "exit", code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: "message", message: Serializable, sendHandle: SendHandle): boolean; + emit(event: "spawn", listener: () => void): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: "disconnect", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + on(event: "spawn", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: "disconnect", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + once(event: "spawn", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: "disconnect", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependListener(event: "spawn", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "close", + listener: (code: number | null, signal: NodeJS.Signals | null) => void, + ): this; + prependOnceListener(event: "disconnect", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "exit", + listener: (code: number | null, signal: 
NodeJS.Signals | null) => void,
+        ): this;
+        prependOnceListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this;
+        prependOnceListener(event: "spawn", listener: () => void): this;
+    }
+    // return this object when stdio option is undefined or not specified
+    interface ChildProcessWithoutNullStreams extends ChildProcess {
+        stdin: Writable;
+        stdout: Readable;
+        stderr: Readable;
+        readonly stdio: [
+            Writable,
+            Readable,
+            Readable,
+            // stderr
+            Readable | Writable | null | undefined,
+            // extra, no modification
+            Readable | Writable | null | undefined, // extra, no modification
+        ];
+    }
+    // return this object when stdio option is a tuple of 3
+    interface ChildProcessByStdio<I extends null | Writable, O extends null | Readable, E extends null | Readable>
+        extends ChildProcess
+    {
+        stdin: I;
+        stdout: O;
+        stderr: E;
+        readonly stdio: [
+            I,
+            O,
+            E,
+            Readable | Writable | null | undefined,
+            // extra, no modification
+            Readable | Writable | null | undefined, // extra, no modification
+        ];
+    }
+    interface MessageOptions {
+        keepOpen?: boolean | undefined;
+    }
+    type IOType = "overlapped" | "pipe" | "ignore" | "inherit";
+    type StdioOptions = IOType | Array<IOType | "ipc" | Stream | number | null | undefined>;
+    type SerializationType = "json" | "advanced";
+    interface MessagingOptions extends Abortable {
+        /**
+         * Specify the kind of serialization used for sending messages between processes.
+         * @default 'json'
+         */
+        serialization?: SerializationType | undefined;
+        /**
+         * The signal value to be used when the spawned process will be killed by the abort signal.
+         * @default 'SIGTERM'
+         */
+        killSignal?: NodeJS.Signals | number | undefined;
+        /**
+         * In milliseconds the maximum amount of time the process is allowed to run.
+         */
+        timeout?: number | undefined;
+    }
+    interface ProcessEnvOptions {
+        uid?: number | undefined;
+        gid?: number | undefined;
+        cwd?: string | URL | undefined;
+        env?: NodeJS.ProcessEnv | undefined;
+    }
+    interface CommonOptions extends ProcessEnvOptions {
+        /**
+         * @default false
+         */
+        windowsHide?: boolean | undefined;
+        /**
+         * @default 0
+         */
+        timeout?: number | undefined;
+    }
+    interface CommonSpawnOptions extends CommonOptions, MessagingOptions, Abortable {
+        argv0?: string | undefined;
+        /**
+         * Can be set to 'pipe', 'inherit', 'overlapped', or 'ignore', or an array of these strings.
+         * If passed as an array, the first element is used for `stdin`, the second for
+         * `stdout`, and the third for `stderr`. A fourth element can be used to
+         * specify the `stdio` behavior beyond the standard streams. See
+         * {@link ChildProcess.stdio} for more information.
+         *
+         * @default 'pipe'
+         */
+        stdio?: StdioOptions | undefined;
+        shell?: boolean | string | undefined;
+        windowsVerbatimArguments?: boolean | undefined;
+    }
+    interface SpawnOptions extends CommonSpawnOptions {
+        detached?: boolean | undefined;
+    }
+    interface SpawnOptionsWithoutStdio extends SpawnOptions {
+        stdio?: StdioPipeNamed | StdioPipe[] | undefined;
+    }
+    type StdioNull = "inherit" | "ignore" | Stream;
+    type StdioPipeNamed = "pipe" | "overlapped";
+    type StdioPipe = undefined | null | StdioPipeNamed;
+    interface SpawnOptionsWithStdioTuple<
+        Stdin extends StdioNull | StdioPipe,
+        Stdout extends StdioNull | StdioPipe,
+        Stderr extends StdioNull | StdioPipe,
+    > extends SpawnOptions {
+        stdio: [Stdin, Stdout, Stderr];
+    }
+    /**
+     * The `child_process.spawn()` method spawns a new process using the given `command`, with command-line arguments in `args`. If omitted, `args` defaults
+     * to an empty array.
+ * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * + * A third argument may be used to specify additional options, with these defaults: + * + * ```js + * const defaults = { + * cwd: undefined, + * env: process.env, + * }; + * ``` + * + * Use `cwd` to specify the working directory from which the process is spawned. + * If not given, the default is to inherit the current working directory. If given, + * but the path does not exist, the child process emits an `ENOENT` error + * and exits immediately. `ENOENT` is also emitted when the command + * does not exist. + * + * Use `env` to specify environment variables that will be visible to the new + * process, the default is `process.env`. + * + * `undefined` values in `env` will be ignored. + * + * Example of running `ls -lh /usr`, capturing `stdout`, `stderr`, and the + * exit code: + * + * ```js + * const { spawn } = require('node:child_process'); + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * Example: A very elaborate way to run `ps ax | grep ssh` + * + * ```js + * const { spawn } = require('node:child_process'); + * const ps = spawn('ps', ['ax']); + * const grep = spawn('grep', ['ssh']); + * + * ps.stdout.on('data', (data) => { + * grep.stdin.write(data); + * }); + * + * ps.stderr.on('data', (data) => { + * console.error(`ps stderr: ${data}`); + * }); + * + * ps.on('close', (code) => { + * if (code !== 0) { + * console.log(`ps process exited with code ${code}`); + * } + * grep.stdin.end(); + * }); + * + * grep.stdout.on('data', (data) => { + * console.log(data.toString()); + * }); + * + * grep.stderr.on('data', (data) => { + * console.error(`grep stderr: ${data}`); + * }); + * + * grep.on('close', (code) => { + * if (code !== 0) { + * console.log(`grep process exited with code ${code}`); + * } + * }); + * ``` + * + * Example of checking for failed `spawn`: + * + * ```js + * const { spawn } = require('node:child_process'); + * const subprocess = spawn('bad_command'); + * + * subprocess.on('error', (err) => { + * console.error('Failed to start subprocess.'); + * }); + * ``` + * + * Certain platforms (macOS, Linux) will use the value of `argv[0]` for the process + * title while others (Windows, SunOS) will use `command`. + * + * Node.js overwrites `argv[0]` with `process.execPath` on startup, so`process.argv[0]` in a Node.js child process will not match the `argv0`parameter passed to `spawn` from the parent. Retrieve + * it with the`process.argv0` property instead. 
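+     *
+     * A minimal sketch of the `argv0` option (the file name `child_program.js` and the
+     * title `my-worker` below are placeholders, not part of the API):
+     *
+     * ```js
+     * const { spawn } = require('node:child_process');
+     *
+     * // Inside the spawned Node.js child, `process.argv0` reports 'my-worker',
+     * // while `process.argv[0]` still reports `process.execPath`.
+     * const subprocess = spawn(process.execPath, ['child_program.js'], {
+     *   argv0: 'my-worker',
+     * });
+     * ```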
+     *
+     * If the `signal` option is enabled, calling `.abort()` on the corresponding `AbortController` is similar to calling `.kill()` on the child process except
+     * the error passed to the callback will be an `AbortError`:
+     *
+     * ```js
+     * const { spawn } = require('node:child_process');
+     * const controller = new AbortController();
+     * const { signal } = controller;
+     * const grep = spawn('grep', ['ssh'], { signal });
+     * grep.on('error', (err) => {
+     *   // This will be called with err being an AbortError if the controller aborts
+     * });
+     * controller.abort(); // Stops the child process
+     * ```
+     * @since v0.1.90
+     * @param command The command to run.
+     * @param args List of string arguments.
+     */
+    function spawn(command: string, options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams;
+    function spawn(
+        command: string,
+        options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioPipe>,
+    ): ChildProcessByStdio<Writable, Readable, Readable>;
+    function spawn(
+        command: string,
+        options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioNull>,
+    ): ChildProcessByStdio<Writable, Readable, null>;
+    function spawn(
+        command: string,
+        options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioPipe>,
+    ): ChildProcessByStdio<Writable, null, Readable>;
+    function spawn(
+        command: string,
+        options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioPipe>,
+    ): ChildProcessByStdio<null, Readable, Readable>;
+    function spawn(
+        command: string,
+        options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioNull>,
+    ): ChildProcessByStdio<Writable, null, null>;
+    function spawn(
+        command: string,
+        options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioNull>,
+    ): ChildProcessByStdio<null, Readable, null>;
+    function spawn(
+        command: string,
+        options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioPipe>,
+    ): ChildProcessByStdio<null, null, Readable>;
+    function spawn(
+        command: string,
+        options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioNull>,
+    ): ChildProcessByStdio<null, null, null>;
+    function spawn(command: string, options: SpawnOptions): ChildProcess;
+    // overloads of spawn with 'args'
+    function spawn(
+        command: string,
+        args?: readonly string[],
+        options?: SpawnOptionsWithoutStdio,
+    ): ChildProcessWithoutNullStreams;
+    function spawn(
+        command: string,
+        args: readonly string[],
+        options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioPipe>,
+    ): ChildProcessByStdio<Writable, Readable, Readable>;
+    function spawn(
+        command: string,
+        args: readonly string[],
+        options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioNull>,
+    ): ChildProcessByStdio<Writable, Readable, null>;
+    function spawn(
+        command: string,
+        args: readonly string[],
+        options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioPipe>,
+    ): ChildProcessByStdio<Writable, null, Readable>;
+    function spawn(
+        command: string,
+        args: readonly string[],
+        options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioPipe>,
+    ): ChildProcessByStdio<null, Readable, Readable>;
+    function spawn(
+        command: string,
+        args: readonly string[],
+        options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioNull>,
+    ): ChildProcessByStdio<Writable, null, null>;
+    function spawn(
+        command: string,
+        args: readonly string[],
+        options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioNull>,
+    ): ChildProcessByStdio<null, Readable, null>;
+    function spawn(
+        command: string,
+        args: readonly string[],
+        options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioPipe>,
+    ): ChildProcessByStdio<null, null, Readable>;
+    function spawn(
+        command: string,
+        args: readonly string[],
+        options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioNull>,
+    ): ChildProcessByStdio<null, null, null>;
+    function spawn(command: string, args: readonly string[], options: SpawnOptions): ChildProcess;
+    interface ExecOptions extends CommonOptions {
+        shell?: string | undefined;
+        signal?: AbortSignal | undefined;
+        maxBuffer?: number | undefined;
+        killSignal?: NodeJS.Signals | number | undefined;
+    }
+    interface ExecOptionsWithStringEncoding extends ExecOptions {
+        encoding: BufferEncoding;
+    }
+    interface ExecOptionsWithBufferEncoding extends ExecOptions {
+        encoding: BufferEncoding | null; // specify `null`.
+ } + interface ExecException extends Error { + cmd?: string | undefined; + killed?: boolean | undefined; + code?: number | undefined; + signal?: NodeJS.Signals | undefined; + stdout?: string; + stderr?: string; + } + /** + * Spawns a shell then executes the `command` within that shell, buffering any + * generated output. The `command` string passed to the exec function is processed + * directly by the shell and special characters (vary based on [shell](https://en.wikipedia.org/wiki/List_of_command-line_interpreters)) + * need to be dealt with accordingly: + * + * ```js + * const { exec } = require('node:child_process'); + * + * exec('"/path/to/test file/test.sh" arg1 arg2'); + * // Double quotes are used so that the space in the path is not interpreted as + * // a delimiter of multiple arguments. + * + * exec('echo "The \\$HOME variable is $HOME"'); + * // The $HOME variable is escaped in the first instance, but not in the second. + * ``` + * + * **Never pass unsanitized user input to this function. Any input containing shell** + * **metacharacters may be used to trigger arbitrary command execution.** + * + * If a `callback` function is provided, it is called with the arguments`(error, stdout, stderr)`. On success, `error` will be `null`. On error,`error` will be an instance of `Error`. The + * `error.code` property will be + * the exit code of the process. By convention, any exit code other than `0`indicates an error. `error.signal` will be the signal that terminated the + * process. + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * ```js + * const { exec } = require('node:child_process'); + * exec('cat *.js missing_file | wc -l', (error, stdout, stderr) => { + * if (error) { + * console.error(`exec error: ${error}`); + * return; + * } + * console.log(`stdout: ${stdout}`); + * console.error(`stderr: ${stderr}`); + * }); + * ``` + * + * If `timeout` is greater than `0`, the parent will send the signal + * identified by the `killSignal` property (the default is `'SIGTERM'`) if the + * child runs longer than `timeout` milliseconds. + * + * Unlike the [`exec(3)`](http://man7.org/linux/man-pages/man3/exec.3.html) POSIX system call, `child_process.exec()` does not replace + * the existing process and uses a shell to execute the command. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned`ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. 
+ * + * ```js + * const util = require('node:util'); + * const exec = util.promisify(require('node:child_process').exec); + * + * async function lsExample() { + * const { stdout, stderr } = await exec('ls'); + * console.log('stdout:', stdout); + * console.error('stderr:', stderr); + * } + * lsExample(); + * ``` + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { exec } = require('node:child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = exec('grep ssh', { signal }, (error) => { + * console.error(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.90 + * @param command The command to run, with space-separated arguments. + * @param callback called with the output when process terminates. + */ + function exec( + command: string, + callback?: (error: ExecException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. + function exec( + command: string, + options: { + encoding: "buffer" | null; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: Buffer, stderr: Buffer) => void, + ): ChildProcess; + // `options` with well known `encoding` means stdout/stderr are definitely `string`. + function exec( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. + // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. + function exec( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + // `options` without an `encoding` means stdout/stderr are definitely `string`. + function exec( + command: string, + options: ExecOptions, + callback?: (error: ExecException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // fallback if nothing else matches. Worst case is always `string | Buffer`. 
+ function exec( + command: string, + options: (ObjectEncodingOptions & ExecOptions) | undefined | null, + callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + interface PromiseWithChild extends Promise { + child: ChildProcess; + } + namespace exec { + function __promisify__(command: string): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options: { + encoding: "buffer" | null; + } & ExecOptions, + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options: ExecOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options?: (ObjectEncodingOptions & ExecOptions) | null, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + } + interface ExecFileOptions extends CommonOptions, Abortable { + maxBuffer?: number | undefined; + killSignal?: NodeJS.Signals | number | undefined; + windowsVerbatimArguments?: boolean | undefined; + shell?: boolean | string | undefined; + signal?: AbortSignal | undefined; + } + interface ExecFileOptionsWithStringEncoding extends ExecFileOptions { + encoding: BufferEncoding; + } + interface ExecFileOptionsWithBufferEncoding extends ExecFileOptions { + encoding: "buffer" | null; + } + interface ExecFileOptionsWithOtherEncoding extends ExecFileOptions { + encoding: BufferEncoding; + } + type ExecFileException = + & Omit + & Omit + & { code?: string | number | undefined | null }; + /** + * The `child_process.execFile()` function is similar to {@link exec} except that it does not spawn a shell by default. Rather, the specified + * executable `file` is spawned directly as a new process making it slightly more + * efficient than {@link exec}. + * + * The same options as {@link exec} are supported. Since a shell is + * not spawned, behaviors such as I/O redirection and file globbing are not + * supported. + * + * ```js + * const { execFile } = require('node:child_process'); + * const child = execFile('node', ['--version'], (error, stdout, stderr) => { + * if (error) { + * throw error; + * } + * console.log(stdout); + * }); + * ``` + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned`ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. 
+ * + * ```js + * const util = require('node:util'); + * const execFile = util.promisify(require('node:child_process').execFile); + * async function getVersion() { + * const { stdout } = await execFile('node', ['--version']); + * console.log(stdout); + * } + * getVersion(); + * ``` + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { execFile } = require('node:child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = execFile('node', ['--version'], { signal }, (error) => { + * console.error(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.91 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @param callback Called with the output when process terminates. + */ + function execFile(file: string): ChildProcess; + function execFile( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): ChildProcess; + function execFile(file: string, args?: readonly string[] | null): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): ChildProcess; + // no `options` definitely means stdout/stderr are `string`. + function execFile( + file: string, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. + function execFile( + file: string, + options: ExecFileOptionsWithBufferEncoding, + callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithBufferEncoding, + callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void, + ): ChildProcess; + // `options` with well known `encoding` means stdout/stderr are definitely `string`. + function execFile( + file: string, + options: ExecFileOptionsWithStringEncoding, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithStringEncoding, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. + // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. 
+ function execFile( + file: string, + options: ExecFileOptionsWithOtherEncoding, + callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithOtherEncoding, + callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + // `options` without an `encoding` means stdout/stderr are definitely `string`. + function execFile( + file: string, + options: ExecFileOptions, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptions, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // fallback if nothing else matches. Worst case is always `string | Buffer`. + function execFile( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: + | ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) + | undefined + | null, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: + | ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) + | undefined + | null, + ): ChildProcess; + namespace execFile { + function __promisify__(file: string): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithBufferEncoding, + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithBufferEncoding, + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithStringEncoding, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithStringEncoding, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithOtherEncoding, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithOtherEncoding, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | 
null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + } + interface ForkOptions extends ProcessEnvOptions, MessagingOptions, Abortable { + execPath?: string | undefined; + execArgv?: string[] | undefined; + silent?: boolean | undefined; + /** + * Can be set to 'pipe', 'inherit', 'overlapped', or 'ignore', or an array of these strings. + * If passed as an array, the first element is used for `stdin`, the second for + * `stdout`, and the third for `stderr`. A fourth element can be used to + * specify the `stdio` behavior beyond the standard streams. See + * {@link ChildProcess.stdio} for more information. + * + * @default 'pipe' + */ + stdio?: StdioOptions | undefined; + detached?: boolean | undefined; + windowsVerbatimArguments?: boolean | undefined; + } + /** + * The `child_process.fork()` method is a special case of {@link spawn} used specifically to spawn new Node.js processes. + * Like {@link spawn}, a `ChildProcess` object is returned. The + * returned `ChildProcess` will have an additional communication channel + * built-in that allows messages to be passed back and forth between the parent and + * child. See `subprocess.send()` for details. + * + * Keep in mind that spawned Node.js child processes are + * independent of the parent with exception of the IPC communication channel + * that is established between the two. Each process has its own memory, with + * their own V8 instances. Because of the additional resource allocations + * required, spawning a large number of child Node.js processes is not + * recommended. + * + * By default, `child_process.fork()` will spawn new Node.js instances using the `process.execPath` of the parent process. The `execPath` property in the`options` object allows for an alternative + * execution path to be used. + * + * Node.js processes launched with a custom `execPath` will communicate with the + * parent process using the file descriptor (fd) identified using the + * environment variable `NODE_CHANNEL_FD` on the child process. + * + * Unlike the [`fork(2)`](http://man7.org/linux/man-pages/man2/fork.2.html) POSIX system call, `child_process.fork()` does not clone the + * current process. + * + * The `shell` option available in {@link spawn} is not supported by`child_process.fork()` and will be ignored if set. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * if (process.argv[2] === 'child') { + * setTimeout(() => { + * console.log(`Hello from ${process.argv[2]}!`); + * }, 1_000); + * } else { + * const { fork } = require('node:child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = fork(__filename, ['child'], { signal }); + * child.on('error', (err) => { + * // This will be called with err being an AbortError if the controller aborts + * }); + * controller.abort(); // Stops the child process + * } + * ``` + * @since v0.5.0 + * @param modulePath The module to run in the child. + * @param args List of string arguments. 
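+     *
+     * As a brief sketch of the parent/child message flow described above (the file
+     * name `sub.js` is illustrative; see `subprocess.send()` for full details):
+     *
+     * ```js
+     * // parent.js
+     * const { fork } = require('node:child_process');
+     * const child = fork('sub.js');
+     * child.on('message', (m) => console.log('PARENT got:', m));
+     * child.send({ hello: 'world' });
+     *
+     * // sub.js
+     * process.on('message', (m) => {
+     *   console.log('CHILD got:', m);
+     *   process.send({ ok: true });
+     * });
+     * ```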
+ */
+ function fork(modulePath: string, options?: ForkOptions): ChildProcess;
+ function fork(modulePath: string, args?: readonly string[], options?: ForkOptions): ChildProcess;
+ interface SpawnSyncOptions extends CommonSpawnOptions {
+ input?: string | NodeJS.ArrayBufferView | undefined;
+ maxBuffer?: number | undefined;
+ encoding?: BufferEncoding | "buffer" | null | undefined;
+ }
+ interface SpawnSyncOptionsWithStringEncoding extends SpawnSyncOptions {
+ encoding: BufferEncoding;
+ }
+ interface SpawnSyncOptionsWithBufferEncoding extends SpawnSyncOptions {
+ encoding?: "buffer" | null | undefined;
+ }
+ interface SpawnSyncReturns<T> {
+ pid: number;
+ output: Array<T | null>;
+ stdout: T;
+ stderr: T;
+ status: number | null;
+ signal: NodeJS.Signals | null;
+ error?: Error | undefined;
+ }
+ /**
+ * The `child_process.spawnSync()` method is generally identical to {@link spawn} with the exception that the function will not return
+ * until the child process has fully closed. When a timeout has been encountered
+ * and `killSignal` is sent, the method won't return until the process has
+ * completely exited. If the process intercepts and handles the `SIGTERM` signal
+ * and doesn't exit, the parent process will wait until the child process has
+ * exited.
+ *
+ * **If the `shell` option is enabled, do not pass unsanitized user input to this**
+ * **function. Any input containing shell metacharacters may be used to trigger**
+ * **arbitrary command execution.**
+ * @since v0.11.12
+ * @param command The command to run.
+ * @param args List of string arguments.
+ */
+ function spawnSync(command: string): SpawnSyncReturns<Buffer>;
+ function spawnSync(command: string, options: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns<string>;
+ function spawnSync(command: string, options: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns<Buffer>;
+ function spawnSync(command: string, options?: SpawnSyncOptions): SpawnSyncReturns<string | Buffer>;
+ function spawnSync(command: string, args: readonly string[]): SpawnSyncReturns<Buffer>;
+ function spawnSync(
+ command: string,
+ args: readonly string[],
+ options: SpawnSyncOptionsWithStringEncoding,
+ ): SpawnSyncReturns<string>;
+ function spawnSync(
+ command: string,
+ args: readonly string[],
+ options: SpawnSyncOptionsWithBufferEncoding,
+ ): SpawnSyncReturns<Buffer>;
+ function spawnSync(
+ command: string,
+ args?: readonly string[],
+ options?: SpawnSyncOptions,
+ ): SpawnSyncReturns<string | Buffer>;
+ interface CommonExecOptions extends CommonOptions {
+ input?: string | NodeJS.ArrayBufferView | undefined;
+ /**
+ * Can be set to 'pipe', 'inherit', or 'ignore', or an array of these strings.
+ * If passed as an array, the first element is used for `stdin`, the second for
+ * `stdout`, and the third for `stderr`. A fourth element can be used to
+ * specify the `stdio` behavior beyond the standard streams. See
+ * {@link ChildProcess.stdio} for more information.
+ * + * @default 'pipe' + */ + stdio?: StdioOptions | undefined; + killSignal?: NodeJS.Signals | number | undefined; + maxBuffer?: number | undefined; + encoding?: BufferEncoding | "buffer" | null | undefined; + } + interface ExecSyncOptions extends CommonExecOptions { + shell?: string | undefined; + } + interface ExecSyncOptionsWithStringEncoding extends ExecSyncOptions { + encoding: BufferEncoding; + } + interface ExecSyncOptionsWithBufferEncoding extends ExecSyncOptions { + encoding?: "buffer" | null | undefined; + } + /** + * The `child_process.execSync()` method is generally identical to {@link exec} with the exception that the method will not return + * until the child process has fully closed. When a timeout has been encountered + * and `killSignal` is sent, the method won't return until the process has + * completely exited. If the child process intercepts and handles the `SIGTERM`signal and doesn't exit, the parent process will wait until the child process + * has exited. + * + * If the process times out or has a non-zero exit code, this method will throw. + * The `Error` object will contain the entire result from {@link spawnSync}. + * + * **Never pass unsanitized user input to this function. Any input containing shell** + * **metacharacters may be used to trigger arbitrary command execution.** + * @since v0.11.12 + * @param command The command to run. + * @return The stdout from the command. + */ + function execSync(command: string): Buffer; + function execSync(command: string, options: ExecSyncOptionsWithStringEncoding): string; + function execSync(command: string, options: ExecSyncOptionsWithBufferEncoding): Buffer; + function execSync(command: string, options?: ExecSyncOptions): string | Buffer; + interface ExecFileSyncOptions extends CommonExecOptions { + shell?: boolean | string | undefined; + } + interface ExecFileSyncOptionsWithStringEncoding extends ExecFileSyncOptions { + encoding: BufferEncoding; + } + interface ExecFileSyncOptionsWithBufferEncoding extends ExecFileSyncOptions { + encoding?: "buffer" | null; // specify `null`. + } + /** + * The `child_process.execFileSync()` method is generally identical to {@link execFile} with the exception that the method will not + * return until the child process has fully closed. When a timeout has been + * encountered and `killSignal` is sent, the method won't return until the process + * has completely exited. + * + * If the child process intercepts and handles the `SIGTERM` signal and + * does not exit, the parent process will still wait until the child process has + * exited. + * + * If the process times out or has a non-zero exit code, this method will throw an `Error` that will include the full result of the underlying {@link spawnSync}. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * @since v0.11.12 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @return The stdout from the command. 
+ */ + function execFileSync(file: string): Buffer; + function execFileSync(file: string, options: ExecFileSyncOptionsWithStringEncoding): string; + function execFileSync(file: string, options: ExecFileSyncOptionsWithBufferEncoding): Buffer; + function execFileSync(file: string, options?: ExecFileSyncOptions): string | Buffer; + function execFileSync(file: string, args: readonly string[]): Buffer; + function execFileSync( + file: string, + args: readonly string[], + options: ExecFileSyncOptionsWithStringEncoding, + ): string; + function execFileSync( + file: string, + args: readonly string[], + options: ExecFileSyncOptionsWithBufferEncoding, + ): Buffer; + function execFileSync(file: string, args?: readonly string[], options?: ExecFileSyncOptions): string | Buffer; +} +declare module "node:child_process" { + export * from "child_process"; +} diff --git a/node_modules/@types/node/cluster.d.ts b/node_modules/@types/node/cluster.d.ts new file mode 100644 index 0000000..60fbbd4 --- /dev/null +++ b/node_modules/@types/node/cluster.d.ts @@ -0,0 +1,578 @@ +/** + * Clusters of Node.js processes can be used to run multiple instances of Node.js + * that can distribute workloads among their application threads. When process isolation + * is not needed, use the [`worker_threads`](https://nodejs.org/docs/latest-v20.x/api/worker_threads.html) + * module instead, which allows running multiple application threads within a single Node.js instance. + * + * The cluster module allows easy creation of child processes that all share + * server ports. + * + * ```js + * import cluster from 'node:cluster'; + * import http from 'node:http'; + * import { availableParallelism } from 'node:os'; + * import process from 'node:process'; + * + * const numCPUs = availableParallelism(); + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('exit', (worker, code, signal) => { + * console.log(`worker ${worker.process.pid} died`); + * }); + * } else { + * // Workers can share any TCP connection + * // In this case it is an HTTP server + * http.createServer((req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * + * console.log(`Worker ${process.pid} started`); + * } + * ``` + * + * Running Node.js will now share port 8000 between the workers: + * + * ```console + * $ node server.js + * Primary 3596 is running + * Worker 4324 started + * Worker 4520 started + * Worker 6056 started + * Worker 5644 started + * ``` + * + * On Windows, it is not yet possible to set up a named pipe server in a worker. + * @see [source](https://github.com/nodejs/node/blob/v20.11.1/lib/cluster.js) + */ +declare module "cluster" { + import * as child from "node:child_process"; + import EventEmitter = require("node:events"); + import * as net from "node:net"; + type SerializationType = "json" | "advanced"; + export interface ClusterSettings { + /** + * List of string arguments passed to the Node.js executable. + * @default process.execArgv + */ + execArgv?: string[] | undefined; + /** + * File path to worker file. + * @default process.argv[1] + */ + exec?: string | undefined; + /** + * String arguments passed to worker. + * @default process.argv.slice(2) + */ + args?: string[] | undefined; + /** + * Whether or not to send output to parent's stdio. + * @default false + */ + silent?: boolean | undefined; + /** + * Configures the stdio of forked processes. 
Because the cluster module relies on IPC to function, this configuration must + * contain an `'ipc'` entry. When this option is provided, it overrides `silent`. See [`child_prcess.spawn()`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#child_processspawncommand-args-options)'s + * [`stdio`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#optionsstdio). + */ + stdio?: any[] | undefined; + /** + * Sets the user identity of the process. (See [`setuid(2)`](https://man7.org/linux/man-pages/man2/setuid.2.html).) + */ + uid?: number | undefined; + /** + * Sets the group identity of the process. (See [`setgid(2)`](https://man7.org/linux/man-pages/man2/setgid.2.html).) + */ + gid?: number | undefined; + /** + * Sets inspector port of worker. This can be a number, or a function that takes no arguments and returns a number. + * By default each worker gets its own port, incremented from the primary's `process.debugPort`. + */ + inspectPort?: number | (() => number) | undefined; + /** + * Specify the kind of serialization used for sending messages between processes. Possible values are `'json'` and `'advanced'`. + * See [Advanced serialization for `child_process`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#advanced-serialization) for more details. + * @default false + */ + serialization?: SerializationType | undefined; + /** + * Current working directory of the worker process. + * @default undefined (inherits from parent process) + */ + cwd?: string | undefined; + /** + * Hide the forked processes console window that would normally be created on Windows systems. + * @default false + */ + windowsHide?: boolean | undefined; + } + export interface Address { + address: string; + port: number; + /** + * The `addressType` is one of: + * + * * `4` (TCPv4) + * * `6` (TCPv6) + * * `-1` (Unix domain socket) + * * `'udp4'` or `'udp6'` (UDPv4 or UDPv6) + */ + addressType: 4 | 6 | -1 | "udp4" | "udp6"; + } + /** + * A `Worker` object contains all public information and method about a worker. + * In the primary it can be obtained using `cluster.workers`. In a worker + * it can be obtained using `cluster.worker`. + * @since v0.7.0 + */ + export class Worker extends EventEmitter { + /** + * Each new worker is given its own unique id, this id is stored in the `id`. + * + * While a worker is alive, this is the key that indexes it in `cluster.workers`. + * @since v0.8.0 + */ + id: number; + /** + * All workers are created using [`child_process.fork()`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#child_processforkmodulepath-args-options), the returned object + * from this function is stored as `.process`. In a worker, the global `process` is stored. + * + * See: [Child Process module](https://nodejs.org/docs/latest-v20.x/api/child_process.html#child_processforkmodulepath-args-options). + * + * Workers will call `process.exit(0)` if the `'disconnect'` event occurs + * on `process` and `.exitedAfterDisconnect` is not `true`. This protects against + * accidental disconnection. + * @since v0.7.0 + */ + process: child.ChildProcess; + /** + * Send a message to a worker or primary, optionally with a handle. + * + * In the primary, this sends a message to a specific worker. It is identical to [`ChildProcess.send()`](https://nodejs.org/docs/latest-v20.x/api/child_process.html#subprocesssendmessage-sendhandle-options-callback). + * + * In a worker, this sends a message to the primary. It is identical to `process.send()`. 
+ * + * This example will echo back all messages from the primary: + * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * worker.send('hi there'); + * + * } else if (cluster.isWorker) { + * process.on('message', (msg) => { + * process.send(msg); + * }); + * } + * ``` + * @since v0.7.0 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. + */ + send(message: child.Serializable, callback?: (error: Error | null) => void): boolean; + send( + message: child.Serializable, + sendHandle: child.SendHandle, + callback?: (error: Error | null) => void, + ): boolean; + send( + message: child.Serializable, + sendHandle: child.SendHandle, + options?: child.MessageOptions, + callback?: (error: Error | null) => void, + ): boolean; + /** + * This function will kill the worker. In the primary worker, it does this by + * disconnecting the `worker.process`, and once disconnected, killing with `signal`. In the worker, it does it by killing the process with `signal`. + * + * The `kill()` function kills the worker process without waiting for a graceful + * disconnect, it has the same behavior as `worker.process.kill()`. + * + * This method is aliased as `worker.destroy()` for backwards compatibility. + * + * In a worker, `process.kill()` exists, but it is not this function; + * it is [`kill()`](https://nodejs.org/docs/latest-v20.x/api/process.html#processkillpid-signal). + * @since v0.9.12 + * @param [signal='SIGTERM'] Name of the kill signal to send to the worker process. + */ + kill(signal?: string): void; + destroy(signal?: string): void; + /** + * In a worker, this function will close all servers, wait for the `'close'` event + * on those servers, and then disconnect the IPC channel. + * + * In the primary, an internal message is sent to the worker causing it to call `.disconnect()` on itself. + * + * Causes `.exitedAfterDisconnect` to be set. + * + * After a server is closed, it will no longer accept new connections, + * but connections may be accepted by any other listening worker. Existing + * connections will be allowed to close as usual. When no more connections exist, + * see `server.close()`, the IPC channel to the worker will close allowing it + * to die gracefully. + * + * The above applies _only_ to server connections, client connections are not + * automatically closed by workers, and disconnect does not wait for them to close + * before exiting. + * + * In a worker, `process.disconnect` exists, but it is not this function; + * it is `disconnect()`. + * + * Because long living server connections may block workers from disconnecting, it + * may be useful to send a message, so application specific actions may be taken to + * close them. It also may be useful to implement a timeout, killing a worker if + * the `'disconnect'` event has not been emitted after some time. 
+ * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * let timeout; + * + * worker.on('listening', (address) => { + * worker.send('shutdown'); + * worker.disconnect(); + * timeout = setTimeout(() => { + * worker.kill(); + * }, 2000); + * }); + * + * worker.on('disconnect', () => { + * clearTimeout(timeout); + * }); + * + * } else if (cluster.isWorker) { + * const net = require('node:net'); + * const server = net.createServer((socket) => { + * // Connections never end + * }); + * + * server.listen(8000); + * + * process.on('message', (msg) => { + * if (msg === 'shutdown') { + * // Initiate graceful close of any connections to server + * } + * }); + * } + * ``` + * @since v0.7.7 + * @return A reference to `worker`. + */ + disconnect(): void; + /** + * This function returns `true` if the worker is connected to its primary via its + * IPC channel, `false` otherwise. A worker is connected to its primary after it + * has been created. It is disconnected after the `'disconnect'` event is emitted. + * @since v0.11.14 + */ + isConnected(): boolean; + /** + * This function returns `true` if the worker's process has terminated (either + * because of exiting or being signaled). Otherwise, it returns `false`. + * + * ```js + * import cluster from 'node:cluster'; + * import http from 'node:http'; + * import { availableParallelism } from 'node:os'; + * import process from 'node:process'; + * + * const numCPUs = availableParallelism(); + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('fork', (worker) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * + * cluster.on('exit', (worker, code, signal) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * } else { + * // Workers can share any TCP connection. In this case, it is an HTTP server. + * http.createServer((req, res) => { + * res.writeHead(200); + * res.end(`Current process\n ${process.pid}`); + * process.kill(process.pid); + * }).listen(8000); + * } + * ``` + * @since v0.11.14 + */ + isDead(): boolean; + /** + * This property is `true` if the worker exited due to `.disconnect()`. + * If the worker exited any other way, it is `false`. If the + * worker has not exited, it is `undefined`. + * + * The boolean `worker.exitedAfterDisconnect` allows distinguishing between + * voluntary and accidental exit, the primary may choose not to respawn a worker + * based on this value. + * + * ```js + * cluster.on('exit', (worker, code, signal) => { + * if (worker.exitedAfterDisconnect === true) { + * console.log('Oh, it was just voluntary – no need to worry'); + * } + * }); + * + * // kill worker + * worker.kill(); + * ``` + * @since v6.0.0 + */ + exitedAfterDisconnect: boolean; + /** + * events.EventEmitter + * 1. disconnect + * 2. error + * 3. exit + * 4. listening + * 5. message + * 6. online + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "disconnect", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + addListener(event: "exit", listener: (code: number, signal: string) => void): this; + addListener(event: "listening", listener: (address: Address) => void): this; + addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
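The `Worker` listener overloads above pin down each event's callback signature: `'online'` gets no arguments, `'message'` receives the message plus an optional handle, and `'exit'` receives the exit code and signal. A minimal sketch of how those signatures line up at runtime, using only the `cluster` and `process` APIs documented here; the message payload shapes are illustrative, not part of the API:

```js
const cluster = require('node:cluster');

if (cluster.isPrimary) {
  const worker = cluster.fork();

  worker.on('online', () => worker.send({ cmd: 'start' }));         // listener takes no arguments
  worker.on('message', (msg) => console.log('from worker:', msg));  // (message, handle?)
  worker.on('exit', (code, signal) => {
    console.log(`worker ${worker.id} exited with`, signal ?? code); // (code, signal)
  });
} else {
  // In the worker, the IPC channel is reached through the global `process`.
  process.on('message', (msg) => {
    process.send({ ack: msg.cmd });
    process.exit(0);
  });
}
```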
+ addListener(event: "online", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "disconnect"): boolean; + emit(event: "error", error: Error): boolean; + emit(event: "exit", code: number, signal: string): boolean; + emit(event: "listening", address: Address): boolean; + emit(event: "message", message: any, handle: net.Socket | net.Server): boolean; + emit(event: "online"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "disconnect", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + on(event: "exit", listener: (code: number, signal: string) => void): this; + on(event: "listening", listener: (address: Address) => void): this; + on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + on(event: "online", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "disconnect", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + once(event: "exit", listener: (code: number, signal: string) => void): this; + once(event: "listening", listener: (address: Address) => void): this; + once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + once(event: "online", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "disconnect", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependListener(event: "exit", listener: (code: number, signal: string) => void): this; + prependListener(event: "listening", listener: (address: Address) => void): this; + prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + prependListener(event: "online", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "disconnect", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this; + prependOnceListener(event: "listening", listener: (address: Address) => void): this; + prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + prependOnceListener(event: "online", listener: () => void): this; + } + export interface Cluster extends EventEmitter { + disconnect(callback?: () => void): void; + /** + * Spawn a new worker process. + * + * This can only be called from the primary process. + * @param env Key/value pairs to add to worker process environment. + * @since v0.6.0 + */ + fork(env?: any): Worker; + /** @deprecated since v16.0.0 - use isPrimary. */ + readonly isMaster: boolean; + /** + * True if the process is a primary. This is determined by the `process.env.NODE_UNIQUE_ID`. If `process.env.NODE_UNIQUE_ID` + * is undefined, then `isPrimary` is `true`. + * @since v16.0.0 + */ + readonly isPrimary: boolean; + /** + * True if the process is not a primary (it is the negation of `cluster.isPrimary`). 
+ * @since v0.6.0 + */ + readonly isWorker: boolean; + /** + * The scheduling policy, either `cluster.SCHED_RR` for round-robin or `cluster.SCHED_NONE` to leave it to the operating system. This is a + * global setting and effectively frozen once either the first worker is spawned, or [`.setupPrimary()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clustersetupprimarysettings) + * is called, whichever comes first. + * + * `SCHED_RR` is the default on all operating systems except Windows. Windows will change to `SCHED_RR` once libuv is able to effectively distribute + * IOCP handles without incurring a large performance hit. + * + * `cluster.schedulingPolicy` can also be set through the `NODE_CLUSTER_SCHED_POLICY` environment variable. Valid values are `'rr'` and `'none'`. + * @since v0.11.2 + */ + schedulingPolicy: number; + /** + * After calling [`.setupPrimary()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clustersetupprimarysettings) + * (or [`.fork()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clusterforkenv)) this settings object will contain + * the settings, including the default values. + * + * This object is not intended to be changed or set manually. + * @since v0.7.1 + */ + readonly settings: ClusterSettings; + /** @deprecated since v16.0.0 - use [`.setupPrimary()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clustersetupprimarysettings) instead. */ + setupMaster(settings?: ClusterSettings): void; + /** + * `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in `cluster.settings`. + * + * Any settings changes only affect future calls to [`.fork()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clusterforkenv) + * and have no effect on workers that are already running. + * + * The only attribute of a worker that cannot be set via `.setupPrimary()` is the `env` passed to + * [`.fork()`](https://nodejs.org/docs/latest-v20.x/api/cluster.html#clusterforkenv). + * + * The defaults above apply to the first call only; the defaults for later calls are the current values at the time of + * `cluster.setupPrimary()` is called. + * + * ```js + * import cluster from 'node:cluster'; + * + * cluster.setupPrimary({ + * exec: 'worker.js', + * args: ['--use', 'https'], + * silent: true, + * }); + * cluster.fork(); // https worker + * cluster.setupPrimary({ + * exec: 'worker.js', + * args: ['--use', 'http'], + * }); + * cluster.fork(); // http worker + * ``` + * + * This can only be called from the primary process. + * @since v16.0.0 + */ + setupPrimary(settings?: ClusterSettings): void; + /** + * A reference to the current worker object. Not available in the primary process. + * + * ```js + * import cluster from 'node:cluster'; + * + * if (cluster.isPrimary) { + * console.log('I am primary'); + * cluster.fork(); + * cluster.fork(); + * } else if (cluster.isWorker) { + * console.log(`I am worker #${cluster.worker.id}`); + * } + * ``` + * @since v0.7.0 + */ + readonly worker?: Worker | undefined; + /** + * A hash that stores the active worker objects, keyed by `id` field. This makes it easy to loop through all the workers. It is only available in the primary process. + * + * A worker is removed from `cluster.workers` after the worker has disconnected _and_ exited. The order between these two events cannot be determined in advance. However, it + * is guaranteed that the removal from the `cluster.workers` list happens before the last `'disconnect'` or `'exit'` event is emitted. 
+ *
+ * ```js
+ * import cluster from 'node:cluster';
+ *
+ * for (const worker of Object.values(cluster.workers)) {
+ * worker.send('big announcement to all workers');
+ * }
+ * ```
+ * @since v0.7.0
+ */
+ readonly workers?: NodeJS.Dict<Worker> | undefined;
+ readonly SCHED_NONE: number;
+ readonly SCHED_RR: number;
+ /**
+ * events.EventEmitter
+ * 1. disconnect
+ * 2. exit
+ * 3. fork
+ * 4. listening
+ * 5. message
+ * 6. online
+ * 7. setup
+ */
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ addListener(event: "disconnect", listener: (worker: Worker) => void): this;
+ addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
+ addListener(event: "fork", listener: (worker: Worker) => void): this;
+ addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
+ addListener(
+ event: "message",
+ listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void,
+ ): this; // the handle is a net.Socket or net.Server object, or undefined.
+ addListener(event: "online", listener: (worker: Worker) => void): this;
+ addListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
+ emit(event: string | symbol, ...args: any[]): boolean;
+ emit(event: "disconnect", worker: Worker): boolean;
+ emit(event: "exit", worker: Worker, code: number, signal: string): boolean;
+ emit(event: "fork", worker: Worker): boolean;
+ emit(event: "listening", worker: Worker, address: Address): boolean;
+ emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean;
+ emit(event: "online", worker: Worker): boolean;
+ emit(event: "setup", settings: ClusterSettings): boolean;
+ on(event: string, listener: (...args: any[]) => void): this;
+ on(event: "disconnect", listener: (worker: Worker) => void): this;
+ on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
+ on(event: "fork", listener: (worker: Worker) => void): this;
+ on(event: "listening", listener: (worker: Worker, address: Address) => void): this;
+ on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
+ on(event: "online", listener: (worker: Worker) => void): this;
+ on(event: "setup", listener: (settings: ClusterSettings) => void): this;
+ once(event: string, listener: (...args: any[]) => void): this;
+ once(event: "disconnect", listener: (worker: Worker) => void): this;
+ once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
+ once(event: "fork", listener: (worker: Worker) => void): this;
+ once(event: "listening", listener: (worker: Worker, address: Address) => void): this;
+ once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
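In the `Cluster`-level overloads above, the first listener argument is always the `Worker` involved, and `cluster.workers` (primary only) indexes live workers by id. A small sketch of the common respawn-on-exit pattern built from exactly these members; the respawn policy and message payloads are illustrative choices, not prescribed by the module:

```js
const cluster = require('node:cluster');

if (cluster.isPrimary) {
  cluster.fork();
  cluster.fork();

  cluster.on('online', (worker) => worker.send('welcome'));   // (worker)
  cluster.on('exit', (worker, code, signal) => {              // (worker, code, signal)
    console.log(`worker ${worker.id} died (${signal || code}), forking a replacement`);
    cluster.fork();
  });

  // `cluster.workers` is keyed by worker id and only populated in the primary.
  console.log('active workers:', Object.keys(cluster.workers).length);
} else {
  process.on('message', (msg) => console.log(`worker ${cluster.worker.id}:`, msg));
}
```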
+ once(event: "online", listener: (worker: Worker) => void): this; + once(event: "setup", listener: (settings: ClusterSettings) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "disconnect", listener: (worker: Worker) => void): this; + prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + prependListener(event: "fork", listener: (worker: Worker) => void): this; + prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. + prependListener( + event: "message", + listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void, + ): this; + prependListener(event: "online", listener: (worker: Worker) => void): this; + prependListener(event: "setup", listener: (settings: ClusterSettings) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this; + prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + prependOnceListener(event: "fork", listener: (worker: Worker) => void): this; + prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. + prependOnceListener( + event: "message", + listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void, + ): this; + prependOnceListener(event: "online", listener: (worker: Worker) => void): this; + prependOnceListener(event: "setup", listener: (settings: ClusterSettings) => void): this; + } + const cluster: Cluster; + export default cluster; +} +declare module "node:cluster" { + export * from "cluster"; + export { default as default } from "cluster"; +} diff --git a/node_modules/@types/node/console.d.ts b/node_modules/@types/node/console.d.ts new file mode 100644 index 0000000..bcc3450 --- /dev/null +++ b/node_modules/@types/node/console.d.ts @@ -0,0 +1,415 @@ +/** + * The `node:console` module provides a simple debugging console that is similar to + * the JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()`, and`console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('node:console')`. + * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the `note on process I/O` for + * more information. 
+ * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/console.js) + */ +declare module "console" { + import console = require("node:console"); + export = console; +} +declare module "node:console" { + import { InspectOptions } from "node:util"; + global { + // This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build + interface Console { + Console: console.ConsoleConstructor; + /** + * `console.assert()` writes a message if `value` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy) or omitted. It only + * writes a message and does not otherwise affect execution. The output always + * starts with `"Assertion failed"`. If provided, `message` is formatted using `util.format()`. + * + * If `value` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy), nothing happens. + * + * ```js + * console.assert(true, 'does nothing'); + * + * console.assert(false, 'Whoops %s work', 'didn\'t'); + * // Assertion failed: Whoops didn't work + * + * console.assert(); + * // Assertion failed + * ``` + * @since v0.1.101 + * @param value The value tested for being truthy. + * @param message All arguments besides `value` are used as error message. + */ + assert(value: any, message?: string, ...optionalParams: any[]): void; + /** + * When `stdout` is a TTY, calling `console.clear()` will attempt to clear the + * TTY. When `stdout` is not a TTY, this method does nothing. + * + * The specific operation of `console.clear()` can vary across operating systems + * and terminal types. For most Linux operating systems, `console.clear()`operates similarly to the `clear` shell command. On Windows, `console.clear()`will clear only the output in the + * current terminal viewport for the Node.js + * binary. + * @since v8.3.0 + */ + clear(): void; + /** + * Maintains an internal counter specific to `label` and outputs to `stdout` the + * number of times `console.count()` has been called with the given `label`. 
+ * + * ```js + * > console.count() + * default: 1 + * undefined + * > console.count('default') + * default: 2 + * undefined + * > console.count('abc') + * abc: 1 + * undefined + * > console.count('xyz') + * xyz: 1 + * undefined + * > console.count('abc') + * abc: 2 + * undefined + * > console.count() + * default: 3 + * undefined + * > + * ``` + * @since v8.3.0 + * @param [label='default'] The display label for the counter. + */ + count(label?: string): void; + /** + * Resets the internal counter specific to `label`. + * + * ```js + * > console.count('abc'); + * abc: 1 + * undefined + * > console.countReset('abc'); + * undefined + * > console.count('abc'); + * abc: 1 + * undefined + * > + * ``` + * @since v8.3.0 + * @param [label='default'] The display label for the counter. + */ + countReset(label?: string): void; + /** + * The `console.debug()` function is an alias for {@link log}. + * @since v8.0.0 + */ + debug(message?: any, ...optionalParams: any[]): void; + /** + * Uses `util.inspect()` on `obj` and prints the resulting string to `stdout`. + * This function bypasses any custom `inspect()` function defined on `obj`. + * @since v0.1.101 + */ + dir(obj: any, options?: InspectOptions): void; + /** + * This method calls `console.log()` passing it the arguments received. + * This method does not produce any XML formatting. + * @since v8.0.0 + */ + dirxml(...data: any[]): void; + /** + * Prints to `stderr` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`). + * + * ```js + * const code = 5; + * console.error('error #%d', code); + * // Prints: error #5, to stderr + * console.error('error', code); + * // Prints: error 5, to stderr + * ``` + * + * If formatting elements (e.g. `%d`) are not found in the first string then `util.inspect()` is called on each argument and the resulting string + * values are concatenated. See `util.format()` for more information. + * @since v0.1.100 + */ + error(message?: any, ...optionalParams: any[]): void; + /** + * Increases indentation of subsequent lines by spaces for `groupIndentation`length. + * + * If one or more `label`s are provided, those are printed first without the + * additional indentation. + * @since v8.5.0 + */ + group(...label: any[]): void; + /** + * An alias for {@link group}. + * @since v8.5.0 + */ + groupCollapsed(...label: any[]): void; + /** + * Decreases indentation of subsequent lines by spaces for `groupIndentation`length. + * @since v8.5.0 + */ + groupEnd(): void; + /** + * The `console.info()` function is an alias for {@link log}. + * @since v0.1.100 + */ + info(message?: any, ...optionalParams: any[]): void; + /** + * Prints to `stdout` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`). + * + * ```js + * const count = 5; + * console.log('count: %d', count); + * // Prints: count: 5, to stdout + * console.log('count:', count); + * // Prints: count: 5, to stdout + * ``` + * + * See `util.format()` for more information. 
+ * @since v0.1.100 + */ + log(message?: any, ...optionalParams: any[]): void; + /** + * Try to construct a table with the columns of the properties of `tabularData`(or use `properties`) and rows of `tabularData` and log it. Falls back to just + * logging the argument if it can't be parsed as tabular. + * + * ```js + * // These can't be parsed as tabular data + * console.table(Symbol()); + * // Symbol() + * + * console.table(undefined); + * // undefined + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]); + * // ┌─────────┬─────┬─────┐ + * // │ (index) │ a │ b │ + * // ├─────────┼─────┼─────┤ + * // │ 0 │ 1 │ 'Y' │ + * // │ 1 │ 'Z' │ 2 │ + * // └─────────┴─────┴─────┘ + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']); + * // ┌─────────┬─────┐ + * // │ (index) │ a │ + * // ├─────────┼─────┤ + * // │ 0 │ 1 │ + * // │ 1 │ 'Z' │ + * // └─────────┴─────┘ + * ``` + * @since v10.0.0 + * @param properties Alternate properties for constructing the table. + */ + table(tabularData: any, properties?: readonly string[]): void; + /** + * Starts a timer that can be used to compute the duration of an operation. Timers + * are identified by a unique `label`. Use the same `label` when calling {@link timeEnd} to stop the timer and output the elapsed time in + * suitable time units to `stdout`. For example, if the elapsed + * time is 3869ms, `console.timeEnd()` displays "3.869s". + * @since v0.1.104 + * @param [label='default'] + */ + time(label?: string): void; + /** + * Stops a timer that was previously started by calling {@link time} and + * prints the result to `stdout`: + * + * ```js + * console.time('bunch-of-stuff'); + * // Do a bunch of stuff. + * console.timeEnd('bunch-of-stuff'); + * // Prints: bunch-of-stuff: 225.438ms + * ``` + * @since v0.1.104 + * @param [label='default'] + */ + timeEnd(label?: string): void; + /** + * For a timer that was previously started by calling {@link time}, prints + * the elapsed time and other `data` arguments to `stdout`: + * + * ```js + * console.time('process'); + * const value = expensiveProcess1(); // Returns 42 + * console.timeLog('process', value); + * // Prints "process: 365.227ms 42". + * doExpensiveProcess2(value); + * console.timeEnd('process'); + * ``` + * @since v10.7.0 + * @param [label='default'] + */ + timeLog(label?: string, ...data: any[]): void; + /** + * Prints to `stderr` the string `'Trace: '`, followed by the `util.format()` formatted message and stack trace to the current position in the code. + * + * ```js + * console.trace('Show me'); + * // Prints: (stack trace will vary based on where trace is called) + * // Trace: Show me + * // at repl:2:9 + * // at REPLServer.defaultEval (repl.js:248:27) + * // at bound (domain.js:287:14) + * // at REPLServer.runBound [as eval] (domain.js:300:12) + * // at REPLServer. (repl.js:412:12) + * // at emitOne (events.js:82:20) + * // at REPLServer.emit (events.js:169:7) + * // at REPLServer.Interface._onLine (readline.js:210:10) + * // at REPLServer.Interface._line (readline.js:549:8) + * // at REPLServer.Interface._ttyWrite (readline.js:826:14) + * ``` + * @since v0.1.104 + */ + trace(message?: any, ...optionalParams: any[]): void; + /** + * The `console.warn()` function is an alias for {@link error}. + * @since v0.1.100 + */ + warn(message?: any, ...optionalParams: any[]): void; + // --- Inspector mode only --- + /** + * This method does not display anything unless used in the inspector. + * Starts a JavaScript CPU profile with an optional label. 
+ */ + profile(label?: string): void; + /** + * This method does not display anything unless used in the inspector. + * Stops the current JavaScript CPU profiling session if one has been started and prints the report to the Profiles panel of the inspector. + */ + profileEnd(label?: string): void; + /** + * This method does not display anything unless used in the inspector. + * Adds an event with the label `label` to the Timeline panel of the inspector. + */ + timeStamp(label?: string): void; + } + /** + * The `console` module provides a simple debugging console that is similar to the + * JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()` and`console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('console')`. + * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the `note on process I/O` for + * more information. + * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! 
Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/console.js) + */ + namespace console { + interface ConsoleConstructorOptions { + stdout: NodeJS.WritableStream; + stderr?: NodeJS.WritableStream | undefined; + ignoreErrors?: boolean | undefined; + colorMode?: boolean | "auto" | undefined; + inspectOptions?: InspectOptions | undefined; + /** + * Set group indentation + * @default 2 + */ + groupIndentation?: number | undefined; + } + interface ConsoleConstructor { + prototype: Console; + new(stdout: NodeJS.WritableStream, stderr?: NodeJS.WritableStream, ignoreErrors?: boolean): Console; + new(options: ConsoleConstructorOptions): Console; + } + } + var console: Console; + } + export = globalThis.console; +} diff --git a/node_modules/@types/node/constants.d.ts b/node_modules/@types/node/constants.d.ts new file mode 100644 index 0000000..c3ac2b8 --- /dev/null +++ b/node_modules/@types/node/constants.d.ts @@ -0,0 +1,19 @@ +/** @deprecated since v6.3.0 - use constants property exposed by the relevant module instead. */ +declare module "constants" { + import { constants as osConstants, SignalConstants } from "node:os"; + import { constants as cryptoConstants } from "node:crypto"; + import { constants as fsConstants } from "node:fs"; + + const exp: + & typeof osConstants.errno + & typeof osConstants.priority + & SignalConstants + & typeof cryptoConstants + & typeof fsConstants; + export = exp; +} + +declare module "node:constants" { + import constants = require("constants"); + export = constants; +} diff --git a/node_modules/@types/node/crypto.d.ts b/node_modules/@types/node/crypto.d.ts new file mode 100644 index 0000000..d121e47 --- /dev/null +++ b/node_modules/@types/node/crypto.d.ts @@ -0,0 +1,4487 @@ +/** + * The `node:crypto` module provides cryptographic functionality that includes a + * set of wrappers for OpenSSL's hash, HMAC, cipher, decipher, sign, and verify + * functions. + * + * ```js + * const { createHmac } = await import('node:crypto'); + * + * const secret = 'abcdefg'; + * const hash = createHmac('sha256', secret) + * .update('I love cupcakes') + * .digest('hex'); + * console.log(hash); + * // Prints: + * // c0fa1bc00531bd78ef38c628449c5102aeabd49b5dc3a2a516ea6ea959d6658e + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/crypto.js) + */ +declare module "crypto" { + import * as stream from "node:stream"; + import { PeerCertificate } from "node:tls"; + /** + * SPKAC is a Certificate Signing Request mechanism originally implemented by + * Netscape and was specified formally as part of HTML5's `keygen` element. + * + * `` is deprecated since [HTML 5.2](https://www.w3.org/TR/html52/changes.html#features-removed) and new projects + * should not use this element anymore. + * + * The `node:crypto` module provides the `Certificate` class for working with SPKAC + * data. The most common usage is handling output generated by the HTML5`` element. Node.js uses [OpenSSL's SPKAC + * implementation](https://www.openssl.org/docs/man3.0/man1/openssl-spkac.html) internally. + * @since v0.11.8 + */ + class Certificate { + /** + * ```js + * const { Certificate } = await import('node:crypto'); + * const spkac = getSpkacSomehow(); + * const challenge = Certificate.exportChallenge(spkac); + * console.log(challenge.toString('utf8')); + * // Prints: the challenge as a UTF8 string + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. 
+ * @return The challenge component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportChallenge(spkac: BinaryLike): Buffer; + /** + * ```js + * const { Certificate } = await import('node:crypto'); + * const spkac = getSpkacSomehow(); + * const publicKey = Certificate.exportPublicKey(spkac); + * console.log(publicKey); + * // Prints: the public key as + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return The public key component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * ```js + * import { Buffer } from 'node:buffer'; + * const { Certificate } = await import('node:crypto'); + * + * const spkac = getSpkacSomehow(); + * console.log(Certificate.verifySpkac(Buffer.from(spkac))); + * // Prints: true or false + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return `true` if the given `spkac` data structure is valid, `false` otherwise. + */ + static verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + /** + * @deprecated + * @param spkac + * @returns The challenge component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportChallenge(spkac: BinaryLike): Buffer; + /** + * @deprecated + * @param spkac + * @param encoding The encoding of the spkac string. + * @returns The public key component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * @deprecated + * @param spkac + * @returns `true` if the given `spkac` data structure is valid, + * `false` otherwise. + */ + verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + } + namespace constants { + // https://nodejs.org/dist/latest-v20.x/docs/api/crypto.html#crypto-constants + const OPENSSL_VERSION_NUMBER: number; + /** Applies multiple bug workarounds within OpenSSL. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html for detail. */ + const SSL_OP_ALL: number; + /** Instructs OpenSSL to allow a non-[EC]DHE-based key exchange mode for TLS v1.3 */ + const SSL_OP_ALLOW_NO_DHE_KEX: number; + /** Allows legacy insecure renegotiation between OpenSSL and unpatched clients or servers. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number; + /** Attempts to use the server's preferences instead of the client's when selecting a cipher. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_CIPHER_SERVER_PREFERENCE: number; + /** Instructs OpenSSL to use Cisco's version identifier of DTLS_BAD_VER. */ + const SSL_OP_CISCO_ANYCONNECT: number; + /** Instructs OpenSSL to turn on cookie exchange. */ + const SSL_OP_COOKIE_EXCHANGE: number; + /** Instructs OpenSSL to add server-hello extension from an early version of the cryptopro draft. */ + const SSL_OP_CRYPTOPRO_TLSEXT_BUG: number; + /** Instructs OpenSSL to disable a SSL 3.0/TLS 1.0 vulnerability workaround added in OpenSSL 0.9.6d. */ + const SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number; + /** Allows initial connection to servers that do not support RI. */ + const SSL_OP_LEGACY_SERVER_CONNECT: number; + /** Instructs OpenSSL to disable support for SSL/TLS compression. */ + const SSL_OP_NO_COMPRESSION: number; + /** Instructs OpenSSL to disable encrypt-then-MAC. 
*/ + const SSL_OP_NO_ENCRYPT_THEN_MAC: number; + const SSL_OP_NO_QUERY_MTU: number; + /** Instructs OpenSSL to disable renegotiation. */ + const SSL_OP_NO_RENEGOTIATION: number; + /** Instructs OpenSSL to always start a new session when performing renegotiation. */ + const SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number; + /** Instructs OpenSSL to turn off SSL v2 */ + const SSL_OP_NO_SSLv2: number; + /** Instructs OpenSSL to turn off SSL v3 */ + const SSL_OP_NO_SSLv3: number; + /** Instructs OpenSSL to disable use of RFC4507bis tickets. */ + const SSL_OP_NO_TICKET: number; + /** Instructs OpenSSL to turn off TLS v1 */ + const SSL_OP_NO_TLSv1: number; + /** Instructs OpenSSL to turn off TLS v1.1 */ + const SSL_OP_NO_TLSv1_1: number; + /** Instructs OpenSSL to turn off TLS v1.2 */ + const SSL_OP_NO_TLSv1_2: number; + /** Instructs OpenSSL to turn off TLS v1.3 */ + const SSL_OP_NO_TLSv1_3: number; + /** Instructs OpenSSL server to prioritize ChaCha20-Poly1305 when the client does. This option has no effect if `SSL_OP_CIPHER_SERVER_PREFERENCE` is not enabled. */ + const SSL_OP_PRIORITIZE_CHACHA: number; + /** Instructs OpenSSL to disable version rollback attack detection. */ + const SSL_OP_TLS_ROLLBACK_BUG: number; + const ENGINE_METHOD_RSA: number; + const ENGINE_METHOD_DSA: number; + const ENGINE_METHOD_DH: number; + const ENGINE_METHOD_RAND: number; + const ENGINE_METHOD_EC: number; + const ENGINE_METHOD_CIPHERS: number; + const ENGINE_METHOD_DIGESTS: number; + const ENGINE_METHOD_PKEY_METHS: number; + const ENGINE_METHOD_PKEY_ASN1_METHS: number; + const ENGINE_METHOD_ALL: number; + const ENGINE_METHOD_NONE: number; + const DH_CHECK_P_NOT_SAFE_PRIME: number; + const DH_CHECK_P_NOT_PRIME: number; + const DH_UNABLE_TO_CHECK_GENERATOR: number; + const DH_NOT_SUITABLE_GENERATOR: number; + const RSA_PKCS1_PADDING: number; + const RSA_SSLV23_PADDING: number; + const RSA_NO_PADDING: number; + const RSA_PKCS1_OAEP_PADDING: number; + const RSA_X931_PADDING: number; + const RSA_PKCS1_PSS_PADDING: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the digest size when signing or verifying. */ + const RSA_PSS_SALTLEN_DIGEST: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the maximum permissible value when signing data. */ + const RSA_PSS_SALTLEN_MAX_SIGN: number; + /** Causes the salt length for RSA_PKCS1_PSS_PADDING to be determined automatically when verifying a signature. */ + const RSA_PSS_SALTLEN_AUTO: number; + const POINT_CONVERSION_COMPRESSED: number; + const POINT_CONVERSION_UNCOMPRESSED: number; + const POINT_CONVERSION_HYBRID: number; + /** Specifies the built-in default cipher list used by Node.js (colon-separated values). */ + const defaultCoreCipherList: string; + /** Specifies the active default cipher list used by the current Node.js process (colon-separated values). */ + const defaultCipherList: string; + } + interface HashOptions extends stream.TransformOptions { + /** + * For XOF hash functions such as `shake256`, the + * outputLength option can be used to specify the desired output length in bytes. + */ + outputLength?: number | undefined; + } + /** @deprecated since v10.0.0 */ + const fips: boolean; + /** + * Creates and returns a `Hash` object that can be used to generate hash digests + * using the given `algorithm`. Optional `options` argument controls stream + * behavior. For XOF hash functions such as `'shake256'`, the `outputLength` option + * can be used to specify the desired output length in bytes. 
+ * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * Example: generating the sha256 sum of a file + * + * ```js + * import { + * createReadStream, + * } from 'node:fs'; + * import { argv } from 'node:process'; + * const { + * createHash, + * } = await import('node:crypto'); + * + * const filename = argv[2]; + * + * const hash = createHash('sha256'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = input.read(); + * if (data) + * hash.update(data); + * else { + * console.log(`${hash.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.92 + * @param options `stream.transform` options + */ + function createHash(algorithm: string, options?: HashOptions): Hash; + /** + * Creates and returns an `Hmac` object that uses the given `algorithm` and `key`. + * Optional `options` argument controls stream behavior. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * The `key` is the HMAC key used to generate the cryptographic HMAC hash. If it is + * a `KeyObject`, its type must be `secret`. If it is a string, please consider `caveats when using strings as inputs to cryptographic APIs`. If it was + * obtained from a cryptographically secure source of entropy, such as {@link randomBytes} or {@link generateKey}, its length should not + * exceed the block size of `algorithm` (e.g., 512 bits for SHA-256). + * + * Example: generating the sha256 HMAC of a file + * + * ```js + * import { + * createReadStream, + * } from 'node:fs'; + * import { argv } from 'node:process'; + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const filename = argv[2]; + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = input.read(); + * if (data) + * hmac.update(data); + * else { + * console.log(`${hmac.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createHmac(algorithm: string, key: BinaryLike | KeyObject, options?: stream.TransformOptions): Hmac; + // https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings + type BinaryToTextEncoding = "base64" | "base64url" | "hex" | "binary"; + type CharacterEncoding = "utf8" | "utf-8" | "utf16le" | "utf-16le" | "latin1"; + type LegacyCharacterEncoding = "ascii" | "binary" | "ucs2" | "ucs-2"; + type Encoding = BinaryToTextEncoding | CharacterEncoding | LegacyCharacterEncoding; + type ECDHKeyFormat = "compressed" | "uncompressed" | "hybrid"; + /** + * The `Hash` class is a utility for creating hash digests of data. It can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed hash digest on the readable side, or + * * Using the `hash.update()` and `hash.digest()` methods to produce the + * computed hash. 
+ * + * The {@link createHash} method is used to create `Hash` instances. `Hash`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hash` objects as streams: + * + * ```js + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = hash.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * } + * }); + * + * hash.write('some data to hash'); + * hash.end(); + * ``` + * + * Example: Using `Hash` and piped streams: + * + * ```js + * import { createReadStream } from 'node:fs'; + * import { stdout } from 'node:process'; + * const { createHash } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * const input = createReadStream('test.js'); + * input.pipe(hash).setEncoding('hex').pipe(stdout); + * ``` + * + * Example: Using the `hash.update()` and `hash.digest()` methods: + * + * ```js + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('some data to hash'); + * console.log(hash.digest('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * ``` + * @since v0.1.92 + */ + class Hash extends stream.Transform { + private constructor(); + /** + * Creates a new `Hash` object that contains a deep copy of the internal state + * of the current `Hash` object. + * + * The optional `options` argument controls stream behavior. For XOF hash + * functions such as `'shake256'`, the `outputLength` option can be used to + * specify the desired output length in bytes. + * + * An error is thrown when an attempt is made to copy the `Hash` object after + * its `hash.digest()` method has been called. + * + * ```js + * // Calculate a rolling hash. + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('one'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('two'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('three'); + * console.log(hash.copy().digest('hex')); + * + * // Etc. + * ``` + * @since v13.1.0 + * @param options `stream.transform` options + */ + copy(options?: HashOptions): Hash; + /** + * Updates the hash content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Hash; + update(data: string, inputEncoding: Encoding): Hash; + /** + * Calculates the digest of all of the data passed to be hashed (using the `hash.update()` method). + * If `encoding` is provided a string will be returned; otherwise + * a `Buffer` is returned. + * + * The `Hash` object can not be used again after `hash.digest()` method has been + * called. Multiple calls will cause an error to be thrown. + * @since v0.1.92 + * @param encoding The `encoding` of the return value. 
+ */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + /** + * The `Hmac` class is a utility for creating cryptographic HMAC digests. It can + * be used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed HMAC digest on the readable side, or + * * Using the `hmac.update()` and `hmac.digest()` methods to produce the + * computed HMAC digest. + * + * The {@link createHmac} method is used to create `Hmac` instances. `Hmac`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hmac` objects as streams: + * + * ```js + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = hmac.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * } + * }); + * + * hmac.write('some data to hash'); + * hmac.end(); + * ``` + * + * Example: Using `Hmac` and piped streams: + * + * ```js + * import { createReadStream } from 'node:fs'; + * import { stdout } from 'node:process'; + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream('test.js'); + * input.pipe(hmac).pipe(stdout); + * ``` + * + * Example: Using the `hmac.update()` and `hmac.digest()` methods: + * + * ```js + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.update('some data to hash'); + * console.log(hmac.digest('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * ``` + * @since v0.1.94 + */ + class Hmac extends stream.Transform { + private constructor(); + /** + * Updates the `Hmac` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Hmac; + update(data: string, inputEncoding: Encoding): Hmac; + /** + * Calculates the HMAC digest of all of the data passed using `hmac.update()`. + * If `encoding` is + * provided a string is returned; otherwise a `Buffer` is returned; + * + * The `Hmac` object can not be used again after `hmac.digest()` has been + * called. Multiple calls to `hmac.digest()` will result in an error being thrown. + * @since v0.1.94 + * @param encoding The `encoding` of the return value. 
*/ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + type KeyObjectType = "secret" | "public" | "private"; + interface KeyExportOptions<T extends KeyFormat> { + type: "pkcs1" | "spki" | "pkcs8" | "sec1"; + format: T; + cipher?: string | undefined; + passphrase?: string | Buffer | undefined; + } + interface JwkKeyExportOptions { + format: "jwk"; + } + interface JsonWebKey { + crv?: string | undefined; + d?: string | undefined; + dp?: string | undefined; + dq?: string | undefined; + e?: string | undefined; + k?: string | undefined; + kty?: string | undefined; + n?: string | undefined; + p?: string | undefined; + q?: string | undefined; + qi?: string | undefined; + x?: string | undefined; + y?: string | undefined; + [key: string]: unknown; + } + interface AsymmetricKeyDetails { + /** + * Key size in bits (RSA, DSA). + */ + modulusLength?: number | undefined; + /** + * Public exponent (RSA). + */ + publicExponent?: bigint | undefined; + /** + * Name of the message digest (RSA-PSS). + */ + hashAlgorithm?: string | undefined; + /** + * Name of the message digest used by MGF1 (RSA-PSS). + */ + mgf1HashAlgorithm?: string | undefined; + /** + * Minimal salt length in bytes (RSA-PSS). + */ + saltLength?: number | undefined; + /** + * Size of q in bits (DSA). + */ + divisorLength?: number | undefined; + /** + * Name of the curve (EC). + */ + namedCurve?: string | undefined; + } + /** + * Node.js uses a `KeyObject` class to represent a symmetric or asymmetric key, + * and each kind of key exposes different functions. The {@link createSecretKey}, {@link createPublicKey} and {@link createPrivateKey} methods are used to create `KeyObject`instances. `KeyObject` + * objects are not to be created directly using the `new`keyword. + * + * Most applications should consider using the new `KeyObject` API instead of + * passing keys as strings or `Buffer`s due to improved security features. + * + * `KeyObject` instances can be passed to other threads via `postMessage()`. + * The receiver obtains a cloned `KeyObject`, and the `KeyObject` does not need to + * be listed in the `transferList` argument. + * @since v11.6.0 + */ + class KeyObject { + private constructor(); + /** + * Example: Converting a `CryptoKey` instance to a `KeyObject`: + * + * ```js + * const { KeyObject } = await import('node:crypto'); + * const { subtle } = globalThis.crypto; + * + * const key = await subtle.generateKey({ + * name: 'HMAC', + * hash: 'SHA-256', + * length: 256, + * }, true, ['sign', 'verify']); + * + * const keyObject = KeyObject.from(key); + * console.log(keyObject.symmetricKeySize); + * // Prints: 32 (symmetric key size in bytes) + * ``` + * @since v15.0.0 + */ + static from(key: webcrypto.CryptoKey): KeyObject; + /** + * For asymmetric keys, this property represents the type of the key. Supported key + * types are: + * + * * `'rsa'` (OID 1.2.840.113549.1.1.1) + * * `'rsa-pss'` (OID 1.2.840.113549.1.1.10) + * * `'dsa'` (OID 1.2.840.10040.4.1) + * * `'ec'` (OID 1.2.840.10045.2.1) + * * `'x25519'` (OID 1.3.101.110) + * * `'x448'` (OID 1.3.101.111) + * * `'ed25519'` (OID 1.3.101.112) + * * `'ed448'` (OID 1.3.101.113) + * * `'dh'` (OID 1.2.840.113549.1.3.1) + * + * This property is `undefined` for unrecognized `KeyObject` types and symmetric + * keys. + * @since v11.6.0 + */ + asymmetricKeyType?: KeyType | undefined; + /** + * For asymmetric keys, this property represents the size of the embedded key in + * bytes. This property is `undefined` for symmetric keys.
+ */ + asymmetricKeySize?: number | undefined; + /** + * This property exists only on asymmetric keys. Depending on the type of the key, + * this object contains information about the key. None of the information obtained + * through this property can be used to uniquely identify a key or to compromise + * the security of the key. + * + * For RSA-PSS keys, if the key material contains a `RSASSA-PSS-params` sequence, + * the `hashAlgorithm`, `mgf1HashAlgorithm`, and `saltLength` properties will be + * set. + * + * Other key details might be exposed via this API using additional attributes. + * @since v15.7.0 + */ + asymmetricKeyDetails?: AsymmetricKeyDetails | undefined; + /** + * For symmetric keys, the following encoding options can be used: + * + * For public keys, the following encoding options can be used: + * + * For private keys, the following encoding options can be used: + * + * The result type depends on the selected encoding format, when PEM the + * result is a string, when DER it will be a buffer containing the data + * encoded as DER, when [JWK](https://tools.ietf.org/html/rfc7517) it will be an object. + * + * When [JWK](https://tools.ietf.org/html/rfc7517) encoding format was selected, all other encoding options are + * ignored. + * + * PKCS#1, SEC1, and PKCS#8 type keys can be encrypted by using a combination of + * the `cipher` and `format` options. The PKCS#8 `type` can be used with any`format` to encrypt any key algorithm (RSA, EC, or DH) by specifying a`cipher`. PKCS#1 and SEC1 can only be + * encrypted by specifying a `cipher`when the PEM `format` is used. For maximum compatibility, use PKCS#8 for + * encrypted private keys. Since PKCS#8 defines its own + * encryption mechanism, PEM-level encryption is not supported when encrypting + * a PKCS#8 key. See [RFC 5208](https://www.rfc-editor.org/rfc/rfc5208.txt) for PKCS#8 encryption and [RFC 1421](https://www.rfc-editor.org/rfc/rfc1421.txt) for + * PKCS#1 and SEC1 encryption. + * @since v11.6.0 + */ + export(options: KeyExportOptions<"pem">): string | Buffer; + export(options?: KeyExportOptions<"der">): Buffer; + export(options?: JwkKeyExportOptions): JsonWebKey; + /** + * Returns `true` or `false` depending on whether the keys have exactly the same + * type, value, and parameters. This method is not [constant time](https://en.wikipedia.org/wiki/Timing_attack). + * @since v17.7.0, v16.15.0 + * @param otherKeyObject A `KeyObject` with which to compare `keyObject`. + */ + equals(otherKeyObject: KeyObject): boolean; + /** + * For secret keys, this property represents the size of the key in bytes. This + * property is `undefined` for asymmetric keys. + * @since v11.6.0 + */ + symmetricKeySize?: number | undefined; + /** + * Depending on the type of this `KeyObject`, this property is either`'secret'` for secret (symmetric) keys, `'public'` for public (asymmetric) keys + * or `'private'` for private (asymmetric) keys. 
+ * @since v11.6.0 + */ + type: KeyObjectType; + } + type CipherCCMTypes = "aes-128-ccm" | "aes-192-ccm" | "aes-256-ccm" | "chacha20-poly1305"; + type CipherGCMTypes = "aes-128-gcm" | "aes-192-gcm" | "aes-256-gcm"; + type CipherOCBTypes = "aes-128-ocb" | "aes-192-ocb" | "aes-256-ocb"; + type BinaryLike = string | NodeJS.ArrayBufferView; + type CipherKey = BinaryLike | KeyObject; + interface CipherCCMOptions extends stream.TransformOptions { + authTagLength: number; + } + interface CipherGCMOptions extends stream.TransformOptions { + authTagLength?: number | undefined; + } + interface CipherOCBOptions extends stream.TransformOptions { + authTagLength: number; + } + /** + * Creates and returns a `Cipher` object that uses the given `algorithm` and`password`. + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `password` is used to derive the cipher key and initialization vector (IV). + * The value must be either a `'latin1'` encoded string, a `Buffer`, a`TypedArray`, or a `DataView`. + * + * **This function is semantically insecure for all** + * **supported ciphers and fatally flawed for ciphers in counter mode (such as CTR,** + * **GCM, or CCM).** + * + * The implementation of `crypto.createCipher()` derives keys using the OpenSSL + * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) with the digest algorithm set to MD5, one + * iteration, and no salt. The lack of salt allows dictionary attacks as the same + * password always creates the same key. The low iteration count and + * non-cryptographically secure hash algorithm allow passwords to be tested very + * rapidly. + * + * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) it is recommended that + * developers derive a key and IV on + * their own using {@link scrypt} and to use {@link createCipheriv} to create the `Cipher` object. Users should not use ciphers with counter mode + * (e.g. CTR, GCM, or CCM) in `crypto.createCipher()`. A warning is emitted when + * they are used in order to avoid the risk of IV reuse that causes + * vulnerabilities. For the case when IV is reused in GCM, see [Nonce-Disrespecting Adversaries](https://github.com/nonce-disrespect/nonce-disrespect) for details. + * @since v0.1.94 + * @deprecated Since v10.0.0 - Use {@link createCipheriv} instead. 
+ * @param options `stream.transform` options + */ + function createCipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): CipherCCM; + /** @deprecated since v10.0.0 use `createCipheriv()` */ + function createCipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): CipherGCM; + /** @deprecated since v10.0.0 use `createCipheriv()` */ + function createCipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Cipher; + /** + * Creates and returns a `Cipher` object, with the given `algorithm`, `key` and + * initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a + * given IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createCipheriv( + algorithm: CipherCCMTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherCCMOptions, + ): CipherCCM; + function createCipheriv( + algorithm: CipherOCBTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherOCBOptions, + ): CipherOCB; + function createCipheriv( + algorithm: CipherGCMTypes, + key: CipherKey, + iv: BinaryLike, + options?: CipherGCMOptions, + ): CipherGCM; + function createCipheriv( + algorithm: string, + key: CipherKey, + iv: BinaryLike | null, + options?: stream.TransformOptions, + ): Cipher; + /** + * Instances of the `Cipher` class are used to encrypt data. The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain unencrypted + * data is written to produce encrypted data on the readable side, or + * * Using the `cipher.update()` and `cipher.final()` methods to produce + * the encrypted data. + * + * The {@link createCipher} or {@link createCipheriv} methods are + * used to create `Cipher` instances. `Cipher` objects are not to be created + * directly using the `new` keyword. 
+ * + * Example: Using `Cipher` objects as streams: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * // Once we have the key and iv, we can create and use the cipher... + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = ''; + * cipher.setEncoding('hex'); + * + * cipher.on('data', (chunk) => encrypted += chunk); + * cipher.on('end', () => console.log(encrypted)); + * + * cipher.write('some clear text data'); + * cipher.end(); + * }); + * }); + * ``` + * + * Example: Using `Cipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'node:fs'; + * + * import { + * pipeline, + * } from 'node:stream'; + * + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.js'); + * const output = createWriteStream('test.enc'); + * + * pipeline(input, cipher, output, (err) => { + * if (err) throw err; + * }); + * }); + * }); + * ``` + * + * Example: Using the `cipher.update()` and `cipher.final()` methods: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = cipher.update('some clear text data', 'utf8', 'hex'); + * encrypted += cipher.final('hex'); + * console.log(encrypted); + * }); + * }); + * ``` + * @since v0.1.94 + */ + class Cipher extends stream.Transform { + private constructor(); + /** + * Updates the cipher with `data`. If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`, `TypedArray`, or`DataView`. If `data` is a `Buffer`, + * `TypedArray`, or `DataView`, then`inputEncoding` is ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. 
+ * + * The `cipher.update()` method can be called multiple times with new data until `cipher.final()` is called. Calling `cipher.update()` after `cipher.final()` will result in an error being + * thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the data. + * @param outputEncoding The `encoding` of the return value. + */ + update(data: BinaryLike): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `cipher.final()` method has been called, the `Cipher` object can no + * longer be used to encrypt data. Attempts to call `cipher.final()` more than + * once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining enciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When using block encryption algorithms, the `Cipher` class will automatically + * add padding to the input data to the appropriate block size. To disable the + * default padding call `cipher.setAutoPadding(false)`. + * + * When `autoPadding` is `false`, the length of the entire input data must be a + * multiple of the cipher's block size or `cipher.final()` will throw an error. + * Disabling automatic padding is useful for non-standard padding, for instance + * using `0x0` instead of PKCS padding. + * + * The `cipher.setAutoPadding()` method must be called before `cipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(autoPadding?: boolean): this; + } + interface CipherCCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + interface CipherGCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + interface CipherOCB extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + /** + * Creates and returns a `Decipher` object that uses the given `algorithm` and`password` (key). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * **This function is semantically insecure for all** + * **supported ciphers and fatally flawed for ciphers in counter mode (such as CTR,** + * **GCM, or CCM).** + * + * The implementation of `crypto.createDecipher()` derives keys using the OpenSSL + * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) with the digest algorithm set to MD5, one + * iteration, and no salt. The lack of salt allows dictionary attacks as the same + * password always creates the same key. The low iteration count and + * non-cryptographically secure hash algorithm allow passwords to be tested very + * rapidly. 
+ * + * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) it is recommended that + * developers derive a key and IV on + * their own using {@link scrypt} and to use {@link createDecipheriv} to create the `Decipher` object. + * @since v0.1.94 + * @deprecated Since v10.0.0 - Use {@link createDecipheriv} instead. + * @param options `stream.transform` options + */ + function createDecipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): DecipherCCM; + /** @deprecated since v10.0.0 use `createDecipheriv()` */ + function createDecipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): DecipherGCM; + /** @deprecated since v10.0.0 use `createDecipheriv()` */ + function createDecipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Decipher; + /** + * Creates and returns a `Decipher` object that uses the given `algorithm`, `key`and initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to restrict accepted authentication tags + * to those with the specified length. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a given + * IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createDecipheriv( + algorithm: CipherCCMTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherCCMOptions, + ): DecipherCCM; + function createDecipheriv( + algorithm: CipherOCBTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherOCBOptions, + ): DecipherOCB; + function createDecipheriv( + algorithm: CipherGCMTypes, + key: CipherKey, + iv: BinaryLike, + options?: CipherGCMOptions, + ): DecipherGCM; + function createDecipheriv( + algorithm: string, + key: CipherKey, + iv: BinaryLike | null, + options?: stream.TransformOptions, + ): Decipher; + /** + * Instances of the `Decipher` class are used to decrypt data. 
The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain encrypted + * data is written to produce unencrypted data on the readable side, or + * * Using the `decipher.update()` and `decipher.final()` methods to + * produce the unencrypted data. + * + * The {@link createDecipher} or {@link createDecipheriv} methods are + * used to create `Decipher` instances. `Decipher` objects are not to be created + * directly using the `new` keyword. + * + * Example: Using `Decipher` objects as streams: + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Key length is dependent on the algorithm. In this case for aes192, it is + * // 24 bytes (192 bits). + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * let decrypted = ''; + * decipher.on('readable', () => { + * let chunk; + * while (null !== (chunk = decipher.read())) { + * decrypted += chunk.toString('utf8'); + * } + * }); + * decipher.on('end', () => { + * console.log(decrypted); + * // Prints: some clear text data + * }); + * + * // Encrypted with same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * decipher.write(encrypted, 'hex'); + * decipher.end(); + * ``` + * + * Example: Using `Decipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.enc'); + * const output = createWriteStream('test.js'); + * + * input.pipe(decipher).pipe(output); + * ``` + * + * Example: Using the `decipher.update()` and `decipher.final()` methods: + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * // Encrypted using same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * let decrypted = decipher.update(encrypted, 'hex', 'utf8'); + * decrypted += decipher.final('utf8'); + * console.log(decrypted); + * // Prints: some clear text data + * ``` + * @since v0.1.94 + */ + class Decipher extends stream.Transform { + private constructor(); + /** + * Updates the decipher with `data`. 
If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`. If `data` is a `Buffer` then `inputEncoding` is + * ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. + * + * The `decipher.update()` method can be called multiple times with new data until `decipher.final()` is called. Calling `decipher.update()` after `decipher.final()` will result in an error + * being thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. + * @param outputEncoding The `encoding` of the return value. + */ + update(data: NodeJS.ArrayBufferView): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `decipher.final()` method has been called, the `Decipher` object can + * no longer be used to decrypt data. Attempts to call `decipher.final()` more + * than once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining deciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When data has been encrypted without standard block padding, calling`decipher.setAutoPadding(false)` will disable automatic padding to prevent `decipher.final()` from checking for and + * removing padding. + * + * Turning auto padding off will only work if the input data's length is a + * multiple of the ciphers block size. + * + * The `decipher.setAutoPadding()` method must be called before `decipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(auto_padding?: boolean): this; + } + interface DecipherCCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + }, + ): this; + } + interface DecipherGCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + } + interface DecipherOCB extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + } + interface PrivateKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: "pkcs1" | "pkcs8" | "sec1" | undefined; + passphrase?: string | Buffer | undefined; + encoding?: string | undefined; + } + interface PublicKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: "pkcs1" | "spki" | undefined; + encoding?: string | undefined; + } + /** + * Asynchronously generates a new random secret key of the given `length`. The`type` will determine which validations will be performed on the `length`. 
+ * + * ```js + * const { + * generateKey, + * } = await import('node:crypto'); + * + * generateKey('hmac', { length: 512 }, (err, key) => { + * if (err) throw err; + * console.log(key.export().toString('hex')); // 46e..........620 + * }); + * ``` + * + * The size of a generated HMAC key should not exceed the block size of the + * underlying hash function. See {@link createHmac} for more information. + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKey( + type: "hmac" | "aes", + options: { + length: number; + }, + callback: (err: Error | null, key: KeyObject) => void, + ): void; + /** + * Synchronously generates a new random secret key of the given `length`. The`type` will determine which validations will be performed on the `length`. + * + * ```js + * const { + * generateKeySync, + * } = await import('node:crypto'); + * + * const key = generateKeySync('hmac', { length: 512 }); + * console.log(key.export().toString('hex')); // e89..........41e + * ``` + * + * The size of a generated HMAC key should not exceed the block size of the + * underlying hash function. See {@link createHmac} for more information. + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKeySync( + type: "hmac" | "aes", + options: { + length: number; + }, + ): KeyObject; + interface JsonWebKeyInput { + key: JsonWebKey; + format: "jwk"; + } + /** + * Creates and returns a new key object containing a private key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; otherwise, `key`must be an object with the properties described above. + * + * If the private key is encrypted, a `passphrase` must be specified. The length + * of the passphrase is limited to 1024 bytes. + * @since v11.6.0 + */ + function createPrivateKey(key: PrivateKeyInput | string | Buffer | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a public key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; if `key` is a `KeyObject`with type `'private'`, the public key is derived from the given private key; + * otherwise, `key` must be an object with the properties described above. + * + * If the format is `'pem'`, the `'key'` may also be an X.509 certificate. + * + * Because public keys can be derived from private keys, a private key may be + * passed instead of a public key. In that case, this function behaves as if {@link createPrivateKey} had been called, except that the type of the + * returned `KeyObject` will be `'public'` and that the private key cannot be + * extracted from the returned `KeyObject`. Similarly, if a `KeyObject` with type`'private'` is given, a new `KeyObject` with type `'public'` will be returned + * and it will be impossible to extract the private key from the returned object. + * @since v11.6.0 + */ + function createPublicKey(key: PublicKeyInput | string | Buffer | KeyObject | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a secret key for symmetric + * encryption or `Hmac`. + * @since v11.6.0 + * @param encoding The string encoding when `key` is a string. + */ + function createSecretKey(key: NodeJS.ArrayBufferView): KeyObject; + function createSecretKey(key: string, encoding: BufferEncoding): KeyObject; + /** + * Creates and returns a `Sign` object that uses the given `algorithm`. 
Use {@link getHashes} to obtain the names of the available digest algorithms. + * Optional `options` argument controls the `stream.Writable` behavior. + * + * In some cases, a `Sign` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. + * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createSign(algorithm: string, options?: stream.WritableOptions): Sign; + type DSAEncoding = "der" | "ieee-p1363"; + interface SigningOptions { + /** + * @see crypto.constants.RSA_PKCS1_PADDING + */ + padding?: number | undefined; + saltLength?: number | undefined; + dsaEncoding?: DSAEncoding | undefined; + } + interface SignPrivateKeyInput extends PrivateKeyInput, SigningOptions {} + interface SignKeyObjectInput extends SigningOptions { + key: KeyObject; + } + interface VerifyPublicKeyInput extends PublicKeyInput, SigningOptions {} + interface VerifyKeyObjectInput extends SigningOptions { + key: KeyObject; + } + interface VerifyJsonWebKeyInput extends JsonWebKeyInput, SigningOptions {} + type KeyLike = string | Buffer | KeyObject; + /** + * The `Sign` class is a utility for generating signatures. It can be used in one + * of two ways: + * + * * As a writable `stream`, where data to be signed is written and the `sign.sign()` method is used to generate and return the signature, or + * * Using the `sign.update()` and `sign.sign()` methods to produce the + * signature. + * + * The {@link createSign} method is used to create `Sign` instances. The + * argument is the string name of the hash function to use. `Sign` objects are not + * to be created directly using the `new` keyword. + * + * Example: Using `Sign` and `Verify` objects as streams: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify, + * } = await import('node:crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('ec', { + * namedCurve: 'sect239k1', + * }); + * + * const sign = createSign('SHA256'); + * sign.write('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey, 'hex'); + * + * const verify = createVerify('SHA256'); + * verify.write('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature, 'hex')); + * // Prints: true + * ``` + * + * Example: Using the `sign.update()` and `verify.update()` methods: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify, + * } = await import('node:crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('rsa', { + * modulusLength: 2048, + * }); + * + * const sign = createSign('SHA256'); + * sign.update('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey); + * + * const verify = createVerify('SHA256'); + * verify.update('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature)); + * // Prints: true + * ``` + * @since v0.1.92 + */ + class Sign extends stream.Writable { + private constructor(); + /** + * Updates the `Sign` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. 
+ * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): this; + update(data: string, inputEncoding: Encoding): this; + /** + * Calculates the signature on all the data passed through using either `sign.update()` or `sign.write()`. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the following additional properties can be passed: + * + * If `outputEncoding` is provided a string is returned; otherwise a `Buffer` is returned. + * + * The `Sign` object can not be again used after `sign.sign()` method has been + * called. Multiple calls to `sign.sign()` will result in an error being thrown. + * @since v0.1.92 + */ + sign(privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput): Buffer; + sign( + privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + outputFormat: BinaryToTextEncoding, + ): string; + } + /** + * Creates and returns a `Verify` object that uses the given algorithm. + * Use {@link getHashes} to obtain an array of names of the available + * signing algorithms. Optional `options` argument controls the`stream.Writable` behavior. + * + * In some cases, a `Verify` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. + * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createVerify(algorithm: string, options?: stream.WritableOptions): Verify; + /** + * The `Verify` class is a utility for verifying signatures. It can be used in one + * of two ways: + * + * * As a writable `stream` where written data is used to validate against the + * supplied signature, or + * * Using the `verify.update()` and `verify.verify()` methods to verify + * the signature. + * + * The {@link createVerify} method is used to create `Verify` instances.`Verify` objects are not to be created directly using the `new` keyword. + * + * See `Sign` for examples. + * @since v0.1.92 + */ + class Verify extends stream.Writable { + private constructor(); + /** + * Updates the `Verify` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `inputEncoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Verify; + update(data: string, inputEncoding: Encoding): Verify; + /** + * Verifies the provided data using the given `object` and `signature`. + * + * If `object` is not a `KeyObject`, this function behaves as if`object` had been passed to {@link createPublicKey}. If it is an + * object, the following additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the data, in + * the `signatureEncoding`. + * If a `signatureEncoding` is specified, the `signature` is expected to be a + * string; otherwise `signature` is expected to be a `Buffer`,`TypedArray`, or `DataView`. 
+ * + * The `verify` object can not be used again after `verify.verify()` has been + * called. Multiple calls to `verify.verify()` will result in an error being + * thrown. + * + * Because public keys can be derived from private keys, a private key may + * be passed instead of a public key. + * @since v0.1.92 + */ + verify( + object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + ): boolean; + verify( + object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: string, + signature_format?: BinaryToTextEncoding, + ): boolean; + } + /** + * Creates a `DiffieHellman` key exchange object using the supplied `prime` and an + * optional specific `generator`. + * + * The `generator` argument can be a number, string, or `Buffer`. If`generator` is not specified, the value `2` is used. + * + * If `primeEncoding` is specified, `prime` is expected to be a string; otherwise + * a `Buffer`, `TypedArray`, or `DataView` is expected. + * + * If `generatorEncoding` is specified, `generator` is expected to be a string; + * otherwise a number, `Buffer`, `TypedArray`, or `DataView` is expected. + * @since v0.11.12 + * @param primeEncoding The `encoding` of the `prime` string. + * @param [generator=2] + * @param generatorEncoding The `encoding` of the `generator` string. + */ + function createDiffieHellman(primeLength: number, generator?: number): DiffieHellman; + function createDiffieHellman( + prime: ArrayBuffer | NodeJS.ArrayBufferView, + generator?: number | ArrayBuffer | NodeJS.ArrayBufferView, + ): DiffieHellman; + function createDiffieHellman( + prime: ArrayBuffer | NodeJS.ArrayBufferView, + generator: string, + generatorEncoding: BinaryToTextEncoding, + ): DiffieHellman; + function createDiffieHellman( + prime: string, + primeEncoding: BinaryToTextEncoding, + generator?: number | ArrayBuffer | NodeJS.ArrayBufferView, + ): DiffieHellman; + function createDiffieHellman( + prime: string, + primeEncoding: BinaryToTextEncoding, + generator: string, + generatorEncoding: BinaryToTextEncoding, + ): DiffieHellman; + /** + * The `DiffieHellman` class is a utility for creating Diffie-Hellman key + * exchanges. + * + * Instances of the `DiffieHellman` class can be created using the {@link createDiffieHellman} function. + * + * ```js + * import assert from 'node:assert'; + * + * const { + * createDiffieHellman, + * } = await import('node:crypto'); + * + * // Generate Alice's keys... + * const alice = createDiffieHellman(2048); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createDiffieHellman(alice.getPrime(), alice.getGenerator()); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... + * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * // OK + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * ``` + * @since v0.5.0 + */ + class DiffieHellman { + private constructor(); + /** + * Generates private and public Diffie-Hellman key values unless they have been + * generated or computed already, and returns + * the public key in the specified `encoding`. This key should be + * transferred to the other party. + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * + * This function is a thin wrapper around [`DH_generate_key()`](https://www.openssl.org/docs/man3.0/man3/DH_generate_key.html). 
In particular, + * once a private key has been generated or set, calling this function only updates + * the public key but does not generate a new private key. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using the specified `inputEncoding`, and secret is + * encoded using specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * If `outputEncoding` is given a string is returned; otherwise, a `Buffer` is returned. + * @since v0.5.0 + * @param inputEncoding The `encoding` of an `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, inputEncoding?: null, outputEncoding?: null): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding?: null): Buffer; + computeSecret( + otherPublicKey: NodeJS.ArrayBufferView, + inputEncoding: null, + outputEncoding: BinaryToTextEncoding, + ): string; + computeSecret( + otherPublicKey: string, + inputEncoding: BinaryToTextEncoding, + outputEncoding: BinaryToTextEncoding, + ): string; + /** + * Returns the Diffie-Hellman prime in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrime(): Buffer; + getPrime(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman generator in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getGenerator(): Buffer; + getGenerator(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman public key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPublicKey(): Buffer; + getPublicKey(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman private key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * Sets the Diffie-Hellman public key. If the `encoding` argument is provided,`publicKey` is expected + * to be a string. If no `encoding` is provided, `publicKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * @since v0.5.0 + * @param encoding The `encoding` of the `publicKey` string. + */ + setPublicKey(publicKey: NodeJS.ArrayBufferView): void; + setPublicKey(publicKey: string, encoding: BufferEncoding): void; + /** + * Sets the Diffie-Hellman private key. If the `encoding` argument is provided,`privateKey` is expected + * to be a string. If no `encoding` is provided, `privateKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * + * This function does not automatically compute the associated public key. 
Either `diffieHellman.setPublicKey()` or `diffieHellman.generateKeys()` can be + * used to manually provide the public key or to automatically derive it. + * @since v0.5.0 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BufferEncoding): void; + /** + * A bit field containing any warnings and/or errors resulting from a check + * performed during initialization of the `DiffieHellman` object. + * + * The following values are valid for this property (as defined in `node:constants` module): + * + * * `DH_CHECK_P_NOT_SAFE_PRIME` + * * `DH_CHECK_P_NOT_PRIME` + * * `DH_UNABLE_TO_CHECK_GENERATOR` + * * `DH_NOT_SUITABLE_GENERATOR` + * @since v0.11.12 + */ + verifyError: number; + } + /** + * The `DiffieHellmanGroup` class takes a well-known modp group as its argument. + * It works the same as `DiffieHellman`, except that it does not allow changing its keys after creation. + * In other words, it does not implement `setPublicKey()` or `setPrivateKey()` methods. + * + * ```js + * const { createDiffieHellmanGroup } = await import('node:crypto'); + * const dh = createDiffieHellmanGroup('modp1'); + * ``` + * The name (e.g. `'modp1'`) is taken from [RFC 2412](https://www.rfc-editor.org/rfc/rfc2412.txt) (modp1 and 2) and [RFC 3526](https://www.rfc-editor.org/rfc/rfc3526.txt): + * ```bash + * $ perl -ne 'print "$1\n" if /"(modp\d+)"/' src/node_crypto_groups.h + * modp1 # 768 bits + * modp2 # 1024 bits + * modp5 # 1536 bits + * modp14 # 2048 bits + * modp15 # etc. + * modp16 + * modp17 + * modp18 + * ``` + * @since v0.7.5 + */ + const DiffieHellmanGroup: DiffieHellmanGroupConstructor; + interface DiffieHellmanGroupConstructor { + new(name: string): DiffieHellmanGroup; + (name: string): DiffieHellmanGroup; + readonly prototype: DiffieHellmanGroup; + } + type DiffieHellmanGroup = Omit<DiffieHellman, "setPublicKey" | "setPrivateKey">; + /** + * Creates a predefined `DiffieHellmanGroup` key exchange object. The + * supported groups are listed in the documentation for `DiffieHellmanGroup`. + * + * The returned object mimics the interface of objects created by {@link createDiffieHellman}, but will not allow changing + * the keys (with `diffieHellman.setPublicKey()`, for example). The + * advantage of using this method is that the parties do not have to + * generate nor exchange a group modulus beforehand, saving both processor + * and communication time. + * + * Example (obtaining a shared secret): + * + * ```js + * const { + * getDiffieHellman, + * } = await import('node:crypto'); + * const alice = getDiffieHellman('modp14'); + * const bob = getDiffieHellman('modp14'); + * + * alice.generateKeys(); + * bob.generateKeys(); + * + * const aliceSecret = alice.computeSecret(bob.getPublicKey(), null, 'hex'); + * const bobSecret = bob.computeSecret(alice.getPublicKey(), null, 'hex'); + * + * // aliceSecret and bobSecret should be the same + * console.log(aliceSecret === bobSecret); + * ``` + * @since v0.7.5 + */ + function getDiffieHellman(groupName: string): DiffieHellmanGroup; + /** + * An alias for {@link getDiffieHellman} + * @since v0.9.3 + */ + function createDiffieHellmanGroup(name: string): DiffieHellmanGroup; + /** + * Provides an asynchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the`password`, `salt` and `iterations`.
+ * + * The supplied `callback` function is called with two arguments: `err` and`derivedKey`. If an error occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. By default, the successfully generated`derivedKey` will be passed to the callback as a `Buffer`. An error will be + * thrown if any of the input arguments specify invalid values or types. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2, + * } = await import('node:crypto'); + * + * pbkdf2('secret', 'salt', 100000, 64, 'sha512', (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * @since v0.5.5 + */ + function pbkdf2( + password: BinaryLike, + salt: BinaryLike, + iterations: number, + keylen: number, + digest: string, + callback: (err: Error | null, derivedKey: Buffer) => void, + ): void; + /** + * Provides a synchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the`password`, `salt` and `iterations`. + * + * If an error occurs an `Error` will be thrown, otherwise the derived key will be + * returned as a `Buffer`. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2Sync, + * } = await import('node:crypto'); + * + * const key = pbkdf2Sync('secret', 'salt', 100000, 64, 'sha512'); + * console.log(key.toString('hex')); // '3745e48...08d59ae' + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * @since v0.9.3 + */ + function pbkdf2Sync( + password: BinaryLike, + salt: BinaryLike, + iterations: number, + keylen: number, + digest: string, + ): Buffer; + /** + * Generates cryptographically strong pseudorandom data. The `size` argument + * is a number indicating the number of bytes to generate. + * + * If a `callback` function is provided, the bytes are generated asynchronously + * and the `callback` function is invoked with two arguments: `err` and `buf`. 
+ * If an error occurs, `err` will be an `Error` object; otherwise it is `null`. The`buf` argument is a `Buffer` containing the generated bytes. + * + * ```js + * // Asynchronous + * const { + * randomBytes, + * } = await import('node:crypto'); + * + * randomBytes(256, (err, buf) => { + * if (err) throw err; + * console.log(`${buf.length} bytes of random data: ${buf.toString('hex')}`); + * }); + * ``` + * + * If the `callback` function is not provided, the random bytes are generated + * synchronously and returned as a `Buffer`. An error will be thrown if + * there is a problem generating the bytes. + * + * ```js + * // Synchronous + * const { + * randomBytes, + * } = await import('node:crypto'); + * + * const buf = randomBytes(256); + * console.log( + * `${buf.length} bytes of random data: ${buf.toString('hex')}`); + * ``` + * + * The `crypto.randomBytes()` method will not complete until there is + * sufficient entropy available. + * This should normally never take longer than a few milliseconds. The only time + * when generating the random bytes may conceivably block for a longer period of + * time is right after boot, when the whole system is still low on entropy. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomBytes()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomBytes` requests when doing so as part of fulfilling a client + * request. + * @since v0.5.8 + * @param size The number of bytes to generate. The `size` must not be larger than `2**31 - 1`. + * @return if the `callback` function is not provided. + */ + function randomBytes(size: number): Buffer; + function randomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + function pseudoRandomBytes(size: number): Buffer; + function pseudoRandomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + /** + * Return a random integer `n` such that `min <= n < max`. This + * implementation avoids [modulo bias](https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle#Modulo_bias). + * + * The range (`max - min`) must be less than 2**48. `min` and `max` must + * be [safe integers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger). + * + * If the `callback` function is not provided, the random integer is + * generated synchronously. + * + * ```js + * // Asynchronous + * const { + * randomInt, + * } = await import('node:crypto'); + * + * randomInt(3, (err, n) => { + * if (err) throw err; + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * }); + * ``` + * + * ```js + * // Synchronous + * const { + * randomInt, + * } = await import('node:crypto'); + * + * const n = randomInt(3); + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * ``` + * + * ```js + * // With `min` argument + * const { + * randomInt, + * } = await import('node:crypto'); + * + * const n = randomInt(1, 7); + * console.log(`The dice rolled: ${n}`); + * ``` + * @since v14.10.0, v12.19.0 + * @param [min=0] Start of random range (inclusive). + * @param max End of random range (exclusive). + * @param callback `function(err, n) {}`. 
+ */ + function randomInt(max: number): number; + function randomInt(min: number, max: number): number; + function randomInt(max: number, callback: (err: Error | null, value: number) => void): void; + function randomInt(min: number, max: number, callback: (err: Error | null, value: number) => void): void; + /** + * Synchronous version of {@link randomFill}. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFillSync } = await import('node:crypto'); + * + * const buf = Buffer.alloc(10); + * console.log(randomFillSync(buf).toString('hex')); + * + * randomFillSync(buf, 5); + * console.log(buf.toString('hex')); + * + * // The above is equivalent to the following: + * randomFillSync(buf, 5, 5); + * console.log(buf.toString('hex')); + * ``` + * + * Any `ArrayBuffer`, `TypedArray` or `DataView` instance may be passed as`buffer`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFillSync } = await import('node:crypto'); + * + * const a = new Uint32Array(10); + * console.log(Buffer.from(randomFillSync(a).buffer, + * a.byteOffset, a.byteLength).toString('hex')); + * + * const b = new DataView(new ArrayBuffer(10)); + * console.log(Buffer.from(randomFillSync(b).buffer, + * b.byteOffset, b.byteLength).toString('hex')); + * + * const c = new ArrayBuffer(10); + * console.log(Buffer.from(randomFillSync(c)).toString('hex')); + * ``` + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @return The object passed as `buffer` argument. + */ + function randomFillSync(buffer: T, offset?: number, size?: number): T; + /** + * This function is similar to {@link randomBytes} but requires the first + * argument to be a `Buffer` that will be filled. It also + * requires that a callback is passed in. + * + * If the `callback` function is not provided, an error will be thrown. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFill } = await import('node:crypto'); + * + * const buf = Buffer.alloc(10); + * randomFill(buf, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * randomFill(buf, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * // The above is equivalent to the following: + * randomFill(buf, 5, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * ``` + * + * Any `ArrayBuffer`, `TypedArray`, or `DataView` instance may be passed as`buffer`. + * + * While this includes instances of `Float32Array` and `Float64Array`, this + * function should not be used to generate random floating-point numbers. The + * result may contain `+Infinity`, `-Infinity`, and `NaN`, and even if the array + * contains finite numbers only, they are not drawn from a uniform random + * distribution and have no meaningful lower or upper bounds. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFill } = await import('node:crypto'); + * + * const a = new Uint32Array(10); + * randomFill(a, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const b = new DataView(new ArrayBuffer(10)); + * randomFill(b, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const c = new ArrayBuffer(10); + * randomFill(c, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf).toString('hex')); + * }); + * ``` + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomFill()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomFill` requests when doing so as part of fulfilling a client + * request. + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @param callback `function(err, buf) {}`. + */ + function randomFill( + buffer: T, + callback: (err: Error | null, buf: T) => void, + ): void; + function randomFill( + buffer: T, + offset: number, + callback: (err: Error | null, buf: T) => void, + ): void; + function randomFill( + buffer: T, + offset: number, + size: number, + callback: (err: Error | null, buf: T) => void, + ): void; + interface ScryptOptions { + cost?: number | undefined; + blockSize?: number | undefined; + parallelization?: number | undefined; + N?: number | undefined; + r?: number | undefined; + p?: number | undefined; + maxmem?: number | undefined; + } + /** + * Provides an asynchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based + * key derivation function that is designed to be expensive computationally and + * memory-wise in order to make brute-force attacks unrewarding. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * The `callback` function is called with two arguments: `err` and `derivedKey`.`err` is an exception object when key derivation fails, otherwise `err` is`null`. `derivedKey` is passed to the + * callback as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scrypt, + * } = await import('node:crypto'); + * + * // Using the factory defaults. + * scrypt('password', 'salt', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * // Using a custom N parameter. Must be a power of two. 
+ * scrypt('password', 'salt', 64, { N: 1024 }, (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...aa39b34' + * }); + * ``` + * @since v10.5.0 + */ + function scrypt( + password: BinaryLike, + salt: BinaryLike, + keylen: number, + callback: (err: Error | null, derivedKey: Buffer) => void, + ): void; + function scrypt( + password: BinaryLike, + salt: BinaryLike, + keylen: number, + options: ScryptOptions, + callback: (err: Error | null, derivedKey: Buffer) => void, + ): void; + /** + * Provides a synchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based + * key derivation function that is designed to be expensive computationally and + * memory-wise in order to make brute-force attacks unrewarding. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * An exception is thrown when key derivation fails, otherwise the derived key is + * returned as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scryptSync, + * } = await import('node:crypto'); + * // Using the factory defaults. + * + * const key1 = scryptSync('password', 'salt', 64); + * console.log(key1.toString('hex')); // '3745e48...08d59ae' + * // Using a custom N parameter. Must be a power of two. + * const key2 = scryptSync('password', 'salt', 64, { N: 1024 }); + * console.log(key2.toString('hex')); // '3745e48...aa39b34' + * ``` + * @since v10.5.0 + */ + function scryptSync(password: BinaryLike, salt: BinaryLike, keylen: number, options?: ScryptOptions): Buffer; + interface RsaPublicKey { + key: KeyLike; + padding?: number | undefined; + } + interface RsaPrivateKey { + key: KeyLike; + passphrase?: string | undefined; + /** + * @default 'sha1' + */ + oaepHash?: string | undefined; + oaepLabel?: NodeJS.TypedArray | undefined; + padding?: number | undefined; + } + /** + * Encrypts the content of `buffer` with `key` and returns a new `Buffer` with encrypted content. The returned data can be decrypted using + * the corresponding private key, for example using {@link privateDecrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if`key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_OAEP_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. + * @since v0.11.14 + */ + function publicEncrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Decrypts `buffer` with `key`.`buffer` was previously encrypted using + * the corresponding private key, for example using {@link privateEncrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if`key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. 
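+     *
+     * For illustration only, a minimal round-trip sketch: data encrypted with
+     * {@link privateEncrypt} is recovered here with `publicDecrypt` (the key pair is
+     * generated on the spot purely for the example).
+     *
+     * ```js
+     * import { Buffer } from 'node:buffer';
+     * const {
+     *   generateKeyPairSync,
+     *   privateEncrypt,
+     *   publicDecrypt,
+     * } = await import('node:crypto');
+     *
+     * // Generate a throwaway RSA key pair (returned as KeyObjects).
+     * const { publicKey, privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });
+     *
+     * const blob = privateEncrypt(privateKey, Buffer.from('hello'));
+     * console.log(publicDecrypt(publicKey, blob).toString()); // Prints: hello
+     * ```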
+ * @since v1.1.0 + */ + function publicDecrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Decrypts `buffer` with `privateKey`. `buffer` was previously encrypted using + * the corresponding public key, for example using {@link publicEncrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_OAEP_PADDING`. + * @since v0.11.14 + */ + function privateDecrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Encrypts `buffer` with `privateKey`. The returned data can be decrypted using + * the corresponding public key, for example using {@link publicDecrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_PADDING`. + * @since v1.1.0 + */ + function privateEncrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * ```js + * const { + * getCiphers, + * } = await import('node:crypto'); + * + * console.log(getCiphers()); // ['aes-128-cbc', 'aes-128-ccm', ...] + * ``` + * @since v0.9.3 + * @return An array with the names of the supported cipher algorithms. + */ + function getCiphers(): string[]; + /** + * ```js + * const { + * getCurves, + * } = await import('node:crypto'); + * + * console.log(getCurves()); // ['Oakley-EC2N-3', 'Oakley-EC2N-4', ...] + * ``` + * @since v2.3.0 + * @return An array with the names of the supported elliptic curves. + */ + function getCurves(): string[]; + /** + * @since v10.0.0 + * @return `1` if and only if a FIPS compliant crypto provider is currently in use, `0` otherwise. A future semver-major release may change the return type of this API to a {boolean}. + */ + function getFips(): 1 | 0; + /** + * Enables the FIPS compliant crypto provider in a FIPS-enabled Node.js build. + * Throws an error if FIPS mode is not available. + * @since v10.0.0 + * @param bool `true` to enable FIPS mode. + */ + function setFips(bool: boolean): void; + /** + * ```js + * const { + * getHashes, + * } = await import('node:crypto'); + * + * console.log(getHashes()); // ['DSA', 'DSA-SHA', 'DSA-SHA1', ...] + * ``` + * @since v0.9.3 + * @return An array of the names of the supported hash algorithms, such as `'RSA-SHA256'`. Hash algorithms are also called "digest" algorithms. + */ + function getHashes(): string[]; + /** + * The `ECDH` class is a utility for creating Elliptic Curve Diffie-Hellman (ECDH) + * key exchanges. + * + * Instances of the `ECDH` class can be created using the {@link createECDH} function. + * + * ```js + * import assert from 'node:assert'; + * + * const { + * createECDH, + * } = await import('node:crypto'); + * + * // Generate Alice's keys... + * const alice = createECDH('secp521r1'); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createECDH('secp521r1'); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... 
+ * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * // OK + * ``` + * @since v0.11.14 + */ + class ECDH { + private constructor(); + /** + * Converts the EC Diffie-Hellman public key specified by `key` and `curve` to the + * format specified by `format`. The `format` argument specifies point encoding + * and can be `'compressed'`, `'uncompressed'` or `'hybrid'`. The supplied key is + * interpreted using the specified `inputEncoding`, and the returned key is encoded + * using the specified `outputEncoding`. + * + * Use {@link getCurves} to obtain a list of available curve names. + * On recent OpenSSL releases, `openssl ecparam -list_curves` will also display + * the name and description of each available elliptic curve. + * + * If `format` is not specified the point will be returned in `'uncompressed'`format. + * + * If the `inputEncoding` is not provided, `key` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * Example (uncompressing a key): + * + * ```js + * const { + * createECDH, + * ECDH, + * } = await import('node:crypto'); + * + * const ecdh = createECDH('secp256k1'); + * ecdh.generateKeys(); + * + * const compressedKey = ecdh.getPublicKey('hex', 'compressed'); + * + * const uncompressedKey = ECDH.convertKey(compressedKey, + * 'secp256k1', + * 'hex', + * 'hex', + * 'uncompressed'); + * + * // The converted key and the uncompressed public key should be the same + * console.log(uncompressedKey === ecdh.getPublicKey('hex')); + * ``` + * @since v10.0.0 + * @param inputEncoding The `encoding` of the `key` string. + * @param outputEncoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + static convertKey( + key: BinaryLike, + curve: string, + inputEncoding?: BinaryToTextEncoding, + outputEncoding?: "latin1" | "hex" | "base64" | "base64url", + format?: "uncompressed" | "compressed" | "hybrid", + ): Buffer | string; + /** + * Generates private and public EC Diffie-Hellman key values, and returns + * the public key in the specified `format` and `encoding`. This key should be + * transferred to the other party. + * + * The `format` argument specifies point encoding and can be `'compressed'` or`'uncompressed'`. If `format` is not specified, the point will be returned in`'uncompressed'` format. + * + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using specified `inputEncoding`, and the returned secret + * is encoded using the specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`, `TypedArray`, or`DataView`. + * + * If `outputEncoding` is given a string will be returned; otherwise a `Buffer` is returned. + * + * `ecdh.computeSecret` will throw an`ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY` error when `otherPublicKey`lies outside of the elliptic curve. Since `otherPublicKey` is + * usually supplied from a remote user over an insecure network, + * be sure to handle this exception accordingly. 
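+     *
+     * As a sketch of handling that error (here `peerPublicKeyHex` is a placeholder for a
+     * hex-encoded public key received from the remote party):
+     *
+     * ```js
+     * const { createECDH } = await import('node:crypto');
+     *
+     * const ecdh = createECDH('secp256k1');
+     * ecdh.generateKeys();
+     *
+     * let secret;
+     * try {
+     *   secret = ecdh.computeSecret(peerPublicKeyHex, 'hex');
+     * } catch (err) {
+     *   // Thrown, for example, when the supplied point does not lie on the curve.
+     *   console.error('Key agreement failed:', err.code);
+     * }
+     * ```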
+ * @since v0.11.14 + * @param inputEncoding The `encoding` of the `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding): Buffer; + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, outputEncoding: BinaryToTextEncoding): string; + computeSecret( + otherPublicKey: string, + inputEncoding: BinaryToTextEncoding, + outputEncoding: BinaryToTextEncoding, + ): string; + /** + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @return The EC Diffie-Hellman in the specified `encoding`. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * The `format` argument specifies point encoding and can be `'compressed'` or`'uncompressed'`. If `format` is not specified the point will be returned in`'uncompressed'` format. + * + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @param [format='uncompressed'] + * @return The EC Diffie-Hellman public key in the specified `encoding` and `format`. + */ + getPublicKey(encoding?: null, format?: ECDHKeyFormat): Buffer; + getPublicKey(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Sets the EC Diffie-Hellman private key. + * If `encoding` is provided, `privateKey` is expected + * to be a string; otherwise `privateKey` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * If `privateKey` is not valid for the curve specified when the `ECDH` object was + * created, an error is thrown. Upon setting the private key, the associated + * public point (key) is also generated and set in the `ECDH` object. + * @since v0.11.14 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BinaryToTextEncoding): void; + } + /** + * Creates an Elliptic Curve Diffie-Hellman (`ECDH`) key exchange object using a + * predefined curve specified by the `curveName` string. Use {@link getCurves} to obtain a list of available curve names. On recent + * OpenSSL releases, `openssl ecparam -list_curves` will also display the name + * and description of each available elliptic curve. + * @since v0.11.14 + */ + function createECDH(curveName: string): ECDH; + /** + * This function compares the underlying bytes that represent the given`ArrayBuffer`, `TypedArray`, or `DataView` instances using a constant-time + * algorithm. + * + * This function does not leak timing information that + * would allow an attacker to guess one of the values. This is suitable for + * comparing HMAC digests or secret values like authentication cookies or [capability urls](https://www.w3.org/TR/capability-urls/). + * + * `a` and `b` must both be `Buffer`s, `TypedArray`s, or `DataView`s, and they + * must have the same byte length. An error is thrown if `a` and `b` have + * different byte lengths. + * + * If at least one of `a` and `b` is a `TypedArray` with more than one byte per + * entry, such as `Uint16Array`, the result will be computed using the platform + * byte order. 
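+     *
+     * As an example (a sketch only; `secretKey`, `payload` and `receivedHexDigest` are
+     * placeholders), comparing a received HMAC digest against a locally computed one:
+     *
+     * ```js
+     * import { Buffer } from 'node:buffer';
+     * const { createHmac, timingSafeEqual } = await import('node:crypto');
+     *
+     * const expected = createHmac('sha256', secretKey).update(payload).digest();
+     * const received = Buffer.from(receivedHexDigest, 'hex');
+     *
+     * // Both inputs must have the same byte length, otherwise timingSafeEqual() throws.
+     * const ok = received.length === expected.length && timingSafeEqual(received, expected);
+     * ```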
+     *
+     * **When both of the inputs are `Float32Array`s or`Float64Array`s, this function might return unexpected results due to IEEE 754**
+     * **encoding of floating-point numbers. In particular, neither `x === y` nor`Object.is(x, y)` implies that the byte representations of two floating-point**
+     * **numbers `x` and `y` are equal.**
+     *
+     * Use of `crypto.timingSafeEqual` does not guarantee that the _surrounding_ code
+     * is timing-safe. Care should be taken to ensure that the surrounding code does
+     * not introduce timing vulnerabilities.
+     * @since v6.6.0
+     */
+    function timingSafeEqual(a: NodeJS.ArrayBufferView, b: NodeJS.ArrayBufferView): boolean;
+    type KeyType = "rsa" | "rsa-pss" | "dsa" | "ec" | "ed25519" | "ed448" | "x25519" | "x448";
+    type KeyFormat = "pem" | "der" | "jwk";
+    interface BasePrivateKeyEncodingOptions<T extends KeyFormat> {
+        format: T;
+        cipher?: string | undefined;
+        passphrase?: string | undefined;
+    }
+    interface KeyPairKeyObjectResult {
+        publicKey: KeyObject;
+        privateKey: KeyObject;
+    }
+    interface ED25519KeyPairKeyObjectOptions {}
+    interface ED448KeyPairKeyObjectOptions {}
+    interface X25519KeyPairKeyObjectOptions {}
+    interface X448KeyPairKeyObjectOptions {}
+    interface ECKeyPairKeyObjectOptions {
+        /**
+         * Name of the curve to use
+         */
+        namedCurve: string;
+        /**
+         * Must be `'named'` or `'explicit'`. Default: `'named'`.
+         */
+        paramEncoding?: "explicit" | "named" | undefined;
+    }
+    interface RSAKeyPairKeyObjectOptions {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Public exponent
+         * @default 0x10001
+         */
+        publicExponent?: number | undefined;
+    }
+    interface RSAPSSKeyPairKeyObjectOptions {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Public exponent
+         * @default 0x10001
+         */
+        publicExponent?: number | undefined;
+        /**
+         * Name of the message digest
+         */
+        hashAlgorithm?: string;
+        /**
+         * Name of the message digest used by MGF1
+         */
+        mgf1HashAlgorithm?: string;
+        /**
+         * Minimal salt length in bytes
+         */
+        saltLength?: string;
+    }
+    interface DSAKeyPairKeyObjectOptions {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Size of q in bits
+         */
+        divisorLength: number;
+    }
+    interface RSAKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Public exponent
+         * @default 0x10001
+         */
+        publicExponent?: number | undefined;
+        publicKeyEncoding: {
+            type: "pkcs1" | "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs1" | "pkcs8";
+        };
+    }
+    interface RSAPSSKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Public exponent
+         * @default 0x10001
+         */
+        publicExponent?: number | undefined;
+        /**
+         * Name of the message digest
+         */
+        hashAlgorithm?: string;
+        /**
+         * Name of the message digest used by MGF1
+         */
+        mgf1HashAlgorithm?: string;
+        /**
+         * Minimal salt length in bytes
+         */
+        saltLength?: string;
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface DSAKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Size of q in bits
+         */
+        divisorLength: number;
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface ECKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> extends ECKeyPairKeyObjectOptions {
+        publicKeyEncoding: {
+            type: "pkcs1" | "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "sec1" | "pkcs8";
+        };
+    }
+    interface ED25519KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface ED448KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface X25519KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface X448KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface KeyPairSyncResult<T1 extends string | Buffer, T2 extends string | Buffer> {
+        publicKey: T1;
+        privateKey: T2;
+    }
+    /**
+     * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC,
+     * Ed25519, Ed448, X25519, X448, and DH are currently supported.
+     *
+     * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function
+     * behaves as if `keyObject.export()` had been called on its result. Otherwise,
+     * the respective part of the key is returned as a `KeyObject`.
+     *
+     * When encoding public keys, it is recommended to use `'spki'`. When encoding
+     * private keys, it is recommended to use `'pkcs8'` with a strong passphrase,
+     * and to keep the passphrase confidential.
+     *
+     * ```js
+     * const {
+     *   generateKeyPairSync,
+     * } = await import('node:crypto');
+     *
+     * const {
+     *   publicKey,
+     *   privateKey,
+     * } = generateKeyPairSync('rsa', {
+     *   modulusLength: 4096,
+     *   publicKeyEncoding: {
+     *     type: 'spki',
+     *     format: 'pem',
+     *   },
+     *   privateKeyEncoding: {
+     *     type: 'pkcs8',
+     *     format: 'pem',
+     *     cipher: 'aes-256-cbc',
+     *     passphrase: 'top secret',
+     *   },
+     * });
+     * ```
+     *
+     * The return value `{ publicKey, privateKey }` represents the generated key pair.
+     * When PEM encoding was selected, the respective key will be a string, otherwise
+     * it will be a buffer containing the data encoded as DER.
+     * @since v10.12.0
+     * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`.
+     */
+    function generateKeyPairSync(
+        type: "rsa",
+        options: RSAKeyPairOptions<"pem", "pem">,
+    ): KeyPairSyncResult<string, string>;
+    function generateKeyPairSync(
+        type: "rsa",
+        options: RSAKeyPairOptions<"pem", "der">,
+    ): KeyPairSyncResult<string, Buffer>;
+    function generateKeyPairSync(
+        type: "rsa",
+        options: RSAKeyPairOptions<"der", "pem">,
+    ): KeyPairSyncResult<Buffer, string>;
+    function generateKeyPairSync(
+        type: "rsa",
+        options: RSAKeyPairOptions<"der", "der">,
+    ): KeyPairSyncResult<Buffer, Buffer>;
+    function generateKeyPairSync(type: "rsa", options: RSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+    function generateKeyPairSync(
+        type: "rsa-pss",
+        options: RSAPSSKeyPairOptions<"pem", "pem">,
+    ): KeyPairSyncResult<string, string>;
+    function generateKeyPairSync(
+        type: "rsa-pss",
+        options: RSAPSSKeyPairOptions<"pem", "der">,
+    ): KeyPairSyncResult<string, Buffer>;
+    function generateKeyPairSync(
+        type: "rsa-pss",
+        options: RSAPSSKeyPairOptions<"der", "pem">,
+    ): KeyPairSyncResult<Buffer, string>;
+    function generateKeyPairSync(
+        type: "rsa-pss",
+        options: RSAPSSKeyPairOptions<"der", "der">,
+    ): KeyPairSyncResult<Buffer, Buffer>;
+    function generateKeyPairSync(type: "rsa-pss", options: RSAPSSKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+    function generateKeyPairSync(
+        type: "dsa",
+        options: DSAKeyPairOptions<"pem", "pem">,
+    ): KeyPairSyncResult<string, string>;
+    function generateKeyPairSync(
+        type: "dsa",
+        options: DSAKeyPairOptions<"pem", "der">,
+    ): KeyPairSyncResult<string, Buffer>;
+    function generateKeyPairSync(
+        type: "dsa",
+        options: DSAKeyPairOptions<"der", "pem">,
+    ): KeyPairSyncResult<Buffer, string>;
+    function generateKeyPairSync(
+        type: "dsa",
+        options: DSAKeyPairOptions<"der", "der">,
+    ): KeyPairSyncResult<Buffer, Buffer>;
+    function generateKeyPairSync(type: "dsa", options: DSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+    function generateKeyPairSync(
+        type: "ec",
+        options: ECKeyPairOptions<"pem", "pem">,
+    ): KeyPairSyncResult<string, string>;
+    function generateKeyPairSync(
+        type: "ec",
+        options: ECKeyPairOptions<"pem", "der">,
+    ): KeyPairSyncResult<string, Buffer>;
+    function generateKeyPairSync(
+        type: "ec",
+        options: ECKeyPairOptions<"der", "pem">,
+    ): KeyPairSyncResult<Buffer, string>;
+    function generateKeyPairSync(
+        type: "ec",
+        options: ECKeyPairOptions<"der", "der">,
+    ): KeyPairSyncResult<Buffer, Buffer>;
+    function generateKeyPairSync(type: "ec", options: ECKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+    function generateKeyPairSync(
+        type: "ed25519",
+        options: ED25519KeyPairOptions<"pem", "pem">,
+    ): KeyPairSyncResult<string, string>;
+    function generateKeyPairSync(
+        type: "ed25519",
+        options: ED25519KeyPairOptions<"pem", "der">,
+    ): KeyPairSyncResult<string, Buffer>;
+    function generateKeyPairSync(
+        type: "ed25519",
+        options: ED25519KeyPairOptions<"der", "pem">,
+    ): KeyPairSyncResult<Buffer, string>;
+    function generateKeyPairSync(
+        type: "ed25519",
+        options: ED25519KeyPairOptions<"der", "der">,
+    ): KeyPairSyncResult<Buffer, Buffer>;
+    function generateKeyPairSync(type: "ed25519", options?: ED25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+    function generateKeyPairSync(
+        type: "ed448",
+        options: ED448KeyPairOptions<"pem", "pem">,
+    ): KeyPairSyncResult<string, string>;
+    function generateKeyPairSync(
+        type: "ed448",
+        options: ED448KeyPairOptions<"pem", "der">,
+    ): KeyPairSyncResult<string, Buffer>;
+    function generateKeyPairSync(
+        type: "ed448",
+        options: ED448KeyPairOptions<"der", "pem">,
+    ): KeyPairSyncResult<Buffer, string>;
+    function generateKeyPairSync(
+        type: "ed448",
+        options: ED448KeyPairOptions<"der", "der">,
+    ): KeyPairSyncResult<Buffer, Buffer>;
+    function generateKeyPairSync(type: "ed448", options?: ED448KeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+    function generateKeyPairSync(
+        type: "x25519",
+        options: X25519KeyPairOptions<"pem", "pem">,
+    ): KeyPairSyncResult<string, string>;
+    function generateKeyPairSync(
+        type: "x25519",
+        options: X25519KeyPairOptions<"pem", "der">,
+    ): KeyPairSyncResult<string, Buffer>;
+    function generateKeyPairSync(
+        type: "x25519",
+        options: X25519KeyPairOptions<"der", "pem">,
+    ): KeyPairSyncResult<Buffer, string>;
+    function generateKeyPairSync(
+        type: "x25519",
+        options: X25519KeyPairOptions<"der", "der">,
+    ): KeyPairSyncResult<Buffer, Buffer>;
+    function generateKeyPairSync(type: "x25519", options?: X25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+    function generateKeyPairSync(
+        type: "x448",
+        options: X448KeyPairOptions<"pem", "pem">,
+    ): KeyPairSyncResult<string, string>;
+    function generateKeyPairSync(
+        type: "x448",
+        options: X448KeyPairOptions<"pem", "der">,
+    ): KeyPairSyncResult<string, Buffer>;
+    function generateKeyPairSync(
+        type: "x448",
+        options: X448KeyPairOptions<"der", "pem">,
+    ): KeyPairSyncResult<Buffer, string>;
+    function generateKeyPairSync(
+        type: "x448",
+        options: X448KeyPairOptions<"der", "der">,
+    ): KeyPairSyncResult<Buffer, Buffer>;
+    function generateKeyPairSync(type: "x448", options?: X448KeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+    /**
+     * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC,
+     * Ed25519, Ed448, X25519, X448, and DH are currently supported.
+     *
+     * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function
+     * behaves as if `keyObject.export()` had been called on its result. Otherwise,
+     * the respective part of the key is returned as a `KeyObject`.
+     *
+     * It is recommended to encode public keys as `'spki'` and private keys as`'pkcs8'` with encryption for long-term storage:
+     *
+     * ```js
+     * const {
+     *   generateKeyPair,
+     * } = await import('node:crypto');
+     *
+     * generateKeyPair('rsa', {
+     *   modulusLength: 4096,
+     *   publicKeyEncoding: {
+     *     type: 'spki',
+     *     format: 'pem',
+     *   },
+     *   privateKeyEncoding: {
+     *     type: 'pkcs8',
+     *     format: 'pem',
+     *     cipher: 'aes-256-cbc',
+     *     passphrase: 'top secret',
+     *   },
+     * }, (err, publicKey, privateKey) => {
+     *   // Handle errors and use the generated key pair.
+     * });
+     * ```
+     *
+     * On completion, `callback` will be called with `err` set to `undefined` and`publicKey` / `privateKey` representing the generated key pair.
+     *
+     * If this method is invoked as its `util.promisify()` ed version, it returns
+     * a `Promise` for an `Object` with `publicKey` and `privateKey` properties.
+     * @since v10.12.0
+     * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`.
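+     *
+     * As a sketch of the `util.promisify()` usage mentioned above (the `'ed25519'` type and
+     * empty options object are only illustrative):
+     *
+     * ```js
+     * const { promisify } = await import('node:util');
+     * const { generateKeyPair } = await import('node:crypto');
+     *
+     * const generateKeyPairAsync = promisify(generateKeyPair);
+     * const { publicKey, privateKey } = await generateKeyPairAsync('ed25519', {});
+     * // Without publicKeyEncoding/privateKeyEncoding, both keys are KeyObject instances.
+     * ```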
+ */ + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, 
privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + 
): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + namespace generateKeyPair { + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "rsa", options: RSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "dsa", options: DSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "ec", options: ECKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, 
+ ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed25519", + options?: ED25519KeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "ed448", options?: ED448KeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "x25519", + options?: X25519KeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "x448", options?: X448KeyPairKeyObjectOptions): Promise; + } + /** + * Calculates and returns the signature for `data` using the given private key and + * algorithm. If `algorithm` is `null` or `undefined`, then the algorithm is + * dependent upon the key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPrivateKey}. If it is an object, the following + * additional properties can be passed: + * + * If the `callback` function is provided this function uses libuv's threadpool. + * @since v12.0.0 + */ + function sign( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + ): Buffer; + function sign( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + callback: (error: Error | null, data: Buffer) => void, + ): void; + /** + * Verifies the given signature for `data` using the given key and algorithm. If`algorithm` is `null` or `undefined`, then the algorithm is dependent upon the + * key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPublicKey}. 
If it is an object, the following + * additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the `data`. + * + * Because public keys can be derived from private keys, a private key or a public + * key may be passed for `key`. + * + * If the `callback` function is provided this function uses libuv's threadpool. + * @since v12.0.0 + */ + function verify( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + ): boolean; + function verify( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + callback: (error: Error | null, result: boolean) => void, + ): void; + /** + * Computes the Diffie-Hellman secret based on a `privateKey` and a `publicKey`. + * Both keys must have the same `asymmetricKeyType`, which must be one of `'dh'`(for Diffie-Hellman), `'ec'` (for ECDH), `'x448'`, or `'x25519'` (for ECDH-ES). + * @since v13.9.0, v12.17.0 + */ + function diffieHellman(options: { privateKey: KeyObject; publicKey: KeyObject }): Buffer; + type CipherMode = "cbc" | "ccm" | "cfb" | "ctr" | "ecb" | "gcm" | "ocb" | "ofb" | "stream" | "wrap" | "xts"; + interface CipherInfoOptions { + /** + * A test key length. + */ + keyLength?: number | undefined; + /** + * A test IV length. + */ + ivLength?: number | undefined; + } + interface CipherInfo { + /** + * The name of the cipher. + */ + name: string; + /** + * The nid of the cipher. + */ + nid: number; + /** + * The block size of the cipher in bytes. + * This property is omitted when mode is 'stream'. + */ + blockSize?: number | undefined; + /** + * The expected or default initialization vector length in bytes. + * This property is omitted if the cipher does not use an initialization vector. + */ + ivLength?: number | undefined; + /** + * The expected or default key length in bytes. + */ + keyLength: number; + /** + * The cipher mode. + */ + mode: CipherMode; + } + /** + * Returns information about a given cipher. + * + * Some ciphers accept variable length keys and initialization vectors. By default, + * the `crypto.getCipherInfo()` method will return the default values for these + * ciphers. To test if a given key length or iv length is acceptable for given + * cipher, use the `keyLength` and `ivLength` options. If the given values are + * unacceptable, `undefined` will be returned. + * @since v15.0.0 + * @param nameOrNid The name or nid of the cipher to query. + */ + function getCipherInfo(nameOrNid: string | number, options?: CipherInfoOptions): CipherInfo | undefined; + /** + * HKDF is a simple key derivation function defined in RFC 5869\. The given `ikm`,`salt` and `info` are used with the `digest` to derive a key of `keylen` bytes. + * + * The supplied `callback` function is called with two arguments: `err` and`derivedKey`. If an errors occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. The successfully generated `derivedKey` will + * be passed to the callback as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). An error will be thrown if any + * of the input arguments specify invalid values or types. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * hkdf, + * } = await import('node:crypto'); + * + * hkdf('sha512', 'key', 'salt', 'info', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * }); + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. Must be provided but can be zero-length. + * @param salt The salt value. Must be provided but can be zero-length. + * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdf( + digest: string, + irm: BinaryLike | KeyObject, + salt: BinaryLike, + info: BinaryLike, + keylen: number, + callback: (err: Error | null, derivedKey: ArrayBuffer) => void, + ): void; + /** + * Provides a synchronous HKDF key derivation function as defined in RFC 5869\. The + * given `ikm`, `salt` and `info` are used with the `digest` to derive a key of`keylen` bytes. + * + * The successfully generated `derivedKey` will be returned as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). + * + * An error will be thrown if any of the input arguments specify invalid values or + * types, or if the derived key cannot be generated. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * hkdfSync, + * } = await import('node:crypto'); + * + * const derivedKey = hkdfSync('sha512', 'key', 'salt', 'info', 64); + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. Must be provided but can be zero-length. + * @param salt The salt value. Must be provided but can be zero-length. + * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdfSync( + digest: string, + ikm: BinaryLike | KeyObject, + salt: BinaryLike, + info: BinaryLike, + keylen: number, + ): ArrayBuffer; + interface SecureHeapUsage { + /** + * The total allocated secure heap size as specified using the `--secure-heap=n` command-line flag. + */ + total: number; + /** + * The minimum allocation from the secure heap as specified using the `--secure-heap-min` command-line flag. + */ + min: number; + /** + * The total number of bytes currently allocated from the secure heap. + */ + used: number; + /** + * The calculated ratio of `used` to `total` allocated bytes. + */ + utilization: number; + } + /** + * @since v15.6.0 + */ + function secureHeapUsed(): SecureHeapUsage; + interface RandomUUIDOptions { + /** + * By default, to improve performance, + * Node.js will pre-emptively generate and persistently cache enough + * random data to generate up to 128 random UUIDs. To generate a UUID + * without using the cache, set `disableEntropyCache` to `true`. 
+ * + * @default `false` + */ + disableEntropyCache?: boolean | undefined; + } + type UUID = `${string}-${string}-${string}-${string}-${string}`; + /** + * Generates a random [RFC 4122](https://www.rfc-editor.org/rfc/rfc4122.txt) version 4 UUID. The UUID is generated using a + * cryptographic pseudorandom number generator. + * @since v15.6.0, v14.17.0 + */ + function randomUUID(options?: RandomUUIDOptions): UUID; + interface X509CheckOptions { + /** + * @default 'always' + */ + subject?: "always" | "default" | "never"; + /** + * @default true + */ + wildcards?: boolean; + /** + * @default true + */ + partialWildcards?: boolean; + /** + * @default false + */ + multiLabelWildcards?: boolean; + /** + * @default false + */ + singleLabelSubdomains?: boolean; + } + /** + * Encapsulates an X509 certificate and provides read-only access to + * its information. + * + * ```js + * const { X509Certificate } = await import('node:crypto'); + * + * const x509 = new X509Certificate('{... pem encoded cert ...}'); + * + * console.log(x509.subject); + * ``` + * @since v15.6.0 + */ + class X509Certificate { + /** + * Will be \`true\` if this is a Certificate Authority (CA) certificate. + * @since v15.6.0 + */ + readonly ca: boolean; + /** + * The SHA-1 fingerprint of this certificate. + * + * Because SHA-1 is cryptographically broken and because the security of SHA-1 is + * significantly worse than that of algorithms that are commonly used to sign + * certificates, consider using `x509.fingerprint256` instead. + * @since v15.6.0 + */ + readonly fingerprint: string; + /** + * The SHA-256 fingerprint of this certificate. + * @since v15.6.0 + */ + readonly fingerprint256: string; + /** + * The SHA-512 fingerprint of this certificate. + * + * Because computing the SHA-256 fingerprint is usually faster and because it is + * only half the size of the SHA-512 fingerprint, `x509.fingerprint256` may be + * a better choice. While SHA-512 presumably provides a higher level of security in + * general, the security of SHA-256 matches that of most algorithms that are + * commonly used to sign certificates. + * @since v17.2.0, v16.14.0 + */ + readonly fingerprint512: string; + /** + * The complete subject of this certificate. + * @since v15.6.0 + */ + readonly subject: string; + /** + * The subject alternative name specified for this certificate. + * + * This is a comma-separated list of subject alternative names. Each entry begins + * with a string identifying the kind of the subject alternative name followed by + * a colon and the value associated with the entry. + * + * Earlier versions of Node.js incorrectly assumed that it is safe to split this + * property at the two-character sequence `', '` (see [CVE-2021-44532](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44532)). However, + * both malicious and legitimate certificates can contain subject alternative names + * that include this sequence when represented as a string. + * + * After the prefix denoting the type of the entry, the remainder of each entry + * might be enclosed in quotes to indicate that the value is a JSON string literal. + * For backward compatibility, Node.js only uses JSON string literals within this + * property when necessary to avoid ambiguity. Third-party code should be prepared + * to handle both possible entry formats. + * @since v15.6.0 + */ + readonly subjectAltName: string | undefined; + /** + * A textual representation of the certificate's authority information access + * extension. 
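For `randomUUID()`, a short sketch of the cached versus uncached paths described above; no assumptions beyond a recent Node.js runtime.

```js
import { randomUUID } from 'node:crypto';

// Default: draws from the pre-generated entropy cache.
console.log(randomUUID());

// Bypass the cache for this call only.
console.log(randomUUID({ disableEntropyCache: true }));
```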
+ * + * This is a line feed separated list of access descriptions. Each line begins with + * the access method and the kind of the access location, followed by a colon and + * the value associated with the access location. + * + * After the prefix denoting the access method and the kind of the access location, + * the remainder of each line might be enclosed in quotes to indicate that the + * value is a JSON string literal. For backward compatibility, Node.js only uses + * JSON string literals within this property when necessary to avoid ambiguity. + * Third-party code should be prepared to handle both possible entry formats. + * @since v15.6.0 + */ + readonly infoAccess: string | undefined; + /** + * An array detailing the key usages for this certificate. + * @since v15.6.0 + */ + readonly keyUsage: string[]; + /** + * The issuer identification included in this certificate. + * @since v15.6.0 + */ + readonly issuer: string; + /** + * The issuer certificate or `undefined` if the issuer certificate is not + * available. + * @since v15.9.0 + */ + readonly issuerCertificate?: X509Certificate | undefined; + /** + * The public key `KeyObject` for this certificate. + * @since v15.6.0 + */ + readonly publicKey: KeyObject; + /** + * A `Buffer` containing the DER encoding of this certificate. + * @since v15.6.0 + */ + readonly raw: Buffer; + /** + * The serial number of this certificate. + * + * Serial numbers are assigned by certificate authorities and do not uniquely + * identify certificates. Consider using `x509.fingerprint256` as a unique + * identifier instead. + * @since v15.6.0 + */ + readonly serialNumber: string; + /** + * The date/time from which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validFrom: string; + /** + * The date/time until which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validTo: string; + constructor(buffer: BinaryLike); + /** + * Checks whether the certificate matches the given email address. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any email addresses. + * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching email + * address, the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns `email` if the certificate matches, `undefined` if it does not. + */ + checkEmail(email: string, options?: Pick): string | undefined; + /** + * Checks whether the certificate matches the given host name. + * + * If the certificate matches the given host name, the matching subject name is + * returned. The returned name might be an exact match (e.g., `foo.example.com`) + * or it might contain wildcards (e.g., `*.example.com`). Because host name + * comparisons are case-insensitive, the returned subject name might also differ + * from the given `name` in capitalization. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any DNS names. This behavior is consistent with [RFC 2818](https://www.rfc-editor.org/rfc/rfc2818.txt) ("HTTP Over TLS"). 
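A brief sketch using a few of the `X509Certificate` accessors and check methods declared above; `cert.pem` and `ca.pem` are placeholder file names assumed for illustration, and `example.com` is an arbitrary host name.

```js
import { readFileSync } from 'node:fs';
import { X509Certificate } from 'node:crypto';

const cert = new X509Certificate(readFileSync('cert.pem')); // placeholder path
const ca = new X509Certificate(readFileSync('ca.pem'));     // placeholder path

console.log(cert.subject);
console.log(cert.fingerprint256);
console.log(cert.validFrom, '->', cert.validTo);

// Subject-alternative-name / subject matching.
console.log(cert.checkHost('example.com')); // matching name, or undefined

// Issuer relationship and signature check.
console.log(cert.checkIssued(ca));      // boolean
console.log(cert.verify(ca.publicKey)); // boolean
```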
+ * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching DNS name, + * the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns a subject name that matches `name`, or `undefined` if no subject name matches `name`. + */ + checkHost(name: string, options?: X509CheckOptions): string | undefined; + /** + * Checks whether the certificate matches the given IP address (IPv4 or IPv6). + * + * Only [RFC 5280](https://www.rfc-editor.org/rfc/rfc5280.txt) `iPAddress` subject alternative names are considered, and they + * must match the given `ip` address exactly. Other subject alternative names as + * well as the subject field of the certificate are ignored. + * @since v15.6.0 + * @return Returns `ip` if the certificate matches, `undefined` if it does not. + */ + checkIP(ip: string): string | undefined; + /** + * Checks whether this certificate was issued by the given `otherCert`. + * @since v15.6.0 + */ + checkIssued(otherCert: X509Certificate): boolean; + /** + * Checks whether the public key for this certificate is consistent with + * the given private key. + * @since v15.6.0 + * @param privateKey A private key. + */ + checkPrivateKey(privateKey: KeyObject): boolean; + /** + * There is no standard JSON encoding for X509 certificates. The`toJSON()` method returns a string containing the PEM encoded + * certificate. + * @since v15.6.0 + */ + toJSON(): string; + /** + * Returns information about this certificate using the legacy `certificate object` encoding. + * @since v15.6.0 + */ + toLegacyObject(): PeerCertificate; + /** + * Returns the PEM-encoded certificate. + * @since v15.6.0 + */ + toString(): string; + /** + * Verifies that this certificate was signed by the given public key. + * Does not perform any other validation checks on the certificate. + * @since v15.6.0 + * @param publicKey A public key. + */ + verify(publicKey: KeyObject): boolean; + } + type LargeNumberLike = NodeJS.ArrayBufferView | SharedArrayBuffer | ArrayBuffer | bigint; + interface GeneratePrimeOptions { + add?: LargeNumberLike | undefined; + rem?: LargeNumberLike | undefined; + /** + * @default false + */ + safe?: boolean | undefined; + bigint?: boolean | undefined; + } + interface GeneratePrimeOptionsBigInt extends GeneratePrimeOptions { + bigint: true; + } + interface GeneratePrimeOptionsArrayBuffer extends GeneratePrimeOptions { + bigint?: false | undefined; + } + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is,`(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. 
+ * * `options.rem` is ignored if `options.add` is not given. + * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or`DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrime(size: number, callback: (err: Error | null, prime: ArrayBuffer) => void): void; + function generatePrime( + size: number, + options: GeneratePrimeOptionsBigInt, + callback: (err: Error | null, prime: bigint) => void, + ): void; + function generatePrime( + size: number, + options: GeneratePrimeOptionsArrayBuffer, + callback: (err: Error | null, prime: ArrayBuffer) => void, + ): void; + function generatePrime( + size: number, + options: GeneratePrimeOptions, + callback: (err: Error | null, prime: ArrayBuffer | bigint) => void, + ): void; + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is,`(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. + * * `options.rem` is ignored if `options.add` is not given. + * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or`DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrimeSync(size: number): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsBigInt): bigint; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsArrayBuffer): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptions): ArrayBuffer | bigint; + interface CheckPrimeOptions { + /** + * The number of Miller-Rabin probabilistic primality iterations to perform. + * When the value is 0 (zero), a number of checks is used that yields a false positive rate of at most `2**-64` for random input. + * Care must be used when selecting a number of checks. + * Refer to the OpenSSL documentation for the BN_is_prime_ex function nchecks options for more details. 
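A small sketch combining `generatePrimeSync()`, `generatePrime()` and `checkPrimeSync()` as declared above; the bit sizes are arbitrary.

```js
import { Buffer } from 'node:buffer';
import { generatePrime, generatePrimeSync, checkPrimeSync } from 'node:crypto';

// Synchronous: a 128-bit safe prime returned as a bigint.
const p = generatePrimeSync(128, { safe: true, bigint: true });
console.log(p, checkPrimeSync(p)); // <some prime> true

// Asynchronous: the prime arrives as big-endian octets in an ArrayBuffer.
generatePrime(256, (err, prime) => {
  if (err) throw err;
  console.log(Buffer.from(prime).toString('hex'));
});
```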
+ * + * @default 0 + */ + checks?: number | undefined; + } + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + */ + function checkPrime(value: LargeNumberLike, callback: (err: Error | null, result: boolean) => void): void; + function checkPrime( + value: LargeNumberLike, + options: CheckPrimeOptions, + callback: (err: Error | null, result: boolean) => void, + ): void; + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + * @return `true` if the candidate is a prime with an error probability less than `0.25 ** options.checks`. + */ + function checkPrimeSync(candidate: LargeNumberLike, options?: CheckPrimeOptions): boolean; + /** + * Load and set the `engine` for some or all OpenSSL functions (selected by flags). + * + * `engine` could be either an id or a path to the engine's shared library. + * + * The optional `flags` argument uses `ENGINE_METHOD_ALL` by default. The `flags`is a bit field taking one of or a mix of the following flags (defined in`crypto.constants`): + * + * * `crypto.constants.ENGINE_METHOD_RSA` + * * `crypto.constants.ENGINE_METHOD_DSA` + * * `crypto.constants.ENGINE_METHOD_DH` + * * `crypto.constants.ENGINE_METHOD_RAND` + * * `crypto.constants.ENGINE_METHOD_EC` + * * `crypto.constants.ENGINE_METHOD_CIPHERS` + * * `crypto.constants.ENGINE_METHOD_DIGESTS` + * * `crypto.constants.ENGINE_METHOD_PKEY_METHS` + * * `crypto.constants.ENGINE_METHOD_PKEY_ASN1_METHS` + * * `crypto.constants.ENGINE_METHOD_ALL` + * * `crypto.constants.ENGINE_METHOD_NONE` + * @since v0.11.11 + * @param flags + */ + function setEngine(engine: string, flags?: number): void; + /** + * A convenient alias for {@link webcrypto.getRandomValues}. This + * implementation is not compliant with the Web Crypto spec, to write + * web-compatible code use {@link webcrypto.getRandomValues} instead. + * @since v17.4.0 + * @return Returns `typedArray`. + */ + function getRandomValues(typedArray: T): T; + /** + * A convenient alias for `crypto.webcrypto.subtle`. + * @since v17.4.0 + */ + const subtle: webcrypto.SubtleCrypto; + /** + * An implementation of the Web Crypto API standard. + * + * See the {@link https://nodejs.org/docs/latest/api/webcrypto.html Web Crypto API documentation} for details. 
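The `getRandomValues()` alias above accepts integer-typed arrays and fills them in place; a minimal sketch:

```js
import { Buffer } from 'node:buffer';
import { getRandomValues, webcrypto } from 'node:crypto';

// The same typed array that was passed in is returned, now filled.
const nonce = getRandomValues(new Uint8Array(16));
console.log(Buffer.from(nonce).toString('hex'));

// Equivalent call through the Web Crypto entry point.
webcrypto.getRandomValues(new Uint32Array(4));
```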
+ * @since v15.0.0 + */ + const webcrypto: webcrypto.Crypto; + namespace webcrypto { + type BufferSource = ArrayBufferView | ArrayBuffer; + type KeyFormat = "jwk" | "pkcs8" | "raw" | "spki"; + type KeyType = "private" | "public" | "secret"; + type KeyUsage = + | "decrypt" + | "deriveBits" + | "deriveKey" + | "encrypt" + | "sign" + | "unwrapKey" + | "verify" + | "wrapKey"; + type AlgorithmIdentifier = Algorithm | string; + type HashAlgorithmIdentifier = AlgorithmIdentifier; + type NamedCurve = string; + type BigInteger = Uint8Array; + interface AesCbcParams extends Algorithm { + iv: BufferSource; + } + interface AesCtrParams extends Algorithm { + counter: BufferSource; + length: number; + } + interface AesDerivedKeyParams extends Algorithm { + length: number; + } + interface AesGcmParams extends Algorithm { + additionalData?: BufferSource; + iv: BufferSource; + tagLength?: number; + } + interface AesKeyAlgorithm extends KeyAlgorithm { + length: number; + } + interface AesKeyGenParams extends Algorithm { + length: number; + } + interface Algorithm { + name: string; + } + interface EcKeyAlgorithm extends KeyAlgorithm { + namedCurve: NamedCurve; + } + interface EcKeyGenParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcKeyImportParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcdhKeyDeriveParams extends Algorithm { + public: CryptoKey; + } + interface EcdsaParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface Ed448Params extends Algorithm { + context?: BufferSource; + } + interface HkdfParams extends Algorithm { + hash: HashAlgorithmIdentifier; + info: BufferSource; + salt: BufferSource; + } + interface HmacImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface HmacKeyAlgorithm extends KeyAlgorithm { + hash: KeyAlgorithm; + length: number; + } + interface HmacKeyGenParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface JsonWebKey { + alg?: string; + crv?: string; + d?: string; + dp?: string; + dq?: string; + e?: string; + ext?: boolean; + k?: string; + key_ops?: string[]; + kty?: string; + n?: string; + oth?: RsaOtherPrimesInfo[]; + p?: string; + q?: string; + qi?: string; + use?: string; + x?: string; + y?: string; + } + interface KeyAlgorithm { + name: string; + } + interface Pbkdf2Params extends Algorithm { + hash: HashAlgorithmIdentifier; + iterations: number; + salt: BufferSource; + } + interface RsaHashedImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface RsaHashedKeyAlgorithm extends RsaKeyAlgorithm { + hash: KeyAlgorithm; + } + interface RsaHashedKeyGenParams extends RsaKeyGenParams { + hash: HashAlgorithmIdentifier; + } + interface RsaKeyAlgorithm extends KeyAlgorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaKeyGenParams extends Algorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaOaepParams extends Algorithm { + label?: BufferSource; + } + interface RsaOtherPrimesInfo { + d?: string; + r?: string; + t?: string; + } + interface RsaPssParams extends Algorithm { + saltLength: number; + } + /** + * Calling `require('node:crypto').webcrypto` returns an instance of the `Crypto` class. + * `Crypto` is a singleton that provides access to the remainder of the crypto API. + * @since v15.0.0 + */ + interface Crypto { + /** + * Provides access to the `SubtleCrypto` API. 
+ * @since v15.0.0 + */ + readonly subtle: SubtleCrypto; + /** + * Generates cryptographically strong random values. + * The given `typedArray` is filled with random values, and a reference to `typedArray` is returned. + * + * The given `typedArray` must be an integer-based instance of {@link NodeJS.TypedArray}, i.e. `Float32Array` and `Float64Array` are not accepted. + * + * An error will be thrown if the given `typedArray` is larger than 65,536 bytes. + * @since v15.0.0 + */ + getRandomValues>(typedArray: T): T; + /** + * Generates a random {@link https://www.rfc-editor.org/rfc/rfc4122.txt RFC 4122} version 4 UUID. + * The UUID is generated using a cryptographic pseudorandom number generator. + * @since v16.7.0 + */ + randomUUID(): UUID; + CryptoKey: CryptoKeyConstructor; + } + // This constructor throws ILLEGAL_CONSTRUCTOR so it should not be newable. + interface CryptoKeyConstructor { + /** Illegal constructor */ + (_: { readonly _: unique symbol }): never; // Allows instanceof to work but not be callable by the user. + readonly length: 0; + readonly name: "CryptoKey"; + readonly prototype: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface CryptoKey { + /** + * An object detailing the algorithm for which the key can be used along with additional algorithm-specific parameters. + * @since v15.0.0 + */ + readonly algorithm: KeyAlgorithm; + /** + * When `true`, the {@link CryptoKey} can be extracted using either `subtleCrypto.exportKey()` or `subtleCrypto.wrapKey()`. + * @since v15.0.0 + */ + readonly extractable: boolean; + /** + * A string identifying whether the key is a symmetric (`'secret'`) or asymmetric (`'private'` or `'public'`) key. + * @since v15.0.0 + */ + readonly type: KeyType; + /** + * An array of strings identifying the operations for which the key may be used. + * + * The possible usages are: + * - `'encrypt'` - The key may be used to encrypt data. + * - `'decrypt'` - The key may be used to decrypt data. + * - `'sign'` - The key may be used to generate digital signatures. + * - `'verify'` - The key may be used to verify digital signatures. + * - `'deriveKey'` - The key may be used to derive a new key. + * - `'deriveBits'` - The key may be used to derive bits. + * - `'wrapKey'` - The key may be used to wrap another key. + * - `'unwrapKey'` - The key may be used to unwrap another key. + * + * Valid key usages depend on the key algorithm (identified by `cryptokey.algorithm.name`). + * @since v15.0.0 + */ + readonly usages: KeyUsage[]; + } + /** + * The `CryptoKeyPair` is a simple dictionary object with `publicKey` and `privateKey` properties, representing an asymmetric key pair. + * @since v15.0.0 + */ + interface CryptoKeyPair { + /** + * A {@link CryptoKey} whose type will be `'private'`. + * @since v15.0.0 + */ + privateKey: CryptoKey; + /** + * A {@link CryptoKey} whose type will be `'public'`. + * @since v15.0.0 + */ + publicKey: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface SubtleCrypto { + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `key`, + * `subtle.decrypt()` attempts to decipher the provided `data`. If successful, + * the returned promise will be resolved with an `` containing the plaintext result. 
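To make the `CryptoKey` properties above concrete, a sketch that generates a symmetric key through `subtle.generateKey()` (declared further below) and inspects it; an ES-module context is assumed for top-level `await`.

```js
import { webcrypto } from 'node:crypto';
const { subtle } = webcrypto;

const key = await subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt']);

console.log(key.type);        // 'secret'
console.log(key.extractable); // true
console.log(key.algorithm);   // { name: 'AES-GCM', length: 256 }
console.log(key.usages);      // e.g. [ 'encrypt', 'decrypt' ]
```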
+ * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + decrypt( + algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `baseKey`, + * `subtle.deriveBits()` attempts to generate `length` bits. + * The Node.js implementation requires that when `length` is a number it must be multiple of `8`. + * When `length` is `null` the maximum number of bits for a given algorithm is generated. This is allowed + * for the `'ECDH'`, `'X25519'`, and `'X448'` algorithms. + * If successful, the returned promise will be resolved with an `` containing the generated data. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @since v15.0.0 + */ + deriveBits(algorithm: EcdhKeyDeriveParams, baseKey: CryptoKey, length: number | null): Promise; + deriveBits( + algorithm: AlgorithmIdentifier | HkdfParams | Pbkdf2Params, + baseKey: CryptoKey, + length: number, + ): Promise; + /** + * Using the method and parameters specified in `algorithm`, and the keying material provided by `baseKey`, + * `subtle.deriveKey()` attempts to generate a new ` based on the method and parameters in `derivedKeyAlgorithm`. + * + * Calling `subtle.deriveKey()` is equivalent to calling `subtle.deriveBits()` to generate raw keying material, + * then passing the result into the `subtle.importKey()` method using the `deriveKeyAlgorithm`, `extractable`, and `keyUsages` parameters as input. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + deriveKey( + algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params, + baseKey: CryptoKey, + derivedKeyAlgorithm: + | AlgorithmIdentifier + | AesDerivedKeyParams + | HmacImportParams + | HkdfParams + | Pbkdf2Params, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + /** + * Using the method identified by `algorithm`, `subtle.digest()` attempts to generate a digest of `data`. + * If successful, the returned promise is resolved with an `` containing the computed digest. + * + * If `algorithm` is provided as a ``, it must be one of: + * + * - `'SHA-1'` + * - `'SHA-256'` + * - `'SHA-384'` + * - `'SHA-512'` + * + * If `algorithm` is provided as an ``, it must have a `name` property whose value is one of the above. + * @since v15.0.0 + */ + digest(algorithm: AlgorithmIdentifier, data: BufferSource): Promise; + /** + * Using the method and parameters specified by `algorithm` and the keying material provided by `key`, + * `subtle.encrypt()` attempts to encipher `data`. If successful, + * the returned promise is resolved with an `` containing the encrypted result. + * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + encrypt( + algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * Exports the given key into the specified format, if supported. 
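A round-trip sketch for `subtle.encrypt()`/`subtle.decrypt()` with AES-GCM, one of the algorithms listed above; the plaintext string is arbitrary and an ES-module context is assumed.

```js
import { webcrypto } from 'node:crypto';
const { subtle } = webcrypto;

const key = await subtle.generateKey({ name: 'AES-GCM', length: 256 }, false, ['encrypt', 'decrypt']);
const iv = webcrypto.getRandomValues(new Uint8Array(12)); // 96-bit IV, a common choice for GCM

const data = new TextEncoder().encode('hello');
const ciphertext = await subtle.encrypt({ name: 'AES-GCM', iv }, key, data);
const plaintext = await subtle.decrypt({ name: 'AES-GCM', iv }, key, ciphertext);

console.log(new TextDecoder().decode(plaintext)); // 'hello'
```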
+ * + * If the `` is not extractable, the returned promise will reject. + * + * When `format` is either `'pkcs8'` or `'spki'` and the export is successful, + * the returned promise will be resolved with an `` containing the exported key data. + * + * When `format` is `'jwk'` and the export is successful, the returned promise will be resolved with a + * JavaScript object conforming to the {@link https://tools.ietf.org/html/rfc7517 JSON Web Key} specification. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @returns `` containing ``. + * @since v15.0.0 + */ + exportKey(format: "jwk", key: CryptoKey): Promise; + exportKey(format: Exclude, key: CryptoKey): Promise; + /** + * Using the method and parameters provided in `algorithm`, + * `subtle.generateKey()` attempts to generate new keying material. + * Depending the method used, the method may generate either a single `` or a ``. + * + * The `` (public and private key) generating algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * The `` (secret key) generating algorithms supported include: + * + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + generateKey( + algorithm: RsaHashedKeyGenParams | EcKeyGenParams, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + generateKey( + algorithm: AesKeyGenParams | HmacKeyGenParams | Pbkdf2Params, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + generateKey( + algorithm: AlgorithmIdentifier, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * The `subtle.importKey()` method attempts to interpret the provided `keyData` as the given `format` + * to create a `` instance using the provided `algorithm`, `extractable`, and `keyUsages` arguments. + * If the import is successful, the returned promise will be resolved with the created ``. + * + * If importing a `'PBKDF2'` key, `extractable` must be `false`. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + importKey( + format: "jwk", + keyData: JsonWebKey, + algorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + importKey( + format: Exclude, + keyData: BufferSource, + algorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * Using the method and parameters given by `algorithm` and the keying material provided by `key`, + * `subtle.sign()` attempts to generate a cryptographic signature of `data`. If successful, + * the returned promise is resolved with an `` containing the generated signature. 
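For `exportKey()`/`importKey()`, a JWK round-trip sketch with an extractable HMAC key; again, an ES-module context is assumed.

```js
import { webcrypto } from 'node:crypto';
const { subtle } = webcrypto;

const key = await subtle.generateKey({ name: 'HMAC', hash: 'SHA-256' }, true, ['sign', 'verify']);

// 'jwk' export resolves with a plain JSON Web Key object.
const jwk = await subtle.exportKey('jwk', key);
console.log(jwk.kty, jwk.alg); // 'oct' 'HS256'

// Re-import under the same algorithm and usages.
const restored = await subtle.importKey('jwk', jwk, { name: 'HMAC', hash: 'SHA-256' }, true, ['sign', 'verify']);
console.log(restored.type); // 'secret'
```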
+ * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + sign( + algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.unwrapKey()` method attempts to decrypt a wrapped key and create a `` instance. + * It is equivalent to calling `subtle.decrypt()` first on the encrypted key data (using the `wrappedKey`, `unwrapAlgo`, and `unwrappingKey` arguments as input) + * then passing the results in to the `subtle.importKey()` method using the `unwrappedKeyAlgo`, `extractable`, and `keyUsages` arguments as inputs. + * If successful, the returned promise is resolved with a `` object. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * + * The unwrapped key algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + unwrapKey( + format: KeyFormat, + wrappedKey: BufferSource, + unwrappingKey: CryptoKey, + unwrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + unwrappedKeyAlgorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * Using the method and parameters given in `algorithm` and the keying material provided by `key`, + * `subtle.verify()` attempts to verify that `signature` is a valid cryptographic signature of `data`. + * The returned promise is resolved with either `true` or `false`. + * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + verify( + algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, + key: CryptoKey, + signature: BufferSource, + data: BufferSource, + ): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.wrapKey()` method exports the keying material into the format identified by `format`, + * then encrypts it using the method and parameters specified by `wrapAlgo` and the keying material provided by `wrappingKey`. + * It is the equivalent to calling `subtle.exportKey()` using `format` and `key` as the arguments, + * then passing the result to the `subtle.encrypt()` method using `wrappingKey` and `wrapAlgo` as inputs. + * If successful, the returned promise will be resolved with an `` containing the encrypted key data. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. 
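A sign-and-verify sketch using ECDSA over P-256, one of the algorithms listed for `subtle.sign()`/`subtle.verify()`; the payload is arbitrary and an ES-module context is assumed.

```js
import { webcrypto } from 'node:crypto';
const { subtle } = webcrypto;

const { privateKey, publicKey } = await subtle.generateKey(
  { name: 'ECDSA', namedCurve: 'P-256' },
  false,
  ['sign', 'verify'],
);

const data = new TextEncoder().encode('payload to protect');
const signature = await subtle.sign({ name: 'ECDSA', hash: 'SHA-256' }, privateKey, data);
const valid = await subtle.verify({ name: 'ECDSA', hash: 'SHA-256' }, publicKey, signature, data);

console.log(valid); // true
```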
+ * @since v15.0.0 + */ + wrapKey( + format: KeyFormat, + key: CryptoKey, + wrappingKey: CryptoKey, + wrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + ): Promise; + } + } + + global { + var crypto: typeof globalThis extends { + crypto: infer T; + onmessage: any; + } ? T + : webcrypto.Crypto; + } +} +declare module "node:crypto" { + export * from "crypto"; +} diff --git a/node_modules/@types/node/dgram.d.ts b/node_modules/@types/node/dgram.d.ts new file mode 100644 index 0000000..0b86ae7 --- /dev/null +++ b/node_modules/@types/node/dgram.d.ts @@ -0,0 +1,596 @@ +/** + * The `node:dgram` module provides an implementation of UDP datagram sockets. + * + * ```js + * import dgram from 'node:dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.error(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/dgram.js) + */ +declare module "dgram" { + import { AddressInfo } from "node:net"; + import * as dns from "node:dns"; + import { Abortable, EventEmitter } from "node:events"; + interface RemoteInfo { + address: string; + family: "IPv4" | "IPv6"; + port: number; + size: number; + } + interface BindOptions { + port?: number | undefined; + address?: string | undefined; + exclusive?: boolean | undefined; + fd?: number | undefined; + } + type SocketType = "udp4" | "udp6"; + interface SocketOptions extends Abortable { + type: SocketType; + reuseAddr?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + recvBufferSize?: number | undefined; + sendBufferSize?: number | undefined; + lookup?: + | (( + hostname: string, + options: dns.LookupOneOptions, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ) => void) + | undefined; + } + /** + * Creates a `dgram.Socket` object. Once the socket is created, calling `socket.bind()` will instruct the socket to begin listening for datagram + * messages. When `address` and `port` are not passed to `socket.bind()` the + * method will bind the socket to the "all interfaces" address on a random port + * (it does the right thing for both `udp4` and `udp6` sockets). The bound address + * and port can be retrieved using `socket.address().address` and `socket.address().port`. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.close()` on the socket: + * + * ```js + * const controller = new AbortController(); + * const { signal } = controller; + * const server = dgram.createSocket({ type: 'udp4', signal }); + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * // Later, when you want to close the server. + * controller.abort(); + * ``` + * @since v0.11.13 + * @param options Available options are: + * @param callback Attached as a listener for `'message'` events. Optional. 
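As the `createSocket()` documentation notes, the optional callback is simply attached as a `'message'` listener; a minimal sketch (binding to port `0` asks the OS for a random free port):

```js
import dgram from 'node:dgram';

// Equivalent to calling socket.on('message', ...) after creation.
const socket = dgram.createSocket('udp4', (msg, rinfo) => {
  console.log(`got ${msg.length} bytes from ${rinfo.address}:${rinfo.port}`);
});

socket.bind(0, () => {
  console.log('listening on port', socket.address().port);
});
```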
+ */ + function createSocket(type: SocketType, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + /** + * Encapsulates the datagram functionality. + * + * New instances of `dgram.Socket` are created using {@link createSocket}. + * The `new` keyword is not to be used to create `dgram.Socket` instances. + * @since v0.1.99 + */ + class Socket extends EventEmitter { + /** + * Tells the kernel to join a multicast group at the given `multicastAddress` and`multicastInterface` using the `IP_ADD_MEMBERSHIP` socket option. If the`multicastInterface` argument is not + * specified, the operating system will choose + * one interface and will add membership to it. To add membership to every + * available interface, call `addMembership` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * + * When sharing a UDP socket across multiple `cluster` workers, the`socket.addMembership()` function must be called only once or an`EADDRINUSE` error will occur: + * + * ```js + * import cluster from 'node:cluster'; + * import dgram from 'node:dgram'; + * + * if (cluster.isPrimary) { + * cluster.fork(); // Works ok. + * cluster.fork(); // Fails with EADDRINUSE. + * } else { + * const s = dgram.createSocket('udp4'); + * s.bind(1234, () => { + * s.addMembership('224.0.0.114'); + * }); + * } + * ``` + * @since v0.6.9 + */ + addMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * Returns an object containing the address information for a socket. + * For UDP sockets, this object will contain `address`, `family`, and `port`properties. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.99 + */ + address(): AddressInfo; + /** + * For UDP sockets, causes the `dgram.Socket` to listen for datagram + * messages on a named `port` and optional `address`. If `port` is not + * specified or is `0`, the operating system will attempt to bind to a + * random port. If `address` is not specified, the operating system will + * attempt to listen on all addresses. Once binding is complete, a`'listening'` event is emitted and the optional `callback` function is + * called. + * + * Specifying both a `'listening'` event listener and passing a`callback` to the `socket.bind()` method is not harmful but not very + * useful. + * + * A bound datagram socket keeps the Node.js process running to receive + * datagram messages. + * + * If binding fails, an `'error'` event is generated. In rare case (e.g. + * attempting to bind with a closed socket), an `Error` may be thrown. + * + * Example of a UDP server listening on port 41234: + * + * ```js + * import dgram from 'node:dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.error(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @since v0.1.99 + * @param callback with no parameters. Called when binding is complete. 
+ */ + bind(port?: number, address?: string, callback?: () => void): this; + bind(port?: number, callback?: () => void): this; + bind(callback?: () => void): this; + bind(options: BindOptions, callback?: () => void): this; + /** + * Close the underlying socket and stop listening for data on it. If a callback is + * provided, it is added as a listener for the `'close'` event. + * @since v0.1.99 + * @param callback Called when the socket has been closed. + */ + close(callback?: () => void): this; + /** + * Associates the `dgram.Socket` to a remote address and port. Every + * message sent by this handle is automatically sent to that destination. Also, + * the socket will only receive messages from that remote peer. + * Trying to call `connect()` on an already connected socket will result + * in an `ERR_SOCKET_DGRAM_IS_CONNECTED` exception. If `address` is not + * provided, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets) + * will be used by default. Once the connection is complete, a `'connect'` event + * is emitted and the optional `callback` function is called. In case of failure, + * the `callback` is called or, failing this, an `'error'` event is emitted. + * @since v12.0.0 + * @param callback Called when the connection is completed or on error. + */ + connect(port: number, address?: string, callback?: () => void): void; + connect(port: number, callback: () => void): void; + /** + * A synchronous function that disassociates a connected `dgram.Socket` from + * its remote address. Trying to call `disconnect()` on an unbound or already + * disconnected socket will result in an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception. + * @since v12.0.0 + */ + disconnect(): void; + /** + * Instructs the kernel to leave a multicast group at `multicastAddress` using the`IP_DROP_MEMBERSHIP` socket option. This method is automatically called by the + * kernel when the socket is closed or the process terminates, so most apps will + * never have reason to call this. + * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v0.6.9 + */ + dropMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_RCVBUF` socket receive buffer size in bytes. + */ + getRecvBufferSize(): number; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_SNDBUF` socket send buffer size in bytes. + */ + getSendBufferSize(): number; + /** + * @since v18.8.0, v16.19.0 + * @return Number of bytes queued for sending. + */ + getSendQueueSize(): number; + /** + * @since v18.8.0, v16.19.0 + * @return Number of send requests currently in the queue awaiting to be processed. + */ + getSendQueueCount(): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active. The `socket.ref()` method adds the socket back to the reference + * counting and restores the default behavior. + * + * Calling `socket.ref()` multiples times will have no additional effect. + * + * The `socket.ref()` method returns a reference to the socket so calls can be + * chained. 
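A connected-socket sketch tying together `connect()`, the buffer-size getters and `disconnect()`; it assumes something (for instance the example server shown earlier in this module's documentation) is listening on `127.0.0.1:41234`.

```js
import dgram from 'node:dgram';

const client = dgram.createSocket('udp4');

// connect() implicitly binds, so the buffer-size getters are safe to call
// from the callback; on an unbound socket they throw ERR_SOCKET_BUFFER_SIZE.
client.connect(41234, '127.0.0.1', () => {
  console.log('send buffer:', client.getSendBufferSize(), 'bytes');
  console.log('recv buffer:', client.getRecvBufferSize(), 'bytes');

  // On a connected socket, send() takes no port/address arguments.
  client.send('ping', (err) => {
    if (err) console.error(err);
    client.disconnect();
    client.close();
  });
});
```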
+ * @since v0.9.1 + */ + ref(): this; + /** + * Returns an object containing the `address`, `family`, and `port` of the remote + * endpoint. This method throws an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception + * if the socket is not connected. + * @since v12.0.0 + */ + remoteAddress(): AddressInfo; + /** + * Broadcasts a datagram on the socket. + * For connectionless sockets, the destination `port` and `address` must be + * specified. Connected sockets, on the other hand, will use their associated + * remote endpoint, so the `port` and `address` arguments must not be set. + * + * The `msg` argument contains the message to be sent. + * Depending on its type, different behavior can apply. If `msg` is a `Buffer`, + * any `TypedArray` or a `DataView`, + * the `offset` and `length` specify the offset within the `Buffer` where the + * message begins and the number of bytes in the message, respectively. + * If `msg` is a `String`, then it is automatically converted to a `Buffer`with `'utf8'` encoding. With messages that + * contain multi-byte characters, `offset` and `length` will be calculated with + * respect to `byte length` and not the character position. + * If `msg` is an array, `offset` and `length` must not be specified. + * + * The `address` argument is a string. If the value of `address` is a host name, + * DNS will be used to resolve the address of the host. If `address` is not + * provided or otherwise nullish, `'127.0.0.1'` (for `udp4` sockets) or `'::1'`(for `udp6` sockets) will be used by default. + * + * If the socket has not been previously bound with a call to `bind`, the socket + * is assigned a random port number and is bound to the "all interfaces" address + * (`'0.0.0.0'` for `udp4` sockets, `'::0'` for `udp6` sockets.) + * + * An optional `callback` function may be specified to as a way of reporting + * DNS errors or for determining when it is safe to reuse the `buf` object. + * DNS lookups delay the time to send for at least one tick of the + * Node.js event loop. + * + * The only way to know for sure that the datagram has been sent is by using a`callback`. If an error occurs and a `callback` is given, the error will be + * passed as the first argument to the `callback`. If a `callback` is not given, + * the error is emitted as an `'error'` event on the `socket` object. + * + * Offset and length are optional but both _must_ be set if either are used. + * They are supported only when the first argument is a `Buffer`, a `TypedArray`, + * or a `DataView`. + * + * This method throws `ERR_SOCKET_BAD_PORT` if called on an unbound socket. + * + * Example of sending a UDP packet to a port on `localhost`; + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.send(message, 41234, 'localhost', (err) => { + * client.close(); + * }); + * ``` + * + * Example of sending a UDP packet composed of multiple buffers to a port on`127.0.0.1`; + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('Some '); + * const buf2 = Buffer.from('bytes'); + * const client = dgram.createSocket('udp4'); + * client.send([buf1, buf2], 41234, (err) => { + * client.close(); + * }); + * ``` + * + * Sending multiple buffers might be faster or slower depending on the + * application and operating system. Run benchmarks to + * determine the optimal strategy on a case-by-case basis. 
Generally speaking, + * however, sending multiple buffers is faster. + * + * Example of sending a UDP packet using a socket connected to a port on`localhost`: + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.connect(41234, 'localhost', (err) => { + * client.send(message, (err) => { + * client.close(); + * }); + * }); + * ``` + * @since v0.1.99 + * @param msg Message to be sent. + * @param offset Offset in the buffer where the message starts. + * @param length Number of bytes in the message. + * @param port Destination port. + * @param address Destination host name or IP address. + * @param callback Called when the message has been sent. + */ + send( + msg: string | Uint8Array | readonly any[], + port?: number, + address?: string, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array | readonly any[], + port?: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array | readonly any[], + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + port?: number, + address?: string, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + port?: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + /** + * Sets or clears the `SO_BROADCAST` socket option. When set to `true`, UDP + * packets may be sent to a local interface's broadcast address. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.6.9 + */ + setBroadcast(flag: boolean): void; + /** + * _All references to scope in this section are referring to [IPv6 Zone Indices](https://en.wikipedia.org/wiki/IPv6_address#Scoped_literal_IPv6_addresses), which are defined by [RFC + * 4007](https://tools.ietf.org/html/rfc4007). In string form, an IP_ + * _with a scope index is written as `'IP%scope'` where scope is an interface name_ + * _or interface number._ + * + * Sets the default outgoing multicast interface of the socket to a chosen + * interface or back to system interface selection. The `multicastInterface` must + * be a valid string representation of an IP from the socket's family. + * + * For IPv4 sockets, this should be the IP configured for the desired physical + * interface. All packets sent to multicast on the socket will be sent on the + * interface determined by the most recent successful use of this call. + * + * For IPv6 sockets, `multicastInterface` should include a scope to indicate the + * interface as in the examples that follow. In IPv6, individual `send` calls can + * also use explicit scope in addresses, so only packets sent to a multicast + * address without specifying an explicit scope are affected by the most recent + * successful use of this call. + * + * This method throws `EBADF` if called on an unbound socket. 
+ * + * #### Example: IPv6 outgoing multicast interface + * + * On most systems, where scope format uses the interface name: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%eth1'); + * }); + * ``` + * + * On Windows, where scope format uses an interface number: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%2'); + * }); + * ``` + * + * #### Example: IPv4 outgoing multicast interface + * + * All systems use an IP of the host on the desired physical interface: + * + * ```js + * const socket = dgram.createSocket('udp4'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('10.0.0.2'); + * }); + * ``` + * @since v8.6.0 + */ + setMulticastInterface(multicastInterface: string): void; + /** + * Sets or clears the `IP_MULTICAST_LOOP` socket option. When set to `true`, + * multicast packets will also be received on the local interface. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastLoopback(flag: boolean): boolean; + /** + * Sets the `IP_MULTICAST_TTL` socket option. While TTL generally stands for + * "Time to Live", in this context it specifies the number of IP hops that a + * packet is allowed to travel through, specifically for multicast traffic. Each + * router or gateway that forwards a packet decrements the TTL. If the TTL is + * decremented to 0 by a router, it will not be forwarded. + * + * The `ttl` argument may be between 0 and 255\. The default on most systems is `1`. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastTTL(ttl: number): number; + /** + * Sets the `SO_RCVBUF` socket option. Sets the maximum socket receive buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setRecvBufferSize(size: number): void; + /** + * Sets the `SO_SNDBUF` socket option. Sets the maximum socket send buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setSendBufferSize(size: number): void; + /** + * Sets the `IP_TTL` socket option. While TTL generally stands for "Time to Live", + * in this context it specifies the number of IP hops that a packet is allowed to + * travel through. Each router or gateway that forwards a packet decrements the + * TTL. If the TTL is decremented to 0 by a router, it will not be forwarded. + * Changing TTL values is typically done for network probes or when multicasting. + * + * The `ttl` argument may be between 1 and 255\. The default on most systems + * is 64. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.101 + */ + setTTL(ttl: number): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active, allowing the process to exit even if the socket is still + * listening. + * + * Calling `socket.unref()` multiple times will have no additional effect. + * + * The `socket.unref()` method returns a reference to the socket so calls can be + * chained. 
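A multicast sketch combining `addMembership()` with the TTL and loopback setters above; the group address `239.1.1.1` and port `5007` are placeholder values.

```js
import dgram from 'node:dgram';

const socket = dgram.createSocket({ type: 'udp4', reuseAddr: true });

socket.on('message', (msg, rinfo) => {
  console.log(`multicast from ${rinfo.address}: ${msg}`);
});

socket.bind(5007, () => {
  socket.addMembership('239.1.1.1');  // join the group on a kernel-chosen interface
  socket.setMulticastTTL(4);          // allow up to 4 router hops
  socket.setMulticastLoopback(true);  // also deliver our own datagrams locally
});
```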
+ * @since v0.9.1 + */ + unref(): this; + /** + * Tells the kernel to join a source-specific multicast channel at the given`sourceAddress` and `groupAddress`, using the `multicastInterface` with the`IP_ADD_SOURCE_MEMBERSHIP` socket + * option. If the `multicastInterface` argument + * is not specified, the operating system will choose one interface and will add + * membership to it. To add membership to every available interface, call`socket.addSourceSpecificMembership()` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * @since v13.1.0, v12.16.0 + */ + addSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * Instructs the kernel to leave a source-specific multicast channel at the given`sourceAddress` and `groupAddress` using the `IP_DROP_SOURCE_MEMBERSHIP`socket option. This method is + * automatically called by the kernel when the + * socket is closed or the process terminates, so most apps will never have + * reason to call this. + * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v13.1.0, v12.16.0 + */ + dropSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. error + * 4. listening + * 5. message + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connect", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connect"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit(event: "message", msg: Buffer, rinfo: RemoteInfo): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connect", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connect", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connect", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: 
"connect", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + /** + * Calls `socket.close()` and returns a promise that fulfills when the socket has closed. + * @since v20.5.0 + */ + [Symbol.asyncDispose](): Promise; + } +} +declare module "node:dgram" { + export * from "dgram"; +} diff --git a/node_modules/@types/node/diagnostics_channel.d.ts b/node_modules/@types/node/diagnostics_channel.d.ts new file mode 100644 index 0000000..cd4bd71 --- /dev/null +++ b/node_modules/@types/node/diagnostics_channel.d.ts @@ -0,0 +1,545 @@ +/** + * The `node:diagnostics_channel` module provides an API to create named channels + * to report arbitrary message data for diagnostics purposes. + * + * It can be accessed using: + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * ``` + * + * It is intended that a module writer wanting to report diagnostics messages + * will create one or many top-level channels to report messages through. + * Channels may also be acquired at runtime but it is not encouraged + * due to the additional overhead of doing so. Channels may be exported for + * convenience, but as long as the name is known it can be acquired anywhere. + * + * If you intend for your module to produce diagnostics data for others to + * consume it is recommended that you include documentation of what named + * channels are used along with the shape of the message data. Channel names + * should generally include the module name to avoid collisions with data from + * other modules. + * @since v15.1.0, v14.17.0 + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/diagnostics_channel.js) + */ +declare module "diagnostics_channel" { + import { AsyncLocalStorage } from "node:async_hooks"; + /** + * Check if there are active subscribers to the named channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * if (diagnostics_channel.hasSubscribers('my-channel')) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return If there are active subscribers + */ + function hasSubscribers(name: string | symbol): boolean; + /** + * This is the primary entry-point for anyone wanting to publish to a named + * channel. It produces a channel object which is optimized to reduce overhead at + * publish time as much as possible. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return The named channel object + */ + function channel(name: string | symbol): Channel; + type ChannelListener = (message: unknown, name: string | symbol) => void; + /** + * Register a message handler to subscribe to this channel. This message handler + * will be run synchronously whenever a message is published to the channel. Any + * errors thrown in the message handler will trigger an `'uncaughtException'`. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * diagnostics_channel.subscribe('my-channel', (message, name) => { + * // Received data + * }); + * ``` + * @since v18.7.0, v16.17.0 + * @param name The channel name + * @param onMessage The handler to receive channel messages + */ + function subscribe(name: string | symbol, onMessage: ChannelListener): void; + /** + * Remove a message handler previously registered to this channel with {@link subscribe}. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * function onMessage(message, name) { + * // Received data + * } + * + * diagnostics_channel.subscribe('my-channel', onMessage); + * + * diagnostics_channel.unsubscribe('my-channel', onMessage); + * ``` + * @since v18.7.0, v16.17.0 + * @param name The channel name + * @param onMessage The previous subscribed handler to remove + * @return `true` if the handler was found, `false` otherwise. + */ + function unsubscribe(name: string | symbol, onMessage: ChannelListener): boolean; + /** + * Creates a `TracingChannel` wrapper for the given `TracingChannel Channels`. If a name is given, the corresponding tracing + * channels will be created in the form of `tracing:${name}:${eventType}` where`eventType` corresponds to the types of `TracingChannel Channels`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channelsByName = diagnostics_channel.tracingChannel('my-channel'); + * + * // or... + * + * const channelsByCollection = diagnostics_channel.tracingChannel({ + * start: diagnostics_channel.channel('tracing:my-channel:start'), + * end: diagnostics_channel.channel('tracing:my-channel:end'), + * asyncStart: diagnostics_channel.channel('tracing:my-channel:asyncStart'), + * asyncEnd: diagnostics_channel.channel('tracing:my-channel:asyncEnd'), + * error: diagnostics_channel.channel('tracing:my-channel:error'), + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param nameOrChannels Channel name or object containing all the `TracingChannel Channels` + * @return Collection of channels to trace with + */ + function tracingChannel< + StoreType = unknown, + ContextType extends object = StoreType extends object ? StoreType : object, + >( + nameOrChannels: string | TracingChannelCollection, + ): TracingChannel; + /** + * The class `Channel` represents an individual named channel within the data + * pipeline. It is used to track subscribers and to publish messages when there + * are subscribers present. It exists as a separate object to avoid channel + * lookups at publish time, enabling very fast publish speeds and allowing + * for heavy use while incurring very minimal cost. Channels are created with {@link channel}, constructing a channel directly + * with `new Channel(name)` is not supported. + * @since v15.1.0, v14.17.0 + */ + class Channel { + readonly name: string | symbol; + /** + * Check if there are active subscribers to this channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * if (channel.hasSubscribers) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + */ + readonly hasSubscribers: boolean; + private constructor(name: string | symbol); + /** + * Publish a message to any subscribers to the channel. This will trigger + * message handlers synchronously so they will execute within the same context. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.publish({ + * some: 'message', + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @param message The message to send to the channel subscribers + */ + publish(message: unknown): void; + /** + * Register a message handler to subscribe to this channel. This message handler + * will be run synchronously whenever a message is published to the channel. Any + * errors thrown in the message handler will trigger an `'uncaughtException'`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.subscribe((message, name) => { + * // Received data + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @deprecated Since v18.7.0,v16.17.0 - Use {@link subscribe(name, onMessage)} + * @param onMessage The handler to receive channel messages + */ + subscribe(onMessage: ChannelListener): void; + /** + * Remove a message handler previously registered to this channel with `channel.subscribe(onMessage)`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * function onMessage(message, name) { + * // Received data + * } + * + * channel.subscribe(onMessage); + * + * channel.unsubscribe(onMessage); + * ``` + * @since v15.1.0, v14.17.0 + * @deprecated Since v18.7.0,v16.17.0 - Use {@link unsubscribe(name, onMessage)} + * @param onMessage The previous subscribed handler to remove + * @return `true` if the handler was found, `false` otherwise. + */ + unsubscribe(onMessage: ChannelListener): void; + /** + * When `channel.runStores(context, ...)` is called, the given context data + * will be applied to any store bound to the channel. If the store has already been + * bound the previous `transform` function will be replaced with the new one. + * The `transform` function may be omitted to set the given context data as the + * context directly. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store, (data) => { + * return { data }; + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param store The store to which to bind the context data + * @param transform Transform context data before setting the store context + */ + bindStore(store: AsyncLocalStorage, transform?: (context: ContextType) => StoreType): void; + /** + * Remove a message handler previously registered to this channel with `channel.bindStore(store)`. 
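Putting the module-level helpers and the `Channel` object documented above together: a producer can guard expensive message construction with `hasSubscribers` while consumers attach through the non-deprecated top-level `subscribe()`. A small end-to-end sketch (the channel name `my-app.db.query` and the message shape are illustrative assumptions):

```js
const diagnostics_channel = require('node:diagnostics_channel');

const channel = diagnostics_channel.channel('my-app.db.query');

// Consumer side: module-level subscribe() is the non-deprecated form.
function onQuery(message, name) {
  console.log(`[${String(name)}]`, message.sql, `${message.durationMs}ms`);
}
diagnostics_channel.subscribe('my-app.db.query', onQuery);

// Producer side: only build the message if someone is listening.
if (channel.hasSubscribers) {
  channel.publish({ sql: 'SELECT 1', durationMs: 0.4 });
}

// Detach when no longer interested.
diagnostics_channel.unsubscribe('my-app.db.query', onQuery);
```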
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store); + * channel.unbindStore(store); + * ``` + * @since v19.9.0 + * @experimental + * @param store The store to unbind from the channel. + * @return `true` if the store was found, `false` otherwise. + */ + unbindStore(store: any): void; + /** + * Applies the given data to any AsyncLocalStorage instances bound to the channel + * for the duration of the given function, then publishes to the channel within + * the scope of that data is applied to the stores. + * + * If a transform function was given to `channel.bindStore(store)` it will be + * applied to transform the message data before it becomes the context value for + * the store. The prior storage context is accessible from within the transform + * function in cases where context linking is required. + * + * The context applied to the store should be accessible in any async code which + * continues from execution which began during the given function, however + * there are some situations in which `context loss` may occur. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store, (message) => { + * const parent = store.getStore(); + * return new Span(message, parent); + * }); + * channel.runStores({ some: 'message' }, () => { + * store.getStore(); // Span({ some: 'message' }) + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param context Message to send to subscribers and bind to stores + * @param fn Handler to run within the entered storage context + * @param thisArg The receiver to be used for the function call. + * @param args Optional arguments to pass to the function. + */ + runStores(): void; + } + interface TracingChannelSubscribers { + start: (message: ContextType) => void; + end: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + asyncStart: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + asyncEnd: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + error: ( + message: ContextType & { + error: unknown; + }, + ) => void; + } + interface TracingChannelCollection { + start: Channel; + end: Channel; + asyncStart: Channel; + asyncEnd: Channel; + error: Channel; + } + /** + * The class `TracingChannel` is a collection of `TracingChannel Channels` which + * together express a single traceable action. It is used to formalize and + * simplify the process of producing events for tracing application flow.{@link tracingChannel} is used to construct a`TracingChannel`. As with `Channel` it is recommended to create and reuse a + * single `TracingChannel` at the top-level of the file rather than creating them + * dynamically. + * @since v19.9.0 + * @experimental + */ + class TracingChannel implements TracingChannelCollection { + start: Channel; + end: Channel; + asyncStart: Channel; + asyncEnd: Channel; + error: Channel; + /** + * Helper to subscribe a collection of functions to the corresponding channels. + * This is the same as calling `channel.subscribe(onMessage)` on each channel + * individually. 
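The `bindStore()`/`runStores()` pair documented above ties published messages to an `AsyncLocalStorage` context. The sketch below uses a plain object instead of the hypothetical `Span` from the example, and assumes the `runStores(context, fn)` call form described in the Node.js documentation (the feature is still experimental):

```js
const diagnostics_channel = require('node:diagnostics_channel');
const { AsyncLocalStorage } = require('node:async_hooks');

const store = new AsyncLocalStorage();
const channel = diagnostics_channel.channel('my-app.request');

// Transform the published context into the value kept in the store.
channel.bindStore(store, (message) => ({ requestId: message.id }));

diagnostics_channel.subscribe('my-app.request', (message) => {
  // Runs synchronously for every publish, including runStores() below.
  console.log('published:', message);
});

// Applies the transformed data to the bound store, publishes { id: 42 },
// and runs the callback inside that storage context.
channel.runStores({ id: 42 }, () => {
  console.log('in store:', store.getStore()); // { requestId: 42 }
});
```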
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.subscribe({ + * start(message) { + * // Handle start message + * }, + * end(message) { + * // Handle end message + * }, + * asyncStart(message) { + * // Handle asyncStart message + * }, + * asyncEnd(message) { + * // Handle asyncEnd message + * }, + * error(message) { + * // Handle error message + * }, + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param subscribers Set of `TracingChannel Channels` subscribers + */ + subscribe(subscribers: TracingChannelSubscribers): void; + /** + * Helper to unsubscribe a collection of functions from the corresponding channels. + * This is the same as calling `channel.unsubscribe(onMessage)` on each channel + * individually. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.unsubscribe({ + * start(message) { + * // Handle start message + * }, + * end(message) { + * // Handle end message + * }, + * asyncStart(message) { + * // Handle asyncStart message + * }, + * asyncEnd(message) { + * // Handle asyncEnd message + * }, + * error(message) { + * // Handle error message + * }, + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param subscribers Set of `TracingChannel Channels` subscribers + * @return `true` if all handlers were successfully unsubscribed, and `false` otherwise. + */ + unsubscribe(subscribers: TracingChannelSubscribers): void; + /** + * Trace a synchronous function call. This will always produce a `start event` and `end event` around the execution and may produce an `error event` if the given function throws an error. + * This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.traceSync(() => { + * // Do something + * }, { + * some: 'thing', + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn Function to wrap a trace around + * @param context Shared object to correlate events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return The return value of the given function + */ + traceSync( + fn: (this: ThisArg, ...args: Args) => any, + context?: ContextType, + thisArg?: ThisArg, + ...args: Args + ): void; + /** + * Trace a promise-returning function call. This will always produce a `start event` and `end event` around the synchronous portion of the + * function execution, and will produce an `asyncStart event` and `asyncEnd event` when a promise continuation is reached. It may also + * produce an `error event` if the given function throws an error or the + * returned promise rejects. This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. 
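The `traceSync()` helper described above pairs naturally with `tracingChannel(...).subscribe(...)`; a compact sketch tracing a synchronous call (the channel name, context object, and handlers are illustrative, and the API is experimental):

```js
const diagnostics_channel = require('node:diagnostics_channel');

const channels = diagnostics_channel.tracingChannel('my-app.render');

// One handler per tracing event; asyncStart/asyncEnd stay idle for sync work.
channels.subscribe({
  start(message) { console.log('start', message.component); },
  end(message) { console.log('end', message.component); },
  asyncStart(message) {},
  asyncEnd(message) {},
  error(message) { console.error('error', message.error); },
});

// Wraps the call in start/end events (and error on throw),
// forwarding 'world' as the traced function's argument.
const html = channels.traceSync(
  (name) => `<h1>Hello, ${name}</h1>`,
  { component: 'greeting' }, // shared context published on each event
  undefined,                 // thisArg
  'world',
);
console.log(html); // <h1>Hello, world</h1>
```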
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.tracePromise(async () => { + * // Do something + * }, { + * some: 'thing', + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn Promise-returning function to wrap a trace around + * @param context Shared object to correlate trace events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return Chained from promise returned by the given function + */ + tracePromise( + fn: (this: ThisArg, ...args: Args) => Promise, + context?: ContextType, + thisArg?: ThisArg, + ...args: Args + ): void; + /** + * Trace a callback-receiving function call. This will always produce a `start event` and `end event` around the synchronous portion of the + * function execution, and will produce a `asyncStart event` and `asyncEnd event` around the callback execution. It may also produce an `error event` if the given function throws an error or + * the returned + * promise rejects. This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. + * + * The `position` will be -1 by default to indicate the final argument should + * be used as the callback. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.traceCallback((arg1, callback) => { + * // Do something + * callback(null, 'result'); + * }, 1, { + * some: 'thing', + * }, thisArg, arg1, callback); + * ``` + * + * The callback will also be run with `channel.runStores(context, ...)` which + * enables context loss recovery in some cases. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * const myStore = new AsyncLocalStorage(); + * + * // The start channel sets the initial store data to something + * // and stores that store data value on the trace context object + * channels.start.bindStore(myStore, (data) => { + * const span = new Span(data); + * data.span = span; + * return span; + * }); + * + * // Then asyncStart can restore from that data it stored previously + * channels.asyncStart.bindStore(myStore, (data) => { + * return data.span; + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn callback using function to wrap a trace around + * @param position Zero-indexed argument position of expected callback + * @param context Shared object to correlate trace events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return The return value of the given function + */ + traceCallback any>( + fn: Fn, + position: number | undefined, + context: ContextType | undefined, + thisArg: any, + ...args: Parameters + ): void; + } +} +declare module "node:diagnostics_channel" { + export * from "diagnostics_channel"; +} diff --git a/node_modules/@types/node/dns.d.ts b/node_modules/@types/node/dns.d.ts new file mode 100644 index 0000000..5bc96d7 --- /dev/null +++ b/node_modules/@types/node/dns.d.ts @@ -0,0 +1,853 @@ +/** + * The `node:dns` module enables name resolution. 
For example, use it to look up IP + * addresses of host names. + * + * Although named for the [Domain Name System (DNS)](https://en.wikipedia.org/wiki/Domain_Name_System), it does not always use the + * DNS protocol for lookups. {@link lookup} uses the operating system + * facilities to perform name resolution. It may not need to perform any network + * communication. To perform name resolution the way other applications on the same + * system do, use {@link lookup}. + * + * ```js + * const dns = require('node:dns'); + * + * dns.lookup('example.org', (err, address, family) => { + * console.log('address: %j family: IPv%s', address, family); + * }); + * // address: "93.184.216.34" family: IPv4 + * ``` + * + * All other functions in the `node:dns` module connect to an actual DNS server to + * perform name resolution. They will always use the network to perform DNS + * queries. These functions do not use the same set of configuration files used by {@link lookup} (e.g. `/etc/hosts`). Use these functions to always perform + * DNS queries, bypassing other name-resolution facilities. + * + * ```js + * const dns = require('node:dns'); + * + * dns.resolve4('archive.org', (err, addresses) => { + * if (err) throw err; + * + * console.log(`addresses: ${JSON.stringify(addresses)}`); + * + * addresses.forEach((a) => { + * dns.reverse(a, (err, hostnames) => { + * if (err) { + * throw err; + * } + * console.log(`reverse for ${a}: ${JSON.stringify(hostnames)}`); + * }); + * }); + * }); + * ``` + * + * See the [Implementation considerations section](https://nodejs.org/docs/latest-v20.x/api/dns.html#implementation-considerations) for more information. + * @see [source](https://github.com/nodejs/node/blob/v20.11.1/lib/dns.js) + */ +declare module "dns" { + import * as dnsPromises from "node:dns/promises"; + // Supported getaddrinfo flags. + /** + * Limits returned address types to the types of non-loopback addresses configured on the system. For example, IPv4 addresses are + * only returned if the current system has at least one IPv4 address configured. + */ + export const ADDRCONFIG: number; + /** + * If the IPv6 family was specified, but no IPv6 addresses were found, then return IPv4 mapped IPv6 addresses. It is not supported + * on some operating systems (e.g. FreeBSD 10.1). + */ + export const V4MAPPED: number; + /** + * If `dns.V4MAPPED` is specified, return resolved IPv6 addresses as + * well as IPv4 mapped IPv6 addresses. + */ + export const ALL: number; + export interface LookupOptions { + /** + * The record family. Must be `4`, `6`, or `0`. For backward compatibility reasons,`'IPv4'` and `'IPv6'` are interpreted + * as `4` and `6` respectively. The value 0 indicates that either an IPv4 or IPv6 address is returned. If the value `0` is used + * with `{ all: true } (see below)`, both IPv4 and IPv6 addresses are returned. + * @default 0 + */ + family?: number | "IPv4" | "IPv6" | undefined; + /** + * One or more [supported `getaddrinfo`](https://nodejs.org/docs/latest-v20.x/api/dns.html#supported-getaddrinfo-flags) flags. Multiple flags may be + * passed by bitwise `OR`ing their values. + */ + hints?: number | undefined; + /** + * When `true`, the callback returns all resolved addresses in an array. Otherwise, returns a single address. + * @default false + */ + all?: boolean | undefined; + /** + * When `true`, the callback receives IPv4 and IPv6 addresses in the order the DNS resolver returned them. When `false`, IPv4 + * addresses are placed before IPv6 addresses. 
Default value is configurable using {@link setDefaultResultOrder()} + * or [`--dns-result-order`](https://nodejs.org/docs/latest-v20.x/api/cli.html#--dns-result-orderorder). + * @default true (addresses are not reordered) + */ + verbatim?: boolean | undefined; + } + export interface LookupOneOptions extends LookupOptions { + all?: false | undefined; + } + export interface LookupAllOptions extends LookupOptions { + all: true; + } + export interface LookupAddress { + /** + * A string representation of an IPv4 or IPv6 address. + */ + address: string; + /** + * `4` or `6`, denoting the family of `address`, or `0` if the address is not an IPv4 or IPv6 address. `0` is a likely indicator of a + * bug in the name resolution service used by the operating system. + */ + family: number; + } + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is `0` or not provided, then + * IPv4 and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the arguments for `callback` change to `(err, addresses)`, with `addresses` being an array of objects with the + * properties `address` and `family`. + * + * On error, `err` is an `Error` object, where `err.code` is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. + * + * `dns.lookup()` does not necessarily have anything to do with the DNS protocol. + * The implementation uses an operating system facility that can associate names + * with addresses and vice versa. This implementation can have subtle but + * important consequences on the behavior of any Node.js program. Please take some + * time to consult the [Implementation considerations section](https://nodejs.org/docs/latest-v20.x/api/dns.html#implementation-considerations) + * before using `dns.lookup()`. + * + * Example usage: + * + * ```js + * const dns = require('node:dns'); + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * dns.lookup('example.com', options, (err, address, family) => + * console.log('address: %j family: IPv%s', address, family)); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dns.lookup('example.com', options, (err, addresses) => + * console.log('addresses: %j', addresses)); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * ``` + * + * If this method is invoked as its [util.promisify()](https://nodejs.org/docs/latest-v20.x/api/util.html#utilpromisifyoriginal) ed + * version, and `all` is not set to `true`, it returns a `Promise` for an `Object` with `address` and `family` properties. 
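That promisified form looks roughly like this (a sketch; `example.com` is used as in the surrounding examples):

```js
const dns = require('node:dns');
const util = require('node:util');

const lookup = util.promisify(dns.lookup);

// Resolves to { address, family } because `all` is not set.
lookup('example.com', { family: 4, hints: dns.ADDRCONFIG })
  .then(({ address, family }) => {
    console.log('address: %j family: IPv%s', address, family);
  })
  .catch((err) => {
    console.error(err.code); // e.g. ENOTFOUND
  });
```

For new code, `dns.promises` (documented further below) is usually simpler than promisifying the callback API by hand.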
+ * @since v0.1.90 + */ + export function lookup( + hostname: string, + family: number, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export function lookup( + hostname: string, + options: LookupOneOptions, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export function lookup( + hostname: string, + options: LookupAllOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: LookupAddress[]) => void, + ): void; + export function lookup( + hostname: string, + options: LookupOptions, + callback: (err: NodeJS.ErrnoException | null, address: string | LookupAddress[], family: number) => void, + ): void; + export function lookup( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export namespace lookup { + function __promisify__(hostname: string, options: LookupAllOptions): Promise; + function __promisify__(hostname: string, options?: LookupOneOptions | number): Promise; + function __promisify__(hostname: string, options: LookupOptions): Promise; + } + /** + * Resolves the given `address` and `port` into a host name and service using + * the operating system's underlying `getnameinfo` implementation. + * + * If `address` is not a valid IP address, a `TypeError` will be thrown. + * The `port` will be coerced to a number. If it is not a legal port, a `TypeError` will be thrown. + * + * On an error, `err` is an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, + * where `err.code` is the error code. + * + * ```js + * const dns = require('node:dns'); + * dns.lookupService('127.0.0.1', 22, (err, hostname, service) => { + * console.log(hostname, service); + * // Prints: localhost ssh + * }); + * ``` + * + * If this method is invoked as its [util.promisify()](https://nodejs.org/docs/latest-v20.x/api/util.html#utilpromisifyoriginal) ed + * version, it returns a `Promise` for an `Object` with `hostname` and `service` properties. + * @since v0.11.14 + */ + export function lookupService( + address: string, + port: number, + callback: (err: NodeJS.ErrnoException | null, hostname: string, service: string) => void, + ): void; + export namespace lookupService { + function __promisify__( + address: string, + port: number, + ): Promise<{ + hostname: string; + service: string; + }>; + } + export interface ResolveOptions { + ttl: boolean; + } + export interface ResolveWithTtlOptions extends ResolveOptions { + ttl: true; + } + export interface RecordWithTtl { + address: string; + ttl: number; + } + /** @deprecated Use `AnyARecord` or `AnyAaaaRecord` instead. 
*/ + export type AnyRecordWithTtl = AnyARecord | AnyAaaaRecord; + export interface AnyARecord extends RecordWithTtl { + type: "A"; + } + export interface AnyAaaaRecord extends RecordWithTtl { + type: "AAAA"; + } + export interface CaaRecord { + critical: number; + issue?: string | undefined; + issuewild?: string | undefined; + iodef?: string | undefined; + contactemail?: string | undefined; + contactphone?: string | undefined; + } + export interface MxRecord { + priority: number; + exchange: string; + } + export interface AnyMxRecord extends MxRecord { + type: "MX"; + } + export interface NaptrRecord { + flags: string; + service: string; + regexp: string; + replacement: string; + order: number; + preference: number; + } + export interface AnyNaptrRecord extends NaptrRecord { + type: "NAPTR"; + } + export interface SoaRecord { + nsname: string; + hostmaster: string; + serial: number; + refresh: number; + retry: number; + expire: number; + minttl: number; + } + export interface AnySoaRecord extends SoaRecord { + type: "SOA"; + } + export interface SrvRecord { + priority: number; + weight: number; + port: number; + name: string; + } + export interface AnySrvRecord extends SrvRecord { + type: "SRV"; + } + export interface AnyTxtRecord { + type: "TXT"; + entries: string[]; + } + export interface AnyNsRecord { + type: "NS"; + value: string; + } + export interface AnyPtrRecord { + type: "PTR"; + value: string; + } + export interface AnyCnameRecord { + type: "CNAME"; + value: string; + } + export type AnyRecord = + | AnyARecord + | AnyAaaaRecord + | AnyCnameRecord + | AnyMxRecord + | AnyNaptrRecord + | AnyNsRecord + | AnyPtrRecord + | AnySoaRecord + | AnySrvRecord + | AnyTxtRecord; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. The `callback` function has arguments `(err, records)`. When successful, `records` will be an array of resource + * records. The type and structure of individual results varies based on `rrtype`: + * + * + * + * On error, `err` is an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, + * where `err.code` is one of the `DNS error codes`. + * @since v0.1.27 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. 
+ */ + export function resolve( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "A", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "AAAA", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "ANY", + callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "CNAME", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "MX", + callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "NAPTR", + callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "NS", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "PTR", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "SOA", + callback: (err: NodeJS.ErrnoException | null, addresses: SoaRecord) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "SRV", + callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "TXT", + callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: string, + callback: ( + err: NodeJS.ErrnoException | null, + addresses: string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[], + ) => void, + ): void; + export namespace resolve { + function __promisify__(hostname: string, rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR"): Promise; + function __promisify__(hostname: string, rrtype: "ANY"): Promise; + function __promisify__(hostname: string, rrtype: "MX"): Promise; + function __promisify__(hostname: string, rrtype: "NAPTR"): Promise; + function __promisify__(hostname: string, rrtype: "SOA"): Promise; + function __promisify__(hostname: string, rrtype: "SRV"): Promise; + function __promisify__(hostname: string, rrtype: "TXT"): Promise; + function __promisify__( + hostname: string, + rrtype: string, + ): Promise; + } + /** + * Uses the DNS protocol to resolve a IPv4 addresses (`A` records) for the `hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of IPv4 addresses (e.g.`['74.125.79.104', '74.125.79.105', '74.125.79.106']`). + * @since v0.1.16 + * @param hostname Host name to resolve. 
+ */ + export function resolve4( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve4( + hostname: string, + options: ResolveWithTtlOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void, + ): void; + export function resolve4( + hostname: string, + options: ResolveOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void, + ): void; + export namespace resolve4 { + function __promisify__(hostname: string): Promise; + function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise; + function __promisify__(hostname: string, options?: ResolveOptions): Promise; + } + /** + * Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the `hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of IPv6 addresses. + * @since v0.1.16 + * @param hostname Host name to resolve. + */ + export function resolve6( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve6( + hostname: string, + options: ResolveWithTtlOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void, + ): void; + export function resolve6( + hostname: string, + options: ResolveOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void, + ): void; + export namespace resolve6 { + function __promisify__(hostname: string): Promise; + function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise; + function __promisify__(hostname: string, options?: ResolveOptions): Promise; + } + /** + * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of canonical name records available for the `hostname` (e.g. `['bar.example.com']`). + * @since v0.3.2 + */ + export function resolveCname( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolveCname { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve `CAA` records for the `hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of certification authority authorization records + * available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`). + * @since v15.0.0, v14.17.0 + */ + export function resolveCaa( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, records: CaaRecord[]) => void, + ): void; + export namespace resolveCaa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the `hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of objects containing both a `priority` and `exchange` property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v0.1.27 + */ + export function resolveMx( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void, + ): void; + export namespace resolveMx { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve regular expression-based records (`NAPTR` records) for the `hostname`. 
The `addresses` argument passed to the `callback` function will contain an array of + * objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v0.9.12 + */ + export function resolveNaptr( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void, + ): void; + export namespace resolveNaptr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the `hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of name server records available for `hostname` (e.g. `['ns1.example.com', 'ns2.example.com']`). + * @since v0.1.90 + */ + export function resolveNs( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolveNs { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the `hostname`. The `addresses` argument passed to the `callback` function will + * be an array of strings containing the reply records. + * @since v6.0.0 + */ + export function resolvePtr( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolvePtr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. The `address` argument passed to the `callback` function will + * be an object with the following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v0.11.10 + */ + export function resolveSoa( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, address: SoaRecord) => void, + ): void; + export namespace resolveSoa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the `hostname`. The `addresses` argument passed to the `callback` function will + * be an array of objects with the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v0.1.27 + */ + export function resolveSrv( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void, + ): void; + export namespace resolveSrv { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. The `records` argument passed to the `callback` function is a + * two-dimensional array of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. 
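For instance, joining the chunks of each TXT record back together, and picking the preferred MX host by priority (a sketch; the domain is illustrative):

```js
const dns = require('node:dns');

dns.resolveTxt('example.com', (err, records) => {
  if (err) throw err;

  // Each entry is an array of chunks belonging to one TXT record.
  const txt = records.map((chunks) => chunks.join(''));
  console.log(txt);
});

dns.resolveMx('example.com', (err, addresses) => {
  if (err) throw err;

  // Lower priority value means higher preference.
  addresses.sort((a, b) => a.priority - b.priority);
  console.log(addresses[0].exchange);
});
```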
+ * @since v0.1.27 + */ + export function resolveTxt( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void, + ): void; + export namespace resolveTxt { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * The `ret` argument passed to the `callback` function will be an array containing + * various types of records. Each object has a property `type` that indicates the + * type of the current record. And depending on the `type`, additional properties + * will be present on the object: + * + * + * + * Here is an example of the `ret` object passed to the callback: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * + * DNS server operators may choose not to respond to `ANY` queries. It may be better to call individual methods like {@link resolve4}, {@link resolveMx}, and so on. For more details, see + * [RFC 8482](https://tools.ietf.org/html/rfc8482). + */ + export function resolveAny( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void, + ): void; + export namespace resolveAny { + function __promisify__(hostname: string): Promise; + } + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, `err` is an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` is + * one of the [DNS error codes](https://nodejs.org/docs/latest-v20.x/api/dns.html#error-codes). + * @since v0.1.16 + */ + export function reverse( + ip: string, + callback: (err: NodeJS.ErrnoException | null, hostnames: string[]) => void, + ): void; + /** + * Get the default value for `verbatim` in {@link lookup} and [`dnsPromises.lookup()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromiseslookuphostname-options). + * The value could be: + * + * * `ipv4first`: for `verbatim` defaulting to `false`. + * * `verbatim`: for `verbatim` defaulting to `true`. + * @since v18.17.0 + */ + export function getDefaultResultOrder(): "ipv4first" | "verbatim"; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dns.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dns.setServers()` method must not be called while a DNS query is in + * progress. + * + * The {@link setServers} method affects only {@link resolve}, `dns.resolve*()` and {@link reverse} (and specifically _not_ {@link lookup}). + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). 
+ * That is, if attempting to resolve with the first server provided results in a `NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. + * @since v0.11.3 + * @param servers array of [RFC 5952](https://datatracker.ietf.org/doc/html/rfc5952#section-6) formatted addresses + */ + export function setServers(servers: readonly string[]): void; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v0.11.3 + */ + export function getServers(): string[]; + /** + * Set the default value of `verbatim` in {@link lookup} and [`dnsPromises.lookup()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromiseslookuphostname-options). + * The value could be: + * + * * `ipv4first`: sets default `verbatim` `false`. + * * `verbatim`: sets default `verbatim` `true`. + * + * The default is `verbatim` and {@link setDefaultResultOrder} have higher + * priority than [`--dns-result-order`](https://nodejs.org/docs/latest-v20.x/api/cli.html#--dns-result-orderorder). When using + * [worker threads](https://nodejs.org/docs/latest-v20.x/api/worker_threads.html), {@link setDefaultResultOrder} from the main + * thread won't affect the default dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'` or `'verbatim'`. + */ + export function setDefaultResultOrder(order: "ipv4first" | "verbatim"): void; + // Error codes + export const NODATA: "NODATA"; + export const FORMERR: "FORMERR"; + export const SERVFAIL: "SERVFAIL"; + export const NOTFOUND: "NOTFOUND"; + export const NOTIMP: "NOTIMP"; + export const REFUSED: "REFUSED"; + export const BADQUERY: "BADQUERY"; + export const BADNAME: "BADNAME"; + export const BADFAMILY: "BADFAMILY"; + export const BADRESP: "BADRESP"; + export const CONNREFUSED: "TIMEOUT"; + export const TIMEOUT: "TIMEOUT"; + export const EOF: "EOF"; + export const FILE: "FILE"; + export const NOMEM: "NOMEM"; + export const DESTRUCTION: "DESTRUCTION"; + export const BADSTR: "BADSTR"; + export const BADFLAGS: "BADFLAGS"; + export const NONAME: "NONAME"; + export const BADHINTS: "BADHINTS"; + export const NOTINITIALIZED: "NOTINITIALIZED"; + export const LOADIPHLPAPI: "LOADIPHLPAPI"; + export const ADDRGETNETWORKPARAMS: "ADDRGETNETWORKPARAMS"; + export const CANCELLED: "CANCELLED"; + export interface ResolverOptions { + /** + * Query timeout in milliseconds, or `-1` to use the default timeout. + */ + timeout?: number | undefined; + /** + * The number of tries the resolver will try contacting each name server before giving up. + * @default 4 + */ + tries?: number; + } + /** + * An independent resolver for DNS requests. + * + * Creating a new resolver uses the default server settings. Setting + * the servers used for a resolver using [`resolver.setServers()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnssetserversservers) does not affect + * other resolvers: + * + * ```js + * const { Resolver } = require('node:dns'); + * const resolver = new Resolver(); + * resolver.setServers(['4.4.4.4']); + * + * // This request will use the server at 4.4.4.4, independent of global settings. 
+ * resolver.resolve4('example.org', (err, addresses) => { + * // ... + * }); + * ``` + * + * The following methods from the `node:dns` module are available: + * + * * `resolver.getServers()` + * * `resolver.resolve()` + * * `resolver.resolve4()` + * * `resolver.resolve6()` + * * `resolver.resolveAny()` + * * `resolver.resolveCaa()` + * * `resolver.resolveCname()` + * * `resolver.resolveMx()` + * * `resolver.resolveNaptr()` + * * `resolver.resolveNs()` + * * `resolver.resolvePtr()` + * * `resolver.resolveSoa()` + * * `resolver.resolveSrv()` + * * `resolver.resolveTxt()` + * * `resolver.reverse()` + * * `resolver.setServers()` + * @since v8.3.0 + */ + export class Resolver { + constructor(options?: ResolverOptions); + /** + * Cancel all outstanding DNS queries made by this resolver. The corresponding + * callbacks will be called with an error with code `ECANCELLED`. + * @since v8.3.0 + */ + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCaa: typeof resolveCaa; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + /** + * The resolver instance will send its requests from the specified IP address. + * This allows programs to specify outbound interfaces when used on multi-homed + * systems. + * + * If a v4 or v6 address is not specified, it is set to the default and the + * operating system will choose a local address automatically. + * + * The resolver will use the v4 local address when making requests to IPv4 DNS + * servers, and the v6 local address when making requests to IPv6 DNS servers. + * The `rrtype` of resolution requests has no impact on the local address used. + * @since v15.1.0, v14.17.0 + * @param [ipv4='0.0.0.0'] A string representation of an IPv4 address. + * @param [ipv6='::0'] A string representation of an IPv6 address. + */ + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } + export { dnsPromises as promises }; +} +declare module "node:dns" { + export * from "dns"; +} diff --git a/node_modules/@types/node/dns/promises.d.ts b/node_modules/@types/node/dns/promises.d.ts new file mode 100644 index 0000000..8256fcd --- /dev/null +++ b/node_modules/@types/node/dns/promises.d.ts @@ -0,0 +1,473 @@ +/** + * The `dns.promises` API provides an alternative set of asynchronous DNS methods + * that return `Promise` objects rather than using callbacks. The API is accessible + * via `require('node:dns').promises` or `require('node:dns/promises')`. + * @since v10.6.0 + */ +declare module "dns/promises" { + import { + AnyRecord, + CaaRecord, + LookupAddress, + LookupAllOptions, + LookupOneOptions, + LookupOptions, + MxRecord, + NaptrRecord, + RecordWithTtl, + ResolveOptions, + ResolverOptions, + ResolveWithTtlOptions, + SoaRecord, + SrvRecord, + } from "node:dns"; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. A string will include a port + * section if a custom port is used. 
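Expanding on the `Resolver` example above: an independent resolver can also pin the outgoing local address and cancel outstanding queries. A sketch under the options documented above (server addresses, the local IP, and timings are illustrative):

```js
const { Resolver } = require('node:dns');

const resolver = new Resolver({ timeout: 2000, tries: 2 });
resolver.setServers(['1.1.1.1', '8.8.8.8:53']);

// Send queries from a specific local IPv4 address on multi-homed hosts.
resolver.setLocalAddress('192.0.2.10');

resolver.resolve4('example.org', (err, addresses) => {
  if (err) {
    // ECANCELLED if cancel() ran first, otherwise a DNS error code.
    return console.error(err.code);
  }
  console.log(addresses);
});

// Abort all outstanding queries made through this resolver.
setTimeout(() => resolver.cancel(), 5000);
```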
+ * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v10.6.0 + */ + function getServers(): string[]; + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is not provided, then IPv4 + * and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the `Promise` is resolved with `addresses` being an array of objects with the properties `address` and `family`. + * + * On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. + * + * [`dnsPromises.lookup()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromiseslookuphostname-options) does not necessarily have anything to do with the DNS + * protocol. The implementation uses an operating system facility that can + * associate names with addresses and vice versa. This implementation can have + * subtle but important consequences on the behavior of any Node.js program. Please + * take some time to consult the [Implementation considerations section](https://nodejs.org/docs/latest-v20.x/api/dns.html#implementation-considerations) before + * using `dnsPromises.lookup()`. + * + * Example usage: + * + * ```js + * const dns = require('node:dns'); + * const dnsPromises = dns.promises; + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * + * dnsPromises.lookup('example.com', options).then((result) => { + * console.log('address: %j family: IPv%s', result.address, result.family); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * }); + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dnsPromises.lookup('example.com', options).then((result) => { + * console.log('addresses: %j', result); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * }); + * ``` + * @since v10.6.0 + */ + function lookup(hostname: string, family: number): Promise; + function lookup(hostname: string, options: LookupOneOptions): Promise; + function lookup(hostname: string, options: LookupAllOptions): Promise; + function lookup(hostname: string, options: LookupOptions): Promise; + function lookup(hostname: string): Promise; + /** + * Resolves the given `address` and `port` into a host name and service using + * the operating system's underlying `getnameinfo` implementation. + * + * If `address` is not a valid IP address, a `TypeError` will be thrown. + * The `port` will be coerced to a number. If it is not a legal port, a `TypeError` will be thrown. + * + * On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` is the error code. 
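The promise-based API reads most naturally with `async`/`await`; a brief sketch combining `lookup()` with one of the `resolve*()` helpers (the host name is illustrative):

```js
const dns = require('node:dns');
const dnsPromises = dns.promises;

async function describeHost(hostname) {
  // First found A/AAAA record for the host name.
  const { address, family } = await dnsPromises.lookup(hostname, {
    hints: dns.ADDRCONFIG,
  });
  console.log('address: %j family: IPv%s', address, family);

  // Mail exchangers via the DNS protocol, sorted by priority.
  const mx = await dnsPromises.resolveMx(hostname);
  mx.sort((a, b) => a.priority - b.priority);
  return mx;
}

describeHost('example.org')
  .then((mx) => console.log(mx))
  .catch((err) => console.error(err.code)); // e.g. ENOTFOUND
```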
+ * + * ```js + * const dnsPromises = require('node:dns').promises; + * dnsPromises.lookupService('127.0.0.1', 22).then((result) => { + * console.log(result.hostname, result.service); + * // Prints: localhost ssh + * }); + * ``` + * @since v10.6.0 + */ + function lookupService( + address: string, + port: number, + ): Promise<{ + hostname: string; + service: string; + }>; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. When successful, the `Promise` is resolved with an + * array of resource records. The type and structure of individual results vary + * based on `rrtype`: + * + * + * + * On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` + * is one of the [DNS error codes](https://nodejs.org/docs/latest-v20.x/api/dns.html#error-codes). + * @since v10.6.0 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. + */ + function resolve(hostname: string): Promise; + function resolve(hostname: string, rrtype: "A"): Promise; + function resolve(hostname: string, rrtype: "AAAA"): Promise; + function resolve(hostname: string, rrtype: "ANY"): Promise; + function resolve(hostname: string, rrtype: "CAA"): Promise; + function resolve(hostname: string, rrtype: "CNAME"): Promise; + function resolve(hostname: string, rrtype: "MX"): Promise; + function resolve(hostname: string, rrtype: "NAPTR"): Promise; + function resolve(hostname: string, rrtype: "NS"): Promise; + function resolve(hostname: string, rrtype: "PTR"): Promise; + function resolve(hostname: string, rrtype: "SOA"): Promise; + function resolve(hostname: string, rrtype: "SRV"): Promise; + function resolve(hostname: string, rrtype: "TXT"): Promise; + function resolve( + hostname: string, + rrtype: string, + ): Promise; + /** + * Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the `hostname`. On success, the `Promise` is resolved with an array of IPv4 + * addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`). + * @since v10.6.0 + * @param hostname Host name to resolve. + */ + function resolve4(hostname: string): Promise; + function resolve4(hostname: string, options: ResolveWithTtlOptions): Promise; + function resolve4(hostname: string, options: ResolveOptions): Promise; + /** + * Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the `hostname`. On success, the `Promise` is resolved with an array of IPv6 + * addresses. + * @since v10.6.0 + * @param hostname Host name to resolve. + */ + function resolve6(hostname: string): Promise; + function resolve6(hostname: string, options: ResolveWithTtlOptions): Promise; + function resolve6(hostname: string, options: ResolveOptions): Promise; + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * On success, the `Promise` is resolved with an array containing various types of + * records. Each object has a property `type` that indicates the type of the + * current record. 
And depending on the `type`, additional properties will be + * present on the object: + * + * + * + * Here is an example of the result object: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * @since v10.6.0 + */ + function resolveAny(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve `CAA` records for the `hostname`. On success, + * the `Promise` is resolved with an array of objects containing available + * certification authority authorization records available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'},{critical: 128, issue: 'pki.example.com'}]`). + * @since v15.0.0, v14.17.0 + */ + function resolveCaa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. On success, + * the `Promise` is resolved with an array of canonical name records available for + * the `hostname` (e.g. `['bar.example.com']`). + * @since v10.6.0 + */ + function resolveCname(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the `hostname`. On success, the `Promise` is resolved with an array of objects + * containing both a `priority` and `exchange` property (e.g.`[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v10.6.0 + */ + function resolveMx(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve regular expression-based records (`NAPTR` records) for the `hostname`. On success, the `Promise` is resolved with an array + * of objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v10.6.0 + */ + function resolveNaptr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the `hostname`. On success, the `Promise` is resolved with an array of name server + * records available for `hostname` (e.g.`['ns1.example.com', 'ns2.example.com']`). + * @since v10.6.0 + */ + function resolveNs(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the `hostname`. On success, the `Promise` is resolved with an array of strings + * containing the reply records. + * @since v10.6.0 + */ + function resolvePtr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. 
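A hedged sketch of the `resolveMx()` and `resolveCname()` calls documented above; the domain is a placeholder, and the element shapes follow the descriptions given in the comments.

```js
const { promises: dnsPromises } = require('node:dns');

async function mailInfo(domain) {
  // MX records arrive as { priority, exchange } objects.
  const mx = await dnsPromises.resolveMx(domain);
  mx.sort((a, b) => a.priority - b.priority);
  console.log('preferred exchange:', mx[0] && mx[0].exchange);

  // CNAME records arrive as plain canonical-name strings.
  const cnames = await dnsPromises.resolveCname('www.' + domain).catch(() => []);
  console.log('cnames:', cnames);
}

mailInfo('example.com').catch(console.error);
```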
On success, the `Promise` is resolved with an object with the + * following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v10.6.0 + */ + function resolveSoa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the `hostname`. On success, the `Promise` is resolved with an array of objects with + * the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v10.6.0 + */ + function resolveSrv(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. On success, the `Promise` is resolved with a two-dimensional array + * of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. + * @since v10.6.0 + */ + function resolveTxt(hostname: string): Promise; + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, the `Promise` is rejected with an [`Error`](https://nodejs.org/docs/latest-v20.x/api/errors.html#class-error) object, where `err.code` + * is one of the [DNS error codes](https://nodejs.org/docs/latest-v20.x/api/dns.html#error-codes). + * @since v10.6.0 + */ + function reverse(ip: string): Promise; + /** + * Get the default value for `verbatim` in {@link lookup} and [dnsPromises.lookup()](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromiseslookuphostname-options). + * The value could be: + * + * * `ipv4first`: for `verbatim` defaulting to `false`. + * * `verbatim`: for `verbatim` defaulting to `true`. + * @since v20.1.0 + */ + function getDefaultResultOrder(): "ipv4first" | "verbatim"; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dnsPromises.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dnsPromises.setServers()` method must not be called while a DNS query is in + * progress. + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). + * That is, if attempting to resolve with the first server provided results in a`NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. + * @since v10.6.0 + * @param servers array of `RFC 5952` formatted addresses + */ + function setServers(servers: readonly string[]): void; + /** + * Set the default value of `verbatim` in `dns.lookup()` and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: sets default `verbatim` `false`. 
+ * * `verbatim`: sets default `verbatim` `true`. + * + * The default is `verbatim` and [dnsPromises.setDefaultResultOrder()](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromisessetdefaultresultorderorder) + * have higher priority than [`--dns-result-order`](https://nodejs.org/docs/latest-v20.x/api/cli.html#--dns-result-orderorder). + * When using [worker threads](https://nodejs.org/docs/latest-v20.x/api/worker_threads.html), [`dnsPromises.setDefaultResultOrder()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromisessetdefaultresultorderorder) + * from the main thread won't affect the default dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'` or `'verbatim'`. + */ + function setDefaultResultOrder(order: "ipv4first" | "verbatim"): void; + const NODATA: "NODATA"; + const FORMERR: "FORMERR"; + const SERVFAIL: "SERVFAIL"; + const NOTFOUND: "NOTFOUND"; + const NOTIMP: "NOTIMP"; + const REFUSED: "REFUSED"; + const BADQUERY: "BADQUERY"; + const BADNAME: "BADNAME"; + const BADFAMILY: "BADFAMILY"; + const BADRESP: "BADRESP"; + const CONNREFUSED: "TIMEOUT"; + const TIMEOUT: "TIMEOUT"; + const EOF: "EOF"; + const FILE: "FILE"; + const NOMEM: "NOMEM"; + const DESTRUCTION: "DESTRUCTION"; + const BADSTR: "BADSTR"; + const BADFLAGS: "BADFLAGS"; + const NONAME: "NONAME"; + const BADHINTS: "BADHINTS"; + const NOTINITIALIZED: "NOTINITIALIZED"; + const LOADIPHLPAPI: "LOADIPHLPAPI"; + const ADDRGETNETWORKPARAMS: "ADDRGETNETWORKPARAMS"; + const CANCELLED: "CANCELLED"; + /** + * An independent resolver for DNS requests. + * + * Creating a new resolver uses the default server settings. Setting + * the servers used for a resolver using [`resolver.setServers()`](https://nodejs.org/docs/latest-v20.x/api/dns.html#dnspromisessetserversservers) does not affect + * other resolvers: + * + * ```js + * const { Resolver } = require('node:dns').promises; + * const resolver = new Resolver(); + * resolver.setServers(['4.4.4.4']); + * + * // This request will use the server at 4.4.4.4, independent of global settings. + * resolver.resolve4('example.org').then((addresses) => { + * // ... + * }); + * + * // Alternatively, the same code can be written using async-await style. + * (async function() { + * const addresses = await resolver.resolve4('example.org'); + * })(); + * ``` + * + * The following methods from the `dnsPromises` API are available: + * + * * `resolver.getServers()` + * * `resolver.resolve()` + * * `resolver.resolve4()` + * * `resolver.resolve6()` + * * `resolver.resolveAny()` + * * `resolver.resolveCaa()` + * * `resolver.resolveCname()` + * * `resolver.resolveMx()` + * * `resolver.resolveNaptr()` + * * `resolver.resolveNs()` + * * `resolver.resolvePtr()` + * * `resolver.resolveSoa()` + * * `resolver.resolveSrv()` + * * `resolver.resolveTxt()` + * * `resolver.reverse()` + * * `resolver.setServers()` + * @since v10.6.0 + */ + class Resolver { + constructor(options?: ResolverOptions); + /** + * Cancel all outstanding DNS queries made by this resolver. The corresponding + * callbacks will be called with an error with code `ECANCELLED`. 
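To illustrate the independent `Resolver` and the `cancel()` behaviour described above, a minimal sketch; the `192.0.2.1` server address is an assumption, chosen only because a TEST-NET address is unlikely to answer, so the query stays pending long enough to be cancelled and its promise rejects with `err.code === 'ECANCELLED'`.

```js
const { Resolver } = require('node:dns').promises;

const resolver = new Resolver();
// Assumed non-responding DNS server (TEST-NET range), so the query hangs.
resolver.setServers(['192.0.2.1']);

const pending = resolver.resolve4('example.org').catch((err) => {
  console.log('query rejected with code:', err.code); // ECANCELLED
});

// Cancel all outstanding queries made by this resolver.
setTimeout(() => resolver.cancel(), 100);

pending.then(() => console.log('done'));
```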
+ * @since v8.3.0 + */ + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCaa: typeof resolveCaa; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + /** + * The resolver instance will send its requests from the specified IP address. + * This allows programs to specify outbound interfaces when used on multi-homed + * systems. + * + * If a v4 or v6 address is not specified, it is set to the default and the + * operating system will choose a local address automatically. + * + * The resolver will use the v4 local address when making requests to IPv4 DNS + * servers, and the v6 local address when making requests to IPv6 DNS servers. + * The `rrtype` of resolution requests has no impact on the local address used. + * @since v15.1.0, v14.17.0 + * @param [ipv4='0.0.0.0'] A string representation of an IPv4 address. + * @param [ipv6='::0'] A string representation of an IPv6 address. + */ + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } +} +declare module "node:dns/promises" { + export * from "dns/promises"; +} diff --git a/node_modules/@types/node/dom-events.d.ts b/node_modules/@types/node/dom-events.d.ts new file mode 100644 index 0000000..f47f71d --- /dev/null +++ b/node_modules/@types/node/dom-events.d.ts @@ -0,0 +1,124 @@ +export {}; // Don't export anything! + +//// DOM-like Events +// NB: The Event / EventTarget / EventListener implementations below were copied +// from lib.dom.d.ts, then edited to reflect Node's documentation at +// https://nodejs.org/api/events.html#class-eventtarget. +// Please read that link to understand important implementation differences. + +// This conditional type will be the existing global Event in a browser, or +// the copy below in a Node environment. +type __Event = typeof globalThis extends { onmessage: any; Event: any } ? {} + : { + /** This is not used in Node.js and is provided purely for completeness. */ + readonly bubbles: boolean; + /** Alias for event.stopPropagation(). This is not used in Node.js and is provided purely for completeness. */ + cancelBubble: () => void; + /** True if the event was created with the cancelable option */ + readonly cancelable: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly composed: boolean; + /** Returns an array containing the current EventTarget as the only entry or empty if the event is not being dispatched. This is not used in Node.js and is provided purely for completeness. */ + composedPath(): [EventTarget?]; + /** Alias for event.target. */ + readonly currentTarget: EventTarget | null; + /** Is true if cancelable is true and event.preventDefault() has been called. */ + readonly defaultPrevented: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly eventPhase: 0 | 2; + /** The `AbortSignal` "abort" event is emitted with `isTrusted` set to `true`. The value is `false` in all other cases. */ + readonly isTrusted: boolean; + /** Sets the `defaultPrevented` property to `true` if `cancelable` is `true`. */ + preventDefault(): void; + /** This is not used in Node.js and is provided purely for completeness. 
*/ + returnValue: boolean; + /** Alias for event.target. */ + readonly srcElement: EventTarget | null; + /** Stops the invocation of event listeners after the current one completes. */ + stopImmediatePropagation(): void; + /** This is not used in Node.js and is provided purely for completeness. */ + stopPropagation(): void; + /** The `EventTarget` dispatching the event */ + readonly target: EventTarget | null; + /** The millisecond timestamp when the Event object was created. */ + readonly timeStamp: number; + /** Returns the type of event, e.g. "click", "hashchange", or "submit". */ + readonly type: string; + }; + +// See comment above explaining conditional type +type __EventTarget = typeof globalThis extends { onmessage: any; EventTarget: any } ? {} + : { + /** + * Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value. + * + * If the `once` option is true, the `listener` is removed after the next time a `type` event is dispatched. + * + * The `capture` option is not used by Node.js in any functional way other than tracking registered event listeners per the `EventTarget` specification. + * Specifically, the `capture` option is used as part of the key when registering a `listener`. + * Any individual `listener` may be added once with `capture = false`, and once with `capture = true`. + */ + addEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: AddEventListenerOptions | boolean, + ): void; + /** Dispatches a synthetic event event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. */ + dispatchEvent(event: Event): boolean; + /** Removes the event listener in target's event listener list with the same type, callback, and options. */ + removeEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: EventListenerOptions | boolean, + ): void; + }; + +interface EventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; +} + +interface EventListenerOptions { + /** Not directly used by Node.js. Added for API completeness. Default: `false`. */ + capture?: boolean; +} + +interface AddEventListenerOptions extends EventListenerOptions { + /** When `true`, the listener is automatically removed when it is first invoked. Default: `false`. */ + once?: boolean; + /** When `true`, serves as a hint that the listener will not call the `Event` object's `preventDefault()` method. Default: false. */ + passive?: boolean; + /** The listener will be removed when the given AbortSignal object's `abort()` method is called. */ + signal?: AbortSignal; +} + +interface EventListener { + (evt: Event): void; +} + +interface EventListenerObject { + handleEvent(object: Event): void; +} + +import {} from "events"; // Make this an ambient declaration +declare global { + /** An event which takes place in the DOM. */ + interface Event extends __Event {} + var Event: typeof globalThis extends { onmessage: any; Event: infer T } ? T + : { + prototype: __Event; + new(type: string, eventInitDict?: EventInit): __Event; + }; + + /** + * EventTarget is a DOM interface implemented by objects that can + * receive events and may have listeners for them. + */ + interface EventTarget extends __EventTarget {} + var EventTarget: typeof globalThis extends { onmessage: any; EventTarget: infer T } ? 
T + : { + prototype: __EventTarget; + new(): __EventTarget; + }; +} diff --git a/node_modules/@types/node/domain.d.ts b/node_modules/@types/node/domain.d.ts new file mode 100644 index 0000000..72f17bd --- /dev/null +++ b/node_modules/@types/node/domain.d.ts @@ -0,0 +1,170 @@ +/** + * **This module is pending deprecation.** Once a replacement API has been + * finalized, this module will be fully deprecated. Most developers should + * **not** have cause to use this module. Users who absolutely must have + * the functionality that domains provide may rely on it for the time being + * but should expect to have to migrate to a different solution + * in the future. + * + * Domains provide a way to handle multiple different IO operations as a + * single group. If any of the event emitters or callbacks registered to a + * domain emit an `'error'` event, or throw an error, then the domain object + * will be notified, rather than losing the context of the error in the`process.on('uncaughtException')` handler, or causing the program to + * exit immediately with an error code. + * @deprecated Since v1.4.2 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/domain.js) + */ +declare module "domain" { + import EventEmitter = require("node:events"); + /** + * The `Domain` class encapsulates the functionality of routing errors and + * uncaught exceptions to the active `Domain` object. + * + * To handle the errors that it catches, listen to its `'error'` event. + */ + class Domain extends EventEmitter { + /** + * An array of timers and event emitters that have been explicitly added + * to the domain. + */ + members: Array; + /** + * The `enter()` method is plumbing used by the `run()`, `bind()`, and`intercept()` methods to set the active domain. It sets `domain.active` and`process.domain` to the domain, and implicitly + * pushes the domain onto the domain + * stack managed by the domain module (see {@link exit} for details on the + * domain stack). The call to `enter()` delimits the beginning of a chain of + * asynchronous calls and I/O operations bound to a domain. + * + * Calling `enter()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + enter(): void; + /** + * The `exit()` method exits the current domain, popping it off the domain stack. + * Any time execution is going to switch to the context of a different chain of + * asynchronous calls, it's important to ensure that the current domain is exited. + * The call to `exit()` delimits either the end of or an interruption to the chain + * of asynchronous calls and I/O operations bound to a domain. + * + * If there are multiple, nested domains bound to the current execution context,`exit()` will exit any domains nested within this domain. + * + * Calling `exit()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + exit(): void; + /** + * Run the supplied function in the context of the domain, implicitly + * binding all event emitters, timers, and low-level requests that are + * created in that context. Optionally, arguments can be passed to + * the function. + * + * This is the most basic way to use a domain. 
+ * + * ```js + * const domain = require('node:domain'); + * const fs = require('node:fs'); + * const d = domain.create(); + * d.on('error', (er) => { + * console.error('Caught error!', er); + * }); + * d.run(() => { + * process.nextTick(() => { + * setTimeout(() => { // Simulating some various async stuff + * fs.open('non-existent file', 'r', (er, fd) => { + * if (er) throw er; + * // proceed... + * }); + * }, 100); + * }); + * }); + * ``` + * + * In this example, the `d.on('error')` handler will be triggered, rather + * than crashing the program. + */ + run(fn: (...args: any[]) => T, ...args: any[]): T; + /** + * Explicitly adds an emitter to the domain. If any event handlers called by + * the emitter throw an error, or if the emitter emits an `'error'` event, it + * will be routed to the domain's `'error'` event, just like with implicit + * binding. + * + * This also works with timers that are returned from `setInterval()` and `setTimeout()`. If their callback function throws, it will be caught by + * the domain `'error'` handler. + * + * If the Timer or `EventEmitter` was already bound to a domain, it is removed + * from that one, and bound to this one instead. + * @param emitter emitter or timer to be added to the domain + */ + add(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The opposite of {@link add}. Removes domain handling from the + * specified emitter. + * @param emitter emitter or timer to be removed from the domain + */ + remove(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The returned function will be a wrapper around the supplied callback + * function. When the returned function is called, any errors that are + * thrown will be routed to the domain's `'error'` event. + * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.bind((er, data) => { + * // If this throws, it will also be passed to the domain. + * return cb(er, data ? JSON.parse(data) : null); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. + * }); + * ``` + * @param callback The callback function + * @return The bound function + */ + bind(callback: T): T; + /** + * This method is almost identical to {@link bind}. However, in + * addition to catching thrown errors, it will also intercept `Error` objects sent as the first argument to the function. + * + * In this way, the common `if (err) return callback(err);` pattern can be replaced + * with a single error handler in a single place. + * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.intercept((data) => { + * // Note, the first argument is never passed to the + * // callback since it is assumed to be the 'Error' argument + * // and thus intercepted by the domain. + * + * // If this throws, it will also be passed to the domain + * // so the error-handling logic can be moved to the 'error' + * // event on the domain instead of being repeated throughout + * // the program. + * return cb(null, JSON.parse(data)); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. 
+ * }); + * ``` + * @param callback The callback function + * @return The intercepted function + */ + intercept(callback: T): T; + } + function create(): Domain; +} +declare module "node:domain" { + export * from "domain"; +} diff --git a/node_modules/@types/node/events.d.ts b/node_modules/@types/node/events.d.ts new file mode 100644 index 0000000..fb7289f --- /dev/null +++ b/node_modules/@types/node/events.d.ts @@ -0,0 +1,892 @@ +/** + * Much of the Node.js core API is built around an idiomatic asynchronous + * event-driven architecture in which certain kinds of objects (called "emitters") + * emit named events that cause `Function` objects ("listeners") to be called. + * + * For instance: a `net.Server` object emits an event each time a peer + * connects to it; a `fs.ReadStream` emits an event when the file is opened; + * a `stream` emits an event whenever data is available to be read. + * + * All objects that emit events are instances of the `EventEmitter` class. These + * objects expose an `eventEmitter.on()` function that allows one or more + * functions to be attached to named events emitted by the object. Typically, + * event names are camel-cased strings but any valid JavaScript property key + * can be used. + * + * When the `EventEmitter` object emits an event, all of the functions attached + * to that specific event are called _synchronously_. Any values returned by the + * called listeners are _ignored_ and discarded. + * + * The following example shows a simple `EventEmitter` instance with a single + * listener. The `eventEmitter.on()` method is used to register listeners, while + * the `eventEmitter.emit()` method is used to trigger the event. + * + * ```js + * import { EventEmitter } from 'node:events'; + * + * class MyEmitter extends EventEmitter {} + * + * const myEmitter = new MyEmitter(); + * myEmitter.on('event', () => { + * console.log('an event occurred!'); + * }); + * myEmitter.emit('event'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/events.js) + */ +declare module "events" { + import { AsyncResource, AsyncResourceOptions } from "node:async_hooks"; + // NOTE: This class is in the docs but is **not actually exported** by Node. + // If https://github.com/nodejs/node/issues/39903 gets resolved and Node + // actually starts exporting the class, uncomment below. + // import { EventListener, EventListenerObject } from '__dom-events'; + // /** The NodeEventTarget is a Node.js-specific extension to EventTarget that emulates a subset of the EventEmitter API. */ + // interface NodeEventTarget extends EventTarget { + // /** + // * Node.js-specific extension to the `EventTarget` class that emulates the equivalent `EventEmitter` API. + // * The only difference between `addListener()` and `addEventListener()` is that addListener() will return a reference to the EventTarget. + // */ + // addListener(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that returns an array of event `type` names for which event listeners are registered. */ + // eventNames(): string[]; + // /** Node.js-specific extension to the `EventTarget` class that returns the number of event listeners registered for the `type`. */ + // listenerCount(type: string): number; + // /** Node.js-specific alias for `eventTarget.removeListener()`. 
*/ + // off(type: string, listener: EventListener | EventListenerObject): this; + // /** Node.js-specific alias for `eventTarget.addListener()`. */ + // on(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that adds a `once` listener for the given event `type`. This is equivalent to calling `on` with the `once` option set to `true`. */ + // once(type: string, listener: EventListener | EventListenerObject): this; + // /** + // * Node.js-specific extension to the `EventTarget` class. + // * If `type` is specified, removes all registered listeners for `type`, + // * otherwise removes all registered listeners. + // */ + // removeAllListeners(type: string): this; + // /** + // * Node.js-specific extension to the `EventTarget` class that removes the listener for the given `type`. + // * The only difference between `removeListener()` and `removeEventListener()` is that `removeListener()` will return a reference to the `EventTarget`. + // */ + // removeListener(type: string, listener: EventListener | EventListenerObject): this; + // } + interface EventEmitterOptions { + /** + * Enables automatic capturing of promise rejection. + */ + captureRejections?: boolean | undefined; + } + // Any EventTarget with a DOM-style `addEventListener` + interface _DOMEventTarget { + addEventListener( + eventName: string, + listener: (...args: any[]) => void, + opts?: { + once: boolean; + }, + ): any; + } + interface StaticEventEmitterOptions { + signal?: AbortSignal | undefined; + } + interface EventEmitter = DefaultEventMap> extends NodeJS.EventEmitter {} + type EventMap = Record | DefaultEventMap; + type DefaultEventMap = [never]; + type AnyRest = [...args: any[]]; + type Args = T extends DefaultEventMap ? AnyRest : ( + K extends keyof T ? T[K] : never + ); + type Key = T extends DefaultEventMap ? string | symbol : K | keyof T; + type Key2 = T extends DefaultEventMap ? string | symbol : K & keyof T; + type Listener = T extends DefaultEventMap ? F : ( + K extends keyof T ? ( + T[K] extends unknown[] ? (...args: T[K]) => void : never + ) + : never + ); + type Listener1 = Listener void>; + type Listener2 = Listener; + + /** + * The `EventEmitter` class is defined and exposed by the `node:events` module: + * + * ```js + * import { EventEmitter } from 'node:events'; + * ``` + * + * All `EventEmitter`s emit the event `'newListener'` when new listeners are + * added and `'removeListener'` when existing listeners are removed. + * + * It supports the following option: + * @since v0.1.26 + */ + class EventEmitter = DefaultEventMap> { + constructor(options?: EventEmitterOptions); + + [EventEmitter.captureRejectionSymbol]?(error: Error, event: Key, ...args: Args): void; + + /** + * Creates a `Promise` that is fulfilled when the `EventEmitter` emits the given + * event or that is rejected if the `EventEmitter` emits `'error'` while waiting. + * The `Promise` will resolve with an array of all the arguments emitted to the + * given event. + * + * This method is intentionally generic and works with the web platform [EventTarget](https://dom.spec.whatwg.org/#interface-eventtarget) interface, which has no special`'error'` event + * semantics and does not listen to the `'error'` event. 
+ * + * ```js + * import { once, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ee = new EventEmitter(); + * + * process.nextTick(() => { + * ee.emit('myevent', 42); + * }); + * + * const [value] = await once(ee, 'myevent'); + * console.log(value); + * + * const err = new Error('kaboom'); + * process.nextTick(() => { + * ee.emit('error', err); + * }); + * + * try { + * await once(ee, 'myevent'); + * } catch (err) { + * console.error('error happened', err); + * } + * ``` + * + * The special handling of the `'error'` event is only used when `events.once()`is used to wait for another event. If `events.once()` is used to wait for the + * '`error'` event itself, then it is treated as any other kind of event without + * special handling: + * + * ```js + * import { EventEmitter, once } from 'node:events'; + * + * const ee = new EventEmitter(); + * + * once(ee, 'error') + * .then(([err]) => console.log('ok', err.message)) + * .catch((err) => console.error('error', err.message)); + * + * ee.emit('error', new Error('boom')); + * + * // Prints: ok boom + * ``` + * + * An `AbortSignal` can be used to cancel waiting for the event: + * + * ```js + * import { EventEmitter, once } from 'node:events'; + * + * const ee = new EventEmitter(); + * const ac = new AbortController(); + * + * async function foo(emitter, event, signal) { + * try { + * await once(emitter, event, { signal }); + * console.log('event emitted!'); + * } catch (error) { + * if (error.name === 'AbortError') { + * console.error('Waiting for the event was canceled!'); + * } else { + * console.error('There was an error', error.message); + * } + * } + * } + * + * foo(ee, 'foo', ac.signal); + * ac.abort(); // Abort waiting for the event + * ee.emit('foo'); // Prints: Waiting for the event was canceled! + * ``` + * @since v11.13.0, v10.16.0 + */ + static once( + emitter: NodeJS.EventEmitter, + eventName: string | symbol, + options?: StaticEventEmitterOptions, + ): Promise; + static once(emitter: _DOMEventTarget, eventName: string, options?: StaticEventEmitterOptions): Promise; + /** + * ```js + * import { on, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo')) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). Do not use + * // if concurrent execution is required. + * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * ``` + * + * Returns an `AsyncIterator` that iterates `eventName` events. It will throw + * if the `EventEmitter` emits `'error'`. It removes all listeners when + * exiting the loop. The `value` returned by each iteration is an array + * composed of the emitted event arguments. + * + * An `AbortSignal` can be used to cancel waiting on events: + * + * ```js + * import { on, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ac = new AbortController(); + * + * (async () => { + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo', { signal: ac.signal })) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). 
Do not use + * // if concurrent execution is required. + * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * })(); + * + * process.nextTick(() => ac.abort()); + * ``` + * @since v13.6.0, v12.16.0 + * @param eventName The name of the event being listened for + * @return that iterates `eventName` events emitted by the `emitter` + */ + static on( + emitter: NodeJS.EventEmitter, + eventName: string, + options?: StaticEventEmitterOptions, + ): AsyncIterableIterator; + /** + * A class method that returns the number of listeners for the given `eventName`registered on the given `emitter`. + * + * ```js + * import { EventEmitter, listenerCount } from 'node:events'; + * + * const myEmitter = new EventEmitter(); + * myEmitter.on('event', () => {}); + * myEmitter.on('event', () => {}); + * console.log(listenerCount(myEmitter, 'event')); + * // Prints: 2 + * ``` + * @since v0.9.12 + * @deprecated Since v3.2.0 - Use `listenerCount` instead. + * @param emitter The emitter to query + * @param eventName The event name + */ + static listenerCount(emitter: NodeJS.EventEmitter, eventName: string | symbol): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. + * + * For `EventEmitter`s this behaves exactly the same as calling `.listeners` on + * the emitter. + * + * For `EventTarget`s this is the only way to get the event listeners for the + * event target. This is useful for debugging and diagnostic purposes. + * + * ```js + * import { getEventListeners, EventEmitter } from 'node:events'; + * + * { + * const ee = new EventEmitter(); + * const listener = () => console.log('Events are fun'); + * ee.on('foo', listener); + * console.log(getEventListeners(ee, 'foo')); // [ [Function: listener] ] + * } + * { + * const et = new EventTarget(); + * const listener = () => console.log('Events are fun'); + * et.addEventListener('foo', listener); + * console.log(getEventListeners(et, 'foo')); // [ [Function: listener] ] + * } + * ``` + * @since v15.2.0, v14.17.0 + */ + static getEventListeners(emitter: _DOMEventTarget | NodeJS.EventEmitter, name: string | symbol): Function[]; + /** + * Returns the currently set max amount of listeners. + * + * For `EventEmitter`s this behaves exactly the same as calling `.getMaxListeners` on + * the emitter. + * + * For `EventTarget`s this is the only way to get the max event listeners for the + * event target. If the number of event handlers on a single EventTarget exceeds + * the max set, the EventTarget will print a warning. + * + * ```js + * import { getMaxListeners, setMaxListeners, EventEmitter } from 'node:events'; + * + * { + * const ee = new EventEmitter(); + * console.log(getMaxListeners(ee)); // 10 + * setMaxListeners(11, ee); + * console.log(getMaxListeners(ee)); // 11 + * } + * { + * const et = new EventTarget(); + * console.log(getMaxListeners(et)); // 10 + * setMaxListeners(11, et); + * console.log(getMaxListeners(et)); // 11 + * } + * ``` + * @since v19.9.0 + */ + static getMaxListeners(emitter: _DOMEventTarget | NodeJS.EventEmitter): number; + /** + * ```js + * import { setMaxListeners, EventEmitter } from 'node:events'; + * + * const target = new EventTarget(); + * const emitter = new EventEmitter(); + * + * setMaxListeners(5, target, emitter); + * ``` + * @since v15.4.0 + * @param n A non-negative number. The maximum number of listeners per `EventTarget` event. + * @param eventsTargets Zero or more {EventTarget} or {EventEmitter} instances. 
If none are specified, `n` is set as the default max for all newly created {EventTarget} and {EventEmitter} + * objects. + */ + static setMaxListeners(n?: number, ...eventTargets: Array<_DOMEventTarget | NodeJS.EventEmitter>): void; + /** + * Listens once to the `abort` event on the provided `signal`. + * + * Listening to the `abort` event on abort signals is unsafe and may + * lead to resource leaks since another third party with the signal can + * call `e.stopImmediatePropagation()`. Unfortunately Node.js cannot change + * this since it would violate the web standard. Additionally, the original + * API makes it easy to forget to remove listeners. + * + * This API allows safely using `AbortSignal`s in Node.js APIs by solving these + * two issues by listening to the event such that `stopImmediatePropagation` does + * not prevent the listener from running. + * + * Returns a disposable so that it may be unsubscribed from more easily. + * + * ```js + * import { addAbortListener } from 'node:events'; + * + * function example(signal) { + * let disposable; + * try { + * signal.addEventListener('abort', (e) => e.stopImmediatePropagation()); + * disposable = addAbortListener(signal, (e) => { + * // Do something when signal is aborted. + * }); + * } finally { + * disposable?.[Symbol.dispose](); + * } + * } + * ``` + * @since v20.5.0 + * @experimental + * @return Disposable that removes the `abort` listener. + */ + static addAbortListener(signal: AbortSignal, resource: (event: Event) => void): Disposable; + /** + * This symbol shall be used to install a listener for only monitoring `'error'`events. Listeners installed using this symbol are called before the regular`'error'` listeners are called. + * + * Installing a listener using this symbol does not change the behavior once an`'error'` event is emitted. Therefore, the process will still crash if no + * regular `'error'` listener is installed. + * @since v13.6.0, v12.17.0 + */ + static readonly errorMonitor: unique symbol; + /** + * Value: `Symbol.for('nodejs.rejection')` + * + * See how to write a custom `rejection handler`. + * @since v13.4.0, v12.16.0 + */ + static readonly captureRejectionSymbol: unique symbol; + /** + * Value: [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) + * + * Change the default `captureRejections` option on all new `EventEmitter` objects. + * @since v13.4.0, v12.16.0 + */ + static captureRejections: boolean; + /** + * By default, a maximum of `10` listeners can be registered for any single + * event. This limit can be changed for individual `EventEmitter` instances + * using the `emitter.setMaxListeners(n)` method. To change the default + * for _all_`EventEmitter` instances, the `events.defaultMaxListeners`property can be used. If this value is not a positive number, a `RangeError`is thrown. + * + * Take caution when setting the `events.defaultMaxListeners` because the + * change affects _all_`EventEmitter` instances, including those created before + * the change is made. However, calling `emitter.setMaxListeners(n)` still has + * precedence over `events.defaultMaxListeners`. + * + * This is not a hard limit. The `EventEmitter` instance will allow + * more listeners to be added but will output a trace warning to stderr indicating + * that a "possible EventEmitter memory leak" has been detected. 
For any single`EventEmitter`, the `emitter.getMaxListeners()` and `emitter.setMaxListeners()`methods can be used to + * temporarily avoid this warning: + * + * ```js + * import { EventEmitter } from 'node:events'; + * const emitter = new EventEmitter(); + * emitter.setMaxListeners(emitter.getMaxListeners() + 1); + * emitter.once('event', () => { + * // do stuff + * emitter.setMaxListeners(Math.max(emitter.getMaxListeners() - 1, 0)); + * }); + * ``` + * + * The `--trace-warnings` command-line flag can be used to display the + * stack trace for such warnings. + * + * The emitted warning can be inspected with `process.on('warning')` and will + * have the additional `emitter`, `type`, and `count` properties, referring to + * the event emitter instance, the event's name and the number of attached + * listeners, respectively. + * Its `name` property is set to `'MaxListenersExceededWarning'`. + * @since v0.11.2 + */ + static defaultMaxListeners: number; + } + import internal = require("node:events"); + namespace EventEmitter { + // Should just be `export { EventEmitter }`, but that doesn't work in TypeScript 3.4 + export { internal as EventEmitter }; + export interface Abortable { + /** + * When provided the corresponding `AbortController` can be used to cancel an asynchronous action. + */ + signal?: AbortSignal | undefined; + } + + export interface EventEmitterReferencingAsyncResource extends AsyncResource { + readonly eventEmitter: EventEmitterAsyncResource; + } + + export interface EventEmitterAsyncResourceOptions extends AsyncResourceOptions, EventEmitterOptions { + /** + * The type of async event, this is required when instantiating `EventEmitterAsyncResource` + * directly rather than as a child class. + * @default new.target.name if instantiated as a child class. + */ + name?: string; + } + + /** + * Integrates `EventEmitter` with `AsyncResource` for `EventEmitter`s that + * require manual async tracking. Specifically, all events emitted by instances + * of `events.EventEmitterAsyncResource` will run within its `async context`. + * + * ```js + * import { EventEmitterAsyncResource, EventEmitter } from 'node:events'; + * import { notStrictEqual, strictEqual } from 'node:assert'; + * import { executionAsyncId, triggerAsyncId } from 'node:async_hooks'; + * + * // Async tracking tooling will identify this as 'Q'. + * const ee1 = new EventEmitterAsyncResource({ name: 'Q' }); + * + * // 'foo' listeners will run in the EventEmitters async context. + * ee1.on('foo', () => { + * strictEqual(executionAsyncId(), ee1.asyncId); + * strictEqual(triggerAsyncId(), ee1.triggerAsyncId); + * }); + * + * const ee2 = new EventEmitter(); + * + * // 'foo' listeners on ordinary EventEmitters that do not track async + * // context, however, run in the same async context as the emit(). + * ee2.on('foo', () => { + * notStrictEqual(executionAsyncId(), ee2.asyncId); + * notStrictEqual(triggerAsyncId(), ee2.triggerAsyncId); + * }); + * + * Promise.resolve().then(() => { + * ee1.emit('foo'); + * ee2.emit('foo'); + * }); + * ``` + * + * The `EventEmitterAsyncResource` class has the same methods and takes the + * same options as `EventEmitter` and `AsyncResource` themselves. + * @since v17.4.0, v16.14.0 + */ + export class EventEmitterAsyncResource extends EventEmitter { + /** + * @param options Only optional in child class. + */ + constructor(options?: EventEmitterAsyncResourceOptions); + /** + * Call all `destroy` hooks. This should only ever be called once. An error will + * be thrown if it is called more than once. 
This **must** be manually called. If + * the resource is left to be collected by the GC then the `destroy` hooks will + * never be called. + */ + emitDestroy(): void; + /** + * The unique `asyncId` assigned to the resource. + */ + readonly asyncId: number; + /** + * The same triggerAsyncId that is passed to the AsyncResource constructor. + */ + readonly triggerAsyncId: number; + /** + * The returned `AsyncResource` object has an additional `eventEmitter` property + * that provides a reference to this `EventEmitterAsyncResource`. + */ + readonly asyncResource: EventEmitterReferencingAsyncResource; + } + } + global { + namespace NodeJS { + interface EventEmitter = DefaultEventMap> { + [EventEmitter.captureRejectionSymbol]?(error: Error, event: Key, ...args: Args): void; + /** + * Alias for `emitter.on(eventName, listener)`. + * @since v0.1.26 + */ + addListener(eventName: Key, listener: Listener1): this; + /** + * Adds the `listener` function to the end of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName`and `listener` will result in the `listener` being added, and called, multiple + * times. + * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The`emitter.prependListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEE = new EventEmitter(); + * myEE.on('foo', () => console.log('a')); + * myEE.prependListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.1.101 + * @param eventName The name of the event. + * @param listener The callback function + */ + on(eventName: Key, listener: Listener1): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName`. The + * next time `eventName` is triggered, this listener is removed and then invoked. + * + * ```js + * server.once('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The`emitter.prependOnceListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEE = new EventEmitter(); + * myEE.once('foo', () => console.log('a')); + * myEE.prependOnceListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.3.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + once(eventName: Key, listener: Listener1): this; + /** + * Removes the specified `listener` from the listener array for the event named`eventName`. + * + * ```js + * const callback = (stream) => { + * console.log('someone connected!'); + * }; + * server.on('connection', callback); + * // ... + * server.removeListener('connection', callback); + * ``` + * + * `removeListener()` will remove, at most, one instance of a listener from the + * listener array. 
If any single listener has been added multiple times to the + * listener array for the specified `eventName`, then `removeListener()` must be + * called multiple times to remove each instance. + * + * Once an event is emitted, all listeners attached to it at the + * time of emitting are called in order. This implies that any`removeListener()` or `removeAllListeners()` calls _after_ emitting and _before_ the last listener finishes execution + * will not remove them from`emit()` in progress. Subsequent events behave as expected. + * + * ```js + * import { EventEmitter } from 'node:events'; + * class MyEmitter extends EventEmitter {} + * const myEmitter = new MyEmitter(); + * + * const callbackA = () => { + * console.log('A'); + * myEmitter.removeListener('event', callbackB); + * }; + * + * const callbackB = () => { + * console.log('B'); + * }; + * + * myEmitter.on('event', callbackA); + * + * myEmitter.on('event', callbackB); + * + * // callbackA removes listener callbackB but it will still be called. + * // Internal listener array at time of emit [callbackA, callbackB] + * myEmitter.emit('event'); + * // Prints: + * // A + * // B + * + * // callbackB is now removed. + * // Internal listener array [callbackA] + * myEmitter.emit('event'); + * // Prints: + * // A + * ``` + * + * Because listeners are managed using an internal array, calling this will + * change the position indices of any listener registered _after_ the listener + * being removed. This will not impact the order in which listeners are called, + * but it means that any copies of the listener array as returned by + * the `emitter.listeners()` method will need to be recreated. + * + * When a single function has been added as a handler multiple times for a single + * event (as in the example below), `removeListener()` will remove the most + * recently added instance. In the example the `once('ping')`listener is removed: + * + * ```js + * import { EventEmitter } from 'node:events'; + * const ee = new EventEmitter(); + * + * function pong() { + * console.log('pong'); + * } + * + * ee.on('ping', pong); + * ee.once('ping', pong); + * ee.removeListener('ping', pong); + * + * ee.emit('ping'); + * ee.emit('ping'); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeListener(eventName: Key, listener: Listener1): this; + /** + * Alias for `emitter.removeListener()`. + * @since v10.0.0 + */ + off(eventName: Key, listener: Listener1): this; + /** + * Removes all listeners, or those of the specified `eventName`. + * + * It is bad practice to remove listeners added elsewhere in the code, + * particularly when the `EventEmitter` instance was created by some other + * component or module (e.g. sockets or file streams). + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeAllListeners(eventName?: Key): this; + /** + * By default `EventEmitter`s will print a warning if more than `10` listeners are + * added for a particular event. This is a useful default that helps finding + * memory leaks. The `emitter.setMaxListeners()` method allows the limit to be + * modified for this specific `EventEmitter` instance. The value can be set to`Infinity` (or `0`) to indicate an unlimited number of listeners. + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. 
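A short sketch of the per-instance `setMaxListeners()`/`getMaxListeners()` pair covered above, assuming nothing beyond a plain `EventEmitter`:

```js
const { EventEmitter } = require('node:events');

const bus = new EventEmitter();
console.log(bus.getMaxListeners()); // 10 (the default)

// Raise the limit for this emitter only; 0 or Infinity means unlimited.
bus.setMaxListeners(20);
console.log(bus.getMaxListeners()); // 20
```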
+ * @since v0.3.5 + */ + setMaxListeners(n: number): this; + /** + * Returns the current max listener value for the `EventEmitter` which is either + * set by `emitter.setMaxListeners(n)` or defaults to {@link defaultMaxListeners}. + * @since v1.0.0 + */ + getMaxListeners(): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. + * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * console.log(util.inspect(server.listeners('connection'))); + * // Prints: [ [Function] ] + * ``` + * @since v0.1.26 + */ + listeners(eventName: Key): Array>; + /** + * Returns a copy of the array of listeners for the event named `eventName`, + * including any wrappers (such as those created by `.once()`). + * + * ```js + * import { EventEmitter } from 'node:events'; + * const emitter = new EventEmitter(); + * emitter.once('log', () => console.log('log once')); + * + * // Returns a new Array with a function `onceWrapper` which has a property + * // `listener` which contains the original listener bound above + * const listeners = emitter.rawListeners('log'); + * const logFnWrapper = listeners[0]; + * + * // Logs "log once" to the console and does not unbind the `once` event + * logFnWrapper.listener(); + * + * // Logs "log once" to the console and removes the listener + * logFnWrapper(); + * + * emitter.on('log', () => console.log('log persistently')); + * // Will return a new Array with a single function bound by `.on()` above + * const newListeners = emitter.rawListeners('log'); + * + * // Logs "log persistently" twice + * newListeners[0](); + * emitter.emit('log'); + * ``` + * @since v9.4.0 + */ + rawListeners(eventName: Key): Array>; + /** + * Synchronously calls each of the listeners registered for the event named`eventName`, in the order they were registered, passing the supplied arguments + * to each. + * + * Returns `true` if the event had listeners, `false` otherwise. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEmitter = new EventEmitter(); + * + * // First listener + * myEmitter.on('event', function firstListener() { + * console.log('Helloooo! first listener'); + * }); + * // Second listener + * myEmitter.on('event', function secondListener(arg1, arg2) { + * console.log(`event with parameters ${arg1}, ${arg2} in second listener`); + * }); + * // Third listener + * myEmitter.on('event', function thirdListener(...args) { + * const parameters = args.join(', '); + * console.log(`event with parameters ${parameters} in third listener`); + * }); + * + * console.log(myEmitter.listeners('event')); + * + * myEmitter.emit('event', 1, 2, 3, 4, 5); + * + * // Prints: + * // [ + * // [Function: firstListener], + * // [Function: secondListener], + * // [Function: thirdListener] + * // ] + * // Helloooo! first listener + * // event with parameters 1, 2 in second listener + * // event with parameters 1, 2, 3, 4, 5 in third listener + * ``` + * @since v0.1.26 + */ + emit(eventName: Key, ...args: Args): boolean; + /** + * Returns the number of listeners listening for the event named `eventName`. + * If `listener` is provided, it will return how many times the listener is found + * in the list of the listeners of the event. 
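A brief sketch of `emitter.listenerCount()` as documented above, including the optional `listener` argument; the event name and handlers are placeholders.

```js
const { EventEmitter } = require('node:events');

const emitter = new EventEmitter();
const handler = () => {};

emitter.on('data', handler);
emitter.on('data', handler);
emitter.on('data', () => {});

console.log(emitter.listenerCount('data'));          // 3
console.log(emitter.listenerCount('data', handler)); // 2 (how often `handler` appears)
```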
+ * @since v3.2.0 + * @param eventName The name of the event being listened for + * @param listener The event handler function + */ + listenerCount(eventName: Key, listener?: Listener2): number; + /** + * Adds the `listener` function to the _beginning_ of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName`and `listener` will result in the `listener` being added, and called, multiple + * times. + * + * ```js + * server.prependListener('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + prependListener(eventName: Key, listener: Listener1): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName` to the _beginning_ of the listeners array. The next time `eventName` is triggered, this + * listener is removed, and then invoked. + * + * ```js + * server.prependOnceListener('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + prependOnceListener(eventName: Key, listener: Listener1): this; + /** + * Returns an array listing the events for which the emitter has registered + * listeners. The values in the array are strings or `Symbol`s. + * + * ```js + * import { EventEmitter } from 'node:events'; + * + * const myEE = new EventEmitter(); + * myEE.on('foo', () => {}); + * myEE.on('bar', () => {}); + * + * const sym = Symbol('symbol'); + * myEE.on(sym, () => {}); + * + * console.log(myEE.eventNames()); + * // Prints: [ 'foo', 'bar', Symbol(symbol) ] + * ``` + * @since v6.0.0 + */ + eventNames(): Array<(string | symbol) & Key2>; + } + } + } + export = EventEmitter; +} +declare module "node:events" { + import events = require("events"); + export = events; +} diff --git a/node_modules/@types/node/fs.d.ts b/node_modules/@types/node/fs.d.ts new file mode 100644 index 0000000..5cbca12 --- /dev/null +++ b/node_modules/@types/node/fs.d.ts @@ -0,0 +1,4311 @@ +/** + * The `node:fs` module enables interacting with the file system in a + * way modeled on standard POSIX functions. + * + * To use the promise-based APIs: + * + * ```js + * import * as fs from 'node:fs/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as fs from 'node:fs'; + * ``` + * + * All file system operations have synchronous, callback, and promise-based + * forms, and are accessible using both CommonJS syntax and ES6 Modules (ESM). + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/fs.js) + */ +declare module "fs" { + import * as stream from "node:stream"; + import { Abortable, EventEmitter } from "node:events"; + import { URL } from "node:url"; + import * as promises from "node:fs/promises"; + export { promises }; + /** + * Valid types for path values in "fs". 
+ */ + export type PathLike = string | Buffer | URL; + export type PathOrFileDescriptor = PathLike | number; + export type TimeLike = string | number | Date; + export type NoParamCallback = (err: NodeJS.ErrnoException | null) => void; + export type BufferEncodingOption = + | "buffer" + | { + encoding: "buffer"; + }; + export interface ObjectEncodingOptions { + encoding?: BufferEncoding | null | undefined; + } + export type EncodingOption = ObjectEncodingOptions | BufferEncoding | undefined | null; + export type OpenMode = number | string; + export type Mode = number | string; + export interface StatsBase { + isFile(): boolean; + isDirectory(): boolean; + isBlockDevice(): boolean; + isCharacterDevice(): boolean; + isSymbolicLink(): boolean; + isFIFO(): boolean; + isSocket(): boolean; + dev: T; + ino: T; + mode: T; + nlink: T; + uid: T; + gid: T; + rdev: T; + size: T; + blksize: T; + blocks: T; + atimeMs: T; + mtimeMs: T; + ctimeMs: T; + birthtimeMs: T; + atime: Date; + mtime: Date; + ctime: Date; + birthtime: Date; + } + export interface Stats extends StatsBase {} + /** + * A `fs.Stats` object provides information about a file. + * + * Objects returned from {@link stat}, {@link lstat}, {@link fstat}, and + * their synchronous counterparts are of this type. + * If `bigint` in the `options` passed to those methods is true, the numeric values + * will be `bigint` instead of `number`, and the object will contain additional + * nanosecond-precision properties suffixed with `Ns`. + * + * ```console + * Stats { + * dev: 2114, + * ino: 48064969, + * mode: 33188, + * nlink: 1, + * uid: 85, + * gid: 100, + * rdev: 0, + * size: 527, + * blksize: 4096, + * blocks: 8, + * atimeMs: 1318289051000.1, + * mtimeMs: 1318289051000.1, + * ctimeMs: 1318289051000.1, + * birthtimeMs: 1318289051000.1, + * atime: Mon, 10 Oct 2011 23:24:11 GMT, + * mtime: Mon, 10 Oct 2011 23:24:11 GMT, + * ctime: Mon, 10 Oct 2011 23:24:11 GMT, + * birthtime: Mon, 10 Oct 2011 23:24:11 GMT } + * ``` + * + * `bigint` version: + * + * ```console + * BigIntStats { + * dev: 2114n, + * ino: 48064969n, + * mode: 33188n, + * nlink: 1n, + * uid: 85n, + * gid: 100n, + * rdev: 0n, + * size: 527n, + * blksize: 4096n, + * blocks: 8n, + * atimeMs: 1318289051000n, + * mtimeMs: 1318289051000n, + * ctimeMs: 1318289051000n, + * birthtimeMs: 1318289051000n, + * atimeNs: 1318289051000000000n, + * mtimeNs: 1318289051000000000n, + * ctimeNs: 1318289051000000000n, + * birthtimeNs: 1318289051000000000n, + * atime: Mon, 10 Oct 2011 23:24:11 GMT, + * mtime: Mon, 10 Oct 2011 23:24:11 GMT, + * ctime: Mon, 10 Oct 2011 23:24:11 GMT, + * birthtime: Mon, 10 Oct 2011 23:24:11 GMT } + * ``` + * @since v0.1.21 + */ + export class Stats {} + export interface StatsFsBase { + /** Type of file system. */ + type: T; + /** Optimal transfer block size. */ + bsize: T; + /** Total data blocks in file system. */ + blocks: T; + /** Free blocks in file system. */ + bfree: T; + /** Available blocks for unprivileged users */ + bavail: T; + /** Total file nodes in file system. */ + files: T; + /** Free file nodes in file system. */ + ffree: T; + } + export interface StatsFs extends StatsFsBase {} + /** + * Provides information about a mounted file system. + * + * Objects returned from {@link statfs} and its synchronous counterpart are of + * this type. If `bigint` in the `options` passed to those methods is `true`, the + * numeric values will be `bigint` instead of `number`. 
+ * + * ```console + * StatFs { + * type: 1397114950, + * bsize: 4096, + * blocks: 121938943, + * bfree: 61058895, + * bavail: 61058895, + * files: 999, + * ffree: 1000000 + * } + * ``` + * + * `bigint` version: + * + * ```console + * StatFs { + * type: 1397114950n, + * bsize: 4096n, + * blocks: 121938943n, + * bfree: 61058895n, + * bavail: 61058895n, + * files: 999n, + * ffree: 1000000n + * } + * ``` + * @since v19.6.0, v18.15.0 + */ + export class StatsFs {} + export interface BigIntStatsFs extends StatsFsBase {} + export interface StatFsOptions { + bigint?: boolean | undefined; + } + /** + * A representation of a directory entry, which can be a file or a subdirectory + * within the directory, as returned by reading from an `fs.Dir`. The + * directory entry is a combination of the file name and file type pairs. + * + * Additionally, when {@link readdir} or {@link readdirSync} is called with + * the `withFileTypes` option set to `true`, the resulting array is filled with `fs.Dirent` objects, rather than strings or `Buffer` s. + * @since v10.10.0 + */ + export class Dirent { + /** + * Returns `true` if the `fs.Dirent` object describes a regular file. + * @since v10.10.0 + */ + isFile(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a file system + * directory. + * @since v10.10.0 + */ + isDirectory(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a block device. + * @since v10.10.0 + */ + isBlockDevice(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a character device. + * @since v10.10.0 + */ + isCharacterDevice(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a symbolic link. + * @since v10.10.0 + */ + isSymbolicLink(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a first-in-first-out + * (FIFO) pipe. + * @since v10.10.0 + */ + isFIFO(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a socket. + * @since v10.10.0 + */ + isSocket(): boolean; + /** + * The file name that this `fs.Dirent` object refers to. The type of this + * value is determined by the `options.encoding` passed to {@link readdir} or {@link readdirSync}. + * @since v10.10.0 + */ + name: string; + /** + * The base path that this `fs.Dirent` object refers to. + * @since v20.1.0 + */ + path: string; + } + /** + * A class representing a directory stream. + * + * Created by {@link opendir}, {@link opendirSync}, or `fsPromises.opendir()`. + * + * ```js + * import { opendir } from 'node:fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + */ + export class Dir implements AsyncIterable { + /** + * The read-only path of this directory as was provided to {@link opendir},{@link opendirSync}, or `fsPromises.opendir()`. + * @since v12.12.0 + */ + readonly path: string; + /** + * Asynchronously iterates over the directory via `readdir(3)` until all entries have been read. + */ + [Symbol.asyncIterator](): AsyncIterableIterator; + /** + * Asynchronously close the directory's underlying resource handle. + * Subsequent reads will result in errors. + * + * A promise is returned that will be fulfilled after the resource has been + * closed. 
+ * @since v12.12.0 + */ + close(): Promise; + close(cb: NoParamCallback): void; + /** + * Synchronously close the directory's underlying resource handle. + * Subsequent reads will result in errors. + * @since v12.12.0 + */ + closeSync(): void; + /** + * Asynchronously read the next directory entry via [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) as an `fs.Dirent`. + * + * A promise is returned that will be fulfilled with an `fs.Dirent`, or `null`if there are no more directory entries to read. + * + * Directory entries returned by this function are in no particular order as + * provided by the operating system's underlying directory mechanisms. + * Entries added or removed while iterating over the directory might not be + * included in the iteration results. + * @since v12.12.0 + * @return containing {fs.Dirent|null} + */ + read(): Promise; + read(cb: (err: NodeJS.ErrnoException | null, dirEnt: Dirent | null) => void): void; + /** + * Synchronously read the next directory entry as an `fs.Dirent`. See the + * POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more detail. + * + * If there are no more directory entries to read, `null` will be returned. + * + * Directory entries returned by this function are in no particular order as + * provided by the operating system's underlying directory mechanisms. + * Entries added or removed while iterating over the directory might not be + * included in the iteration results. + * @since v12.12.0 + */ + readSync(): Dirent | null; + } + /** + * Class: fs.StatWatcher + * @since v14.3.0, v12.20.0 + * Extends `EventEmitter` + * A successful call to {@link watchFile} method will return a new fs.StatWatcher object. + */ + export interface StatWatcher extends EventEmitter { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `fs.StatWatcher` is active. Calling `watcher.ref()` multiple times will have + * no effect. + * + * By default, all `fs.StatWatcher` objects are "ref'ed", making it normally + * unnecessary to call `watcher.ref()` unless `watcher.unref()` had been + * called previously. + * @since v14.3.0, v12.20.0 + */ + ref(): this; + /** + * When called, the active `fs.StatWatcher` object will not require the Node.js + * event loop to remain active. If there is no other activity keeping the + * event loop running, the process may exit before the `fs.StatWatcher` object's + * callback is invoked. Calling `watcher.unref()` multiple times will have + * no effect. + * @since v14.3.0, v12.20.0 + */ + unref(): this; + } + export interface FSWatcher extends EventEmitter { + /** + * Stop watching for changes on the given `fs.FSWatcher`. Once stopped, the `fs.FSWatcher` object is no longer usable. + * @since v0.5.8 + */ + close(): void; + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `fs.FSWatcher` is active. Calling `watcher.ref()` multiple times will have + * no effect. + * + * By default, all `fs.FSWatcher` objects are "ref'ed", making it normally + * unnecessary to call `watcher.ref()` unless `watcher.unref()` had been + * called previously. + * @since v14.3.0, v12.20.0 + */ + ref(): this; + /** + * When called, the active `fs.FSWatcher` object will not require the Node.js + * event loop to remain active. If there is no other activity keeping the + * event loop running, the process may exit before the `fs.FSWatcher` object's + * callback is invoked. Calling `watcher.unref()` multiple times will have + * no effect. 
+ * @since v14.3.0, v12.20.0 + */ + unref(): this; + /** + * events.EventEmitter + * 1. change + * 2. close + * 3. error + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + } + /** + * Instances of `fs.ReadStream` are created and returned using the {@link createReadStream} function. + * @since v0.1.93 + */ + export class ReadStream extends stream.Readable { + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes that have been read so far. + * @since v6.4.0 + */ + bytesRead: number; + /** + * The path to the file the stream is reading from as specified in the first + * argument to `fs.createReadStream()`. If `path` is passed as a string, then`readStream.path` will be a string. If `path` is passed as a `Buffer`, then`readStream.path` will be a + * `Buffer`. If `fd` is specified, then`readStream.path` will be `undefined`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0, v10.16.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. 
ready + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "open", listener: (fd: number) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "open", listener: (fd: number) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "ready", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "open", listener: (fd: number) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "ready", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "open", listener: (fd: number) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "open", listener: (fd: number) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * * Extends `stream.Writable` + * + * Instances of `fs.WriteStream` are created and returned using the {@link createWriteStream} function. + * @since v0.1.93 + */ + export class WriteStream extends stream.Writable { + /** + * Closes `writeStream`. 
Optionally accepts a + * callback that will be executed once the `writeStream`is closed. + * @since v0.9.4 + */ + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes written so far. Does not include data that is still queued + * for writing. + * @since v0.4.7 + */ + bytesWritten: number; + /** + * The path to the file the stream is writing to as specified in the first + * argument to {@link createWriteStream}. If `path` is passed as a string, then`writeStream.path` will be a string. If `path` is passed as a `Buffer`, then`writeStream.path` will be a + * `Buffer`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. ready + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "open", listener: (fd: number) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "open", listener: (fd: number) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "ready", listener: () => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "open", listener: (fd: number) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "ready", listener: () => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "open", listener: (fd: number) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + 
prependOnceListener(event: "open", listener: (fd: number) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * Asynchronously rename file at `oldPath` to the pathname provided + * as `newPath`. In the case that `newPath` already exists, it will + * be overwritten. If there is a directory at `newPath`, an error will + * be raised instead. No arguments other than a possible exception are + * given to the completion callback. + * + * See also: [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html). + * + * ```js + * import { rename } from 'node:fs'; + * + * rename('oldFile.txt', 'newFile.txt', (err) => { + * if (err) throw err; + * console.log('Rename complete!'); + * }); + * ``` + * @since v0.0.2 + */ + export function rename(oldPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace rename { + /** + * Asynchronous rename(2) - Change the name or location of a file or directory. + * @param oldPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(oldPath: PathLike, newPath: PathLike): Promise; + } + /** + * Renames the file from `oldPath` to `newPath`. Returns `undefined`. + * + * See the POSIX [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html) documentation for more details. + * @since v0.1.21 + */ + export function renameSync(oldPath: PathLike, newPath: PathLike): void; + /** + * Truncates the file. No arguments other than a possible exception are + * given to the completion callback. A file descriptor can also be passed as the + * first argument. In this case, `fs.ftruncate()` is called. + * + * ```js + * import { truncate } from 'node:fs'; + * // Assuming that 'path/file.txt' is a regular file. + * truncate('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was truncated'); + * }); + * ``` + * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * + * See the POSIX [`truncate(2)`](http://man7.org/linux/man-pages/man2/truncate.2.html) documentation for more details. + * @since v0.8.6 + * @param [len=0] + */ + export function truncate(path: PathLike, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function truncate(path: PathLike, callback: NoParamCallback): void; + export namespace truncate { + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param len If not specified, defaults to `0`. + */ + function __promisify__(path: PathLike, len?: number | null): Promise; + } + /** + * Truncates the file. Returns `undefined`. A file descriptor can also be + * passed as the first argument. In this case, `fs.ftruncateSync()` is called. 
+ * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * @since v0.8.6 + * @param [len=0] + */ + export function truncateSync(path: PathLike, len?: number | null): void; + /** + * Truncates the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`ftruncate(2)`](http://man7.org/linux/man-pages/man2/ftruncate.2.html) documentation for more detail. + * + * If the file referred to by the file descriptor was larger than `len` bytes, only + * the first `len` bytes will be retained in the file. + * + * For example, the following program retains only the first four bytes of the + * file: + * + * ```js + * import { open, close, ftruncate } from 'node:fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('temp.txt', 'r+', (err, fd) => { + * if (err) throw err; + * + * try { + * ftruncate(fd, 4, (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * if (err) throw err; + * } + * }); + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncate(fd: number, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + */ + export function ftruncate(fd: number, callback: NoParamCallback): void; + export namespace ftruncate { + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + * @param len If not specified, defaults to `0`. + */ + function __promisify__(fd: number, len?: number | null): Promise; + } + /** + * Truncates the file descriptor. Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link ftruncate}. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncateSync(fd: number, len?: number | null): void; + /** + * Asynchronously changes owner and group of a file. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace chown { + /** + * Asynchronous chown(2) - Change ownership of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Synchronously changes owner and group of a file. Returns `undefined`. + * This is the synchronous version of {@link chown}. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chownSync(path: PathLike, uid: number, gid: number): void; + /** + * Sets the owner of the file. No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. 
+ * @since v0.4.7 + */ + export function fchown(fd: number, uid: number, gid: number, callback: NoParamCallback): void; + export namespace fchown { + /** + * Asynchronous fchown(2) - Change ownership of a file. + * @param fd A file descriptor. + */ + function __promisify__(fd: number, uid: number, gid: number): Promise; + } + /** + * Sets the owner of the file. Returns `undefined`. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. + * @since v0.4.7 + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function fchownSync(fd: number, uid: number, gid: number): void; + /** + * Set the owner of the symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more detail. + */ + export function lchown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace lchown { + /** + * Asynchronous lchown(2) - Change ownership of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Set the owner for the path. Returns `undefined`. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more details. + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function lchownSync(path: PathLike, uid: number, gid: number): void; + /** + * Changes the access and modification times of a file in the same way as {@link utimes}, with the difference that if the path refers to a symbolic + * link, then the link is not dereferenced: instead, the timestamps of the + * symbolic link itself are changed. + * + * No arguments other than a possible exception are given to the completion + * callback. + * @since v14.5.0, v12.19.0 + */ + export function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace lutimes { + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, + * with the difference that if the path refers to a symbolic link, then the link is not + * dereferenced: instead, the timestamps of the symbolic link itself are changed. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Change the file system timestamps of the symbolic link referenced by `path`. + * Returns `undefined`, or throws an exception when parameters are incorrect or + * the operation fails. This is the synchronous version of {@link lutimes}. + * @since v14.5.0, v12.19.0 + */ + export function lutimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Asynchronously changes the permissions of a file. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. 
+ * + * ```js + * import { chmod } from 'node:fs'; + * + * chmod('my_file.txt', 0o775, (err) => { + * if (err) throw err; + * console.log('The permissions for file "my_file.txt" have been changed!'); + * }); + * ``` + * @since v0.1.30 + */ + export function chmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + export namespace chmod { + /** + * Asynchronous chmod(2) - Change permissions of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link chmod}. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. + * @since v0.6.7 + */ + export function chmodSync(path: PathLike, mode: Mode): void; + /** + * Sets the permissions on the file. No arguments other than a possible exception + * are given to the completion callback. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmod(fd: number, mode: Mode, callback: NoParamCallback): void; + export namespace fchmod { + /** + * Asynchronous fchmod(2) - Change permissions of a file. + * @param fd A file descriptor. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(fd: number, mode: Mode): Promise; + } + /** + * Sets the permissions on the file. Returns `undefined`. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmodSync(fd: number, mode: Mode): void; + /** + * Changes the permissions on a symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + /** @deprecated */ + export namespace lchmod { + /** + * Asynchronous lchmod(2) - Change permissions of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * Changes the permissions on a symbolic link. Returns `undefined`. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmodSync(path: PathLike, mode: Mode): void; + /** + * Asynchronous [`stat(2)`](http://man7.org/linux/man-pages/man2/stat.2.html). The callback gets two arguments `(err, stats)` where`stats` is an `fs.Stats` object. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * + * {@link stat} follows symbolic links. Use {@link lstat} to look at the + * links themselves. 
+ * + * Using `fs.stat()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()`, or `fs.writeFile()` is not recommended. + * Instead, user code should open/read/write the file directly and handle the + * error raised if the file is not available. + * + * To check if a file exists without manipulating it afterwards, {@link access} is recommended. + * + * For example, given the following directory structure: + * + * ```text + * - txtDir + * -- file.txt + * - app.js + * ``` + * + * The next program will check for the stats of the given paths: + * + * ```js + * import { stat } from 'node:fs'; + * + * const pathsToCheck = ['./txtDir', './txtDir/file.txt']; + * + * for (let i = 0; i < pathsToCheck.length; i++) { + * stat(pathsToCheck[i], (err, stats) => { + * console.log(stats.isDirectory()); + * console.log(stats); + * }); + * } + * ``` + * + * The resulting output will resemble: + * + * ```console + * true + * Stats { + * dev: 16777220, + * mode: 16877, + * nlink: 3, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214262, + * size: 96, + * blocks: 0, + * atimeMs: 1561174653071.963, + * mtimeMs: 1561174614583.3518, + * ctimeMs: 1561174626623.5366, + * birthtimeMs: 1561174126937.2893, + * atime: 2019-06-22T03:37:33.072Z, + * mtime: 2019-06-22T03:36:54.583Z, + * ctime: 2019-06-22T03:37:06.624Z, + * birthtime: 2019-06-22T03:28:46.937Z + * } + * false + * Stats { + * dev: 16777220, + * mode: 33188, + * nlink: 1, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214074, + * size: 8, + * blocks: 8, + * atimeMs: 1561174616618.8555, + * mtimeMs: 1561174614584, + * ctimeMs: 1561174614583.8145, + * birthtimeMs: 1561174007710.7478, + * atime: 2019-06-22T03:36:56.619Z, + * mtime: 2019-06-22T03:36:54.584Z, + * ctime: 2019-06-22T03:36:54.584Z, + * birthtime: 2019-06-22T03:26:47.711Z + * } + * ``` + * @since v0.0.2 + */ + export function stat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function stat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function stat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function stat( + path: PathLike, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace stat { + /** + * Asynchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatOptions): Promise; + } + export interface StatSyncFn extends Function { + (path: PathLike, options?: undefined): Stats; + ( + path: PathLike, + options?: StatSyncOptions & { + bigint?: false | undefined; + throwIfNoEntry: false; + }, + ): Stats | undefined; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: true; + throwIfNoEntry: false; + }, + ): BigIntStats | undefined; + ( + path: PathLike, + options?: StatSyncOptions & { + bigint?: false | undefined; + }, + ): Stats; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: true; + }, + ): BigIntStats; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: boolean; + throwIfNoEntry?: false | undefined; + }, + ): Stats | BigIntStats; + (path: PathLike, options?: StatSyncOptions): Stats | BigIntStats | undefined; + } + /** + * Synchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const statSync: StatSyncFn; + /** + * Invokes the callback with the `fs.Stats` for the file descriptor. + * + * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail. + * @since v0.1.95 + */ + export function fstat(fd: number, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function fstat( + fd: number, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function fstat( + fd: number, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function fstat( + fd: number, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace fstat { + /** + * Asynchronous fstat(2) - Get file status. + * @param fd A file descriptor. + */ + function __promisify__( + fd: number, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + fd: number, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(fd: number, options?: StatOptions): Promise; + } + /** + * Retrieves the `fs.Stats` for the file descriptor. + * + * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail. + * @since v0.1.95 + */ + export function fstatSync( + fd: number, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Stats; + export function fstatSync( + fd: number, + options: StatOptions & { + bigint: true; + }, + ): BigIntStats; + export function fstatSync(fd: number, options?: StatOptions): Stats | BigIntStats; + /** + * Retrieves the `fs.Stats` for the symbolic link referred to by the path. + * The callback gets two arguments `(err, stats)` where `stats` is a `fs.Stats` object. `lstat()` is identical to `stat()`, except that if `path` is a symbolic + * link, then the link itself is stat-ed, not the file that it refers to. + * + * See the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) documentation for more details. 
+ * @since v0.1.30 + */ + export function lstat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function lstat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function lstat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function lstat( + path: PathLike, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace lstat { + /** + * Asynchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatOptions): Promise; + } + /** + * Asynchronous [`statfs(2)`](http://man7.org/linux/man-pages/man2/statfs.2.html). Returns information about the mounted file system which + * contains `path`. The callback gets two arguments `(err, stats)` where `stats`is an `fs.StatFs` object. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * @since v19.6.0, v18.15.0 + * @param path A path to an existing file or directory on the file system to be queried. + */ + export function statfs(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: StatsFs) => void): void; + export function statfs( + path: PathLike, + options: + | (StatFsOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: StatsFs) => void, + ): void; + export function statfs( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStatsFs) => void, + ): void; + export function statfs( + path: PathLike, + options: StatFsOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: StatsFs | BigIntStatsFs) => void, + ): void; + export namespace statfs { + /** + * Asynchronous statfs(2) - Returns information about the mounted file system which contains path. The callback gets two arguments (err, stats) where stats is an object. + * @param path A path to an existing file or directory on the file system to be queried. + */ + function __promisify__( + path: PathLike, + options?: StatFsOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatFsOptions): Promise; + } + /** + * Synchronous [`statfs(2)`](http://man7.org/linux/man-pages/man2/statfs.2.html). Returns information about the mounted file system which + * contains `path`. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * @since v19.6.0, v18.15.0 + * @param path A path to an existing file or directory on the file system to be queried. 
+ */ + export function statfsSync( + path: PathLike, + options?: StatFsOptions & { + bigint?: false | undefined; + }, + ): StatsFs; + export function statfsSync( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + ): BigIntStatsFs; + export function statfsSync(path: PathLike, options?: StatFsOptions): StatsFs | BigIntStatsFs; + /** + * Synchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const lstatSync: StatSyncFn; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. No arguments other than + * a possible + * exception are given to the completion callback. + * @since v0.1.31 + */ + export function link(existingPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace link { + /** + * Asynchronous link(2) - Create a new link (also known as a hard link) to an existing file. + * @param existingPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(existingPath: PathLike, newPath: PathLike): Promise; + } + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.31 + */ + export function linkSync(existingPath: PathLike, newPath: PathLike): void; + /** + * Creates the link called `path` pointing to `target`. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`symlink(2)`](http://man7.org/linux/man-pages/man2/symlink.2.html) documentation for more details. + * + * The `type` argument is only available on Windows and ignored on other platforms. + * It can be set to `'dir'`, `'file'`, or `'junction'`. If the `type` argument is + * not a string, Node.js will autodetect `target` type and use `'file'` or `'dir'`. + * If the `target` does not exist, `'file'` will be used. Windows junction points + * require the destination path to be absolute. When using `'junction'`, the`target` argument will automatically be normalized to absolute path. Junction + * points on NTFS volumes can only point to directories. + * + * Relative targets are relative to the link's parent directory. + * + * ```js + * import { symlink } from 'node:fs'; + * + * symlink('./mew', './mewtwo', callback); + * ``` + * + * The above example creates a symbolic link `mewtwo` which points to `mew` in the + * same directory: + * + * ```bash + * $ tree . + * . + * ├── mew + * └── mewtwo -> ./mew + * ``` + * @since v0.1.31 + * @param [type='null'] + */ + export function symlink( + target: PathLike, + path: PathLike, + type: symlink.Type | undefined | null, + callback: NoParamCallback, + ): void; + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. + * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + */ + export function symlink(target: PathLike, path: PathLike, callback: NoParamCallback): void; + export namespace symlink { + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. 
+ * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + * @param type May be set to `'dir'`, `'file'`, or `'junction'` (default is `'file'`) and is only available on Windows (ignored on other platforms). + * When using `'junction'`, the `target` argument will automatically be normalized to an absolute path. + */ + function __promisify__(target: PathLike, path: PathLike, type?: string | null): Promise; + type Type = "dir" | "file" | "junction"; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link symlink}. + * @since v0.1.31 + * @param [type='null'] + */ + export function symlinkSync(target: PathLike, path: PathLike, type?: symlink.Type | null): void; + /** + * Reads the contents of the symbolic link referred to by `path`. The callback gets + * two arguments `(err, linkString)`. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path passed to the callback. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlink( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: string) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: Buffer) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: string | Buffer) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readlink( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, linkString: string) => void, + ): void; + export namespace readlink { + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + } + /** + * Returns the symbolic link's string value. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string | Buffer; + /** + * Asynchronously computes the canonical pathname by resolving `.`, `..`, and + * symbolic links. + * + * A canonical pathname is not necessarily unique. Hard links and bind mounts can + * expose a file system entity through many pathnames. + * + * This function behaves like [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html), with some exceptions: + * + * 1. No case conversion is performed on case-insensitive file systems. + * 2. The maximum number of symbolic links is platform-independent and generally + * (much) higher than what the native [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html) implementation supports. + * + * The `callback` gets two arguments `(err, resolvedPath)`. May use `process.cwd`to resolve relative paths. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * If `path` resolves to a socket or a pipe, the function will return a system + * dependent name for that object. + * @since v0.1.31 + */ + export function realpath( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. 
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function realpath( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + export namespace realpath { + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html). + * + * The `callback` gets two arguments `(err, resolvedPath)`. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. 
+ * @since v9.2.0 + */ + function native( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + function native( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void, + ): void; + function native( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void, + ): void; + function native( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + } + /** + * Returns the resolved pathname. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link realpath}. + * @since v0.1.31 + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string | Buffer; + export namespace realpathSync { + function native(path: PathLike, options?: EncodingOption): string; + function native(path: PathLike, options: BufferEncodingOption): Buffer; + function native(path: PathLike, options?: EncodingOption): string | Buffer; + } + /** + * Asynchronously removes a file or symbolic link. No arguments other than a + * possible exception are given to the completion callback. + * + * ```js + * import { unlink } from 'node:fs'; + * // Assuming that 'path/file.txt' is a regular file. + * unlink('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was deleted'); + * }); + * ``` + * + * `fs.unlink()` will not work on a directory, empty or otherwise. To remove a + * directory, use {@link rmdir}. + * + * See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more details. + * @since v0.0.2 + */ + export function unlink(path: PathLike, callback: NoParamCallback): void; + export namespace unlink { + /** + * Asynchronous unlink(2) - delete a name and possibly the file it refers to. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike): Promise; + } + /** + * Synchronous [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html). Returns `undefined`. + * @since v0.1.21 + */ + export function unlinkSync(path: PathLike): void; + export interface RmDirOptions { + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. 
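The `readlink`/`realpath` declarations above carry no usage example in their doc comments; here is a minimal illustrative sketch, not part of the type definitions (the `/tmp/example-link` path and link target are hypothetical):

```js
import { symlink, readlink, realpath } from 'node:fs';

// Create a symbolic link, then resolve it two ways (paths are illustrative only).
symlink('/etc/hosts', '/tmp/example-link', (err) => {
  if (err) throw err;

  // readlink returns the link's own target string.
  readlink('/tmp/example-link', (err, target) => {
    if (err) throw err;
    console.log(target); // '/etc/hosts'
  });

  // realpath resolves '.', '..' and symbolic links to a canonical pathname.
  realpath('/tmp/example-link', (err, resolved) => {
    if (err) throw err;
    console.log(resolved); // e.g. '/etc/hosts' (may differ per platform)
  });
});
```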
+ * @default 0 + */ + maxRetries?: number | undefined; + /** + * @deprecated since v14.14.0 In future versions of Node.js and will trigger a warning + * `fs.rmdir(path, { recursive: true })` will throw if `path` does not exist or is a file. + * Use `fs.rm(path, { recursive: true, force: true })` instead. + * + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). No arguments other than a possible exception are given + * to the completion callback. + * + * Using `fs.rmdir()` on a file (not a directory) results in an `ENOENT` error on + * Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rm} with options `{ recursive: true, force: true }`. + * @since v0.0.2 + */ + export function rmdir(path: PathLike, callback: NoParamCallback): void; + export function rmdir(path: PathLike, options: RmDirOptions, callback: NoParamCallback): void; + export namespace rmdir { + /** + * Asynchronous rmdir(2) - delete a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, options?: RmDirOptions): Promise; + } + /** + * Synchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). Returns `undefined`. + * + * Using `fs.rmdirSync()` on a file (not a directory) results in an `ENOENT` error + * on Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rmSync} with options `{ recursive: true, force: true }`. + * @since v0.1.21 + */ + export function rmdirSync(path: PathLike, options?: RmDirOptions): void; + export interface RmOptions { + /** + * When `true`, exceptions will be ignored if `path` does not exist. + * @default false + */ + force?: boolean | undefined; + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. + * @default 0 + */ + maxRetries?: number | undefined; + /** + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm`utility). No arguments other than a possible exception are given to the + * completion callback. + * @since v14.14.0 + */ + export function rm(path: PathLike, callback: NoParamCallback): void; + export function rm(path: PathLike, options: RmOptions, callback: NoParamCallback): void; + export namespace rm { + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm` utility). 
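To complement the `rmdir`/`rm` declarations documented above, a short sketch of the `rm -rf`-style call those comments recommend; the `./build` directory name is only an example:

```js
import { rm } from 'node:fs';

// Recursively delete a directory tree; `force: true` ignores a missing path,
// mirroring the `rm -rf` behaviour described in the doc comments above.
rm('./build', { recursive: true, force: true, maxRetries: 3 }, (err) => {
  if (err) throw err;
  console.log('./build removed (or did not exist).');
});
```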
+ */ + function __promisify__(path: PathLike, options?: RmOptions): Promise; + } + /** + * Synchronously removes files and directories (modeled on the standard POSIX `rm`utility). Returns `undefined`. + * @since v14.14.0 + */ + export function rmSync(path: PathLike, options?: RmOptions): void; + export interface MakeDirectoryOptions { + /** + * Indicates whether parent folders should be created. + * If a folder was created, the path to the first created folder will be returned. + * @default false + */ + recursive?: boolean | undefined; + /** + * A file mode. If a string is passed, it is parsed as an octal integer. If not specified + * @default 0o777 + */ + mode?: Mode | undefined; + } + /** + * Asynchronously creates a directory. + * + * The callback is given a possible exception and, if `recursive` is `true`, the + * first directory path created, `(err[, path])`.`path` can still be `undefined` when `recursive` is `true`, if no directory was + * created (for instance, if it was previously created). + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive`property indicating whether parent directories should be created. Calling`fs.mkdir()` when `path` is a directory that + * exists results in an error only + * when `recursive` is false. If `recursive` is false and the directory exists, + * an `EEXIST` error occurs. + * + * ```js + * import { mkdir } from 'node:fs'; + * + * // Create ./tmp/a/apple, regardless of whether ./tmp and ./tmp/a exist. + * mkdir('./tmp/a/apple', { recursive: true }, (err) => { + * if (err) throw err; + * }); + * ``` + * + * On Windows, using `fs.mkdir()` on the root directory even with recursion will + * result in an error: + * + * ```js + * import { mkdir } from 'node:fs'; + * + * mkdir('/', { recursive: true }, (err) => { + * // => [Error: EPERM: operation not permitted, mkdir 'C:\'] + * }); + * ``` + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.8 + */ + export function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + callback: (err: NodeJS.ErrnoException | null, path?: string) => void, + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdir( + path: PathLike, + options: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + | undefined, + callback: NoParamCallback, + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdir( + path: PathLike, + options: Mode | MakeDirectoryOptions | null | undefined, + callback: (err: NodeJS.ErrnoException | null, path?: string) => void, + ): void; + /** + * Asynchronous mkdir(2) - create a directory with a mode of `0o777`. + * @param path A path to a file. 
If a URL is provided, it must use the `file:` protocol. + */ + export function mkdir(path: PathLike, callback: NoParamCallback): void; + export namespace mkdir { + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options?: Mode | MakeDirectoryOptions | null, + ): Promise; + } + /** + * Synchronously creates a directory. Returns `undefined`, or if `recursive` is`true`, the first directory path created. + * This is the synchronous version of {@link mkdir}. + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.21 + */ + export function mkdirSync( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + ): string | undefined; + /** + * Synchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdirSync( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null, + ): void; + /** + * Synchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdirSync(path: PathLike, options?: Mode | MakeDirectoryOptions | null): string | undefined; + /** + * Creates a unique temporary directory. + * + * Generates six random characters to be appended behind a required`prefix` to create a unique temporary directory. Due to platform + * inconsistencies, avoid trailing `X` characters in `prefix`. 
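The `MakeDirectoryOptions` comment above notes that with `recursive: true` the first created directory path is returned; a brief sketch under that assumption (the `./tmp-demo` path is illustrative):

```js
import { mkdirSync, rmSync } from 'node:fs';

// With `recursive: true`, mkdirSync returns the first directory it had to
// create, or undefined if every segment already existed.
const first = mkdirSync('./tmp-demo/a/b', { recursive: true });
console.log(first); // first created directory on the first run, undefined afterwards

// Clean up the illustrative directory tree.
rmSync('./tmp-demo', { recursive: true, force: true });
```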
Some platforms, + * notably the BSDs, can return more than six random characters, and replace + * trailing `X` characters in `prefix` with random characters. + * + * The created directory path is passed as a string to the callback's second + * parameter. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'node:fs'; + * import { join } from 'node:path'; + * import { tmpdir } from 'node:os'; + * + * mkdtemp(join(tmpdir(), 'foo-'), (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Prints: /tmp/foo-itXde2 or C:\Users\...\AppData\Local\Temp\foo-itXde2 + * }); + * ``` + * + * The `fs.mkdtemp()` method will append the six randomly selected characters + * directly to the `prefix` string. For instance, given a directory `/tmp`, if the + * intention is to create a temporary directory _within_`/tmp`, the `prefix`must end with a trailing platform-specific path separator + * (`require('node:path').sep`). + * + * ```js + * import { tmpdir } from 'node:os'; + * import { mkdtemp } from 'node:fs'; + * + * // The parent directory for the new temporary directory + * const tmpDir = tmpdir(); + * + * // This method is *INCORRECT*: + * mkdtemp(tmpDir, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmpabc123`. + * // A new temporary directory is created at the file system root + * // rather than *within* the /tmp directory. + * }); + * + * // This method is *CORRECT*: + * import { sep } from 'node:path'; + * mkdtemp(`${tmpDir}${sep}`, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmp/abc123`. + * // A new temporary directory is created within + * // the /tmp directory. + * }); + * ``` + * @since v5.10.0 + */ + export function mkdtemp( + prefix: string, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, folder: string) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp( + prefix: string, + options: + | "buffer" + | { + encoding: "buffer"; + }, + callback: (err: NodeJS.ErrnoException | null, folder: Buffer) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp( + prefix: string, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, folder: string | Buffer) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + */ + export function mkdtemp( + prefix: string, + callback: (err: NodeJS.ErrnoException | null, folder: string) => void, + ): void; + export namespace mkdtemp { + /** + * Asynchronously creates a unique temporary directory. 
+ * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + } + /** + * Returns the created directory path. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link mkdtemp}. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * @since v5.10.0 + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options: BufferEncodingOption): Buffer; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string | Buffer; + /** + * Reads the contents of a directory. The callback gets two arguments `(err, files)`where `files` is an array of the names of the files in the directory excluding`'.'` and `'..'`. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames passed to the callback. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the `files` array will contain `fs.Dirent` objects. + * @since v0.1.8 + */ + export function readdir( + path: PathLike, + options: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. 
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + callback: (err: NodeJS.ErrnoException | null, files: Buffer[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[] | Buffer[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readdir( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, files: string[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + callback: (err: NodeJS.ErrnoException | null, files: Dirent[]) => void, + ): void; + export namespace readdir { + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options: + | "buffer" + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ * @param options If called with `withFileTypes: true` the result data will be an array of Dirent + */ + function __promisify__( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Promise; + } + /** + * Reads the contents of the directory. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames returned. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the result will contain `fs.Dirent` objects. + * @since v0.1.21 + */ + export function readdirSync( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | null, + ): string[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + ): Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): string[] | Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdirSync( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Dirent[]; + /** + * Closes the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * Calling `fs.close()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. + * @since v0.0.2 + */ + export function close(fd: number, callback?: NoParamCallback): void; + export namespace close { + /** + * Asynchronous close(2) - close a file descriptor. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Closes the file descriptor. Returns `undefined`. + * + * Calling `fs.closeSync()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. 
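The `readdir`/`readdirSync` overloads above accept `withFileTypes: true` to return `Dirent` objects instead of plain names; a small sketch of that option (the directory path is illustrative):

```js
import { readdirSync } from 'node:fs';

// With `withFileTypes: true` each entry is a Dirent, so its type can be
// checked without an extra stat call.
for (const entry of readdirSync('.', { withFileTypes: true })) {
  const kind = entry.isDirectory() ? 'dir ' : 'file';
  console.log(`${kind} ${entry.name}`);
}
```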
+ * @since v0.1.21 + */ + export function closeSync(fd: number): void; + /** + * Asynchronous file open. See the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more details. + * + * `mode` sets the file mode (permission and sticky bits), but only if the file was + * created. On Windows, only the write permission can be manipulated; see {@link chmod}. + * + * The callback gets two arguments `(err, fd)`. + * + * Some characters (`< > : " / \ | ? *`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * + * Functions based on `fs.open()` exhibit this behavior as well:`fs.writeFile()`, `fs.readFile()`, etc. + * @since v0.0.2 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] + */ + export function open( + path: PathLike, + flags: OpenMode | undefined, + mode: Mode | undefined | null, + callback: (err: NodeJS.ErrnoException | null, fd: number) => void, + ): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param [flags='r'] See `support of file system `flags``. + */ + export function open( + path: PathLike, + flags: OpenMode | undefined, + callback: (err: NodeJS.ErrnoException | null, fd: number) => void, + ): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function open(path: PathLike, callback: (err: NodeJS.ErrnoException | null, fd: number) => void): void; + export namespace open { + /** + * Asynchronous open(2) - open and possibly create a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not supplied, defaults to `0o666`. + */ + function __promisify__(path: PathLike, flags: OpenMode, mode?: Mode | null): Promise; + } + /** + * Returns an integer representing the file descriptor. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link open}. + * @since v0.1.21 + * @param [flags='r'] + * @param [mode=0o666] + */ + export function openSync(path: PathLike, flags: OpenMode, mode?: Mode | null): number; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time in seconds,`Date`s, or a numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity`, or`-Infinity`, an `Error` will be thrown. + * @since v0.4.2 + */ + export function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace utimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied path. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. 
If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link utimes}. + * @since v0.4.2 + */ + export function utimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Change the file system timestamps of the object referenced by the supplied file + * descriptor. See {@link utimes}. + * @since v0.4.2 + */ + export function futimes(fd: number, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace futimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(fd: number, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Synchronous version of {@link futimes}. Returns `undefined`. + * @since v0.4.2 + */ + export function futimesSync(fd: number, atime: TimeLike, mtime: TimeLike): void; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. No arguments other + * than a possible exception are given to the completion callback. + * @since v0.1.96 + */ + export function fsync(fd: number, callback: NoParamCallback): void; + export namespace fsync { + /** + * Asynchronous fsync(2) - synchronize a file's in-core state with the underlying storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.96 + */ + export function fsyncSync(fd: number): void; + /** + * Write `buffer` to the file specified by `fd`. + * + * `offset` determines the part of the buffer to be written, and `length` is + * an integer specifying the number of bytes to write. + * + * `position` refers to the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. See [`pwrite(2)`](http://man7.org/linux/man-pages/man2/pwrite.2.html). + * + * The callback will be given three arguments `(err, bytesWritten, buffer)` where`bytesWritten` specifies how many _bytes_ were written from `buffer`. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesWritten` and `buffer` properties. + * + * It is unsafe to use `fs.write()` multiple times on the same file without waiting + * for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * On Linux, positional writes don't work when the file is opened in append mode. 
+ * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v0.0.2 + * @param [offset=0] + * @param [length=buffer.byteLength - offset] + * @param [position='null'] + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + length: number | undefined | null, + position: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`. + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + length: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + */ + export function write( + fd: number, + buffer: TBuffer, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function write( + fd: number, + string: string, + position: number | undefined | null, + encoding: BufferEncoding | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void, + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + */ + export function write( + fd: number, + string: string, + position: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void, + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + */ + export function write( + fd: number, + string: string, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void, + ): void; + export namespace write { + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + * @param length The number of bytes to write. 
If not supplied, defaults to `buffer.length - offset`. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + */ + function __promisify__( + fd: number, + buffer?: TBuffer, + offset?: number, + length?: number, + position?: number | null, + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + function __promisify__( + fd: number, + string: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link write}. + * @since v0.1.21 + * @param [offset=0] + * @param [length=buffer.byteLength - offset] + * @param [position='null'] + * @return The number of bytes written. + */ + export function writeSync( + fd: number, + buffer: NodeJS.ArrayBufferView, + offset?: number | null, + length?: number | null, + position?: number | null, + ): number; + /** + * Synchronously writes `string` to the file referenced by the supplied file descriptor, returning the number of bytes written. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function writeSync( + fd: number, + string: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): number; + export type ReadPosition = number | bigint; + export interface ReadSyncOptions { + /** + * @default 0 + */ + offset?: number | undefined; + /** + * @default `length of buffer` + */ + length?: number | undefined; + /** + * @default null + */ + position?: ReadPosition | null | undefined; + } + export interface ReadAsyncOptions extends ReadSyncOptions { + buffer?: TBuffer; + } + /** + * Read data from the file specified by `fd`. + * + * The callback is given the three arguments, `(err, bytesRead, buffer)`. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffer` properties. + * @since v0.0.2 + * @param buffer The buffer that the data will be written to. + * @param offset The position in `buffer` to write the data to. + * @param length The number of bytes to read. + * @param position Specifies where to begin reading from in the file. If `position` is `null` or `-1 `, data will be read from the current file position, and the file position will be updated. If + * `position` is an integer, the file position will be unchanged. + */ + export function read( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: ReadPosition | null, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void, + ): void; + /** + * Similar to the above `fs.read` function, this version takes an optional `options` object. 
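A compact sketch of the descriptor-based `write`/`read` calls whose signatures are declared above, using the synchronous variants for brevity; the `scratch.bin` file name is hypothetical:

```js
import { openSync, writeSync, readSync, closeSync } from 'node:fs';
import { Buffer } from 'node:buffer';

const fd = openSync('scratch.bin', 'w+'); // hypothetical file, opened read/write
try {
  // writeSync(fd, buffer, offset, length, position) returns the bytes written.
  const written = writeSync(fd, Buffer.from('hello'), 0, 5, 0);

  // readSync(fd, buffer, offset, length, position) returns the bytes read.
  const out = Buffer.alloc(written);
  const read = readSync(fd, out, 0, out.length, 0);
  console.log(read, out.toString('utf8')); // 5 'hello'
} finally {
  closeSync(fd);
}
```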
+ * If not otherwise specified in an `options` object, + * `buffer` defaults to `Buffer.alloc(16384)`, + * `offset` defaults to `0`, + * `length` defaults to `buffer.byteLength`, `- offset` as of Node 17.6.0 + * `position` defaults to `null` + * @since v12.17.0, 13.11.0 + */ + export function read( + fd: number, + options: ReadAsyncOptions, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void, + ): void; + export function read( + fd: number, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: NodeJS.ArrayBufferView) => void, + ): void; + export namespace read { + /** + * @param fd A file descriptor. + * @param buffer The buffer that the data will be written to. + * @param offset The offset in the buffer at which to start writing. + * @param length The number of bytes to read. + * @param position The offset from the beginning of the file from which data should be read. If `null`, data will be read from the current position. + */ + function __promisify__( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: number | null, + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__( + fd: number, + options: ReadAsyncOptions, + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__(fd: number): Promise<{ + bytesRead: number; + buffer: NodeJS.ArrayBufferView; + }>; + } + /** + * Returns the number of `bytesRead`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link read}. + * @since v0.1.21 + * @param [position='null'] + */ + export function readSync( + fd: number, + buffer: NodeJS.ArrayBufferView, + offset: number, + length: number, + position: ReadPosition | null, + ): number; + /** + * Similar to the above `fs.readSync` function, this version takes an optional `options` object. + * If no `options` object is specified, it will default with the above values. + */ + export function readSync(fd: number, buffer: NodeJS.ArrayBufferView, opts?: ReadSyncOptions): number; + /** + * Asynchronously reads the entire contents of a file. + * + * ```js + * import { readFile } from 'node:fs'; + * + * readFile('/etc/passwd', (err, data) => { + * if (err) throw err; + * console.log(data); + * }); + * ``` + * + * The callback is passed two arguments `(err, data)`, where `data` is the + * contents of the file. + * + * If no encoding is specified, then the raw buffer is returned. + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { readFile } from 'node:fs'; + * + * readFile('/etc/passwd', 'utf8', callback); + * ``` + * + * When the path is a directory, the behavior of `fs.readFile()` and {@link readFileSync} is platform-specific. On macOS, Linux, and Windows, an + * error will be returned. On FreeBSD, a representation of the directory's contents + * will be returned. + * + * ```js + * import { readFile } from 'node:fs'; + * + * // macOS, Linux, and Windows + * readFile('', (err, data) => { + * // => [Error: EISDIR: illegal operation on a directory, read ] + * }); + * + * // FreeBSD + * readFile('', (err, data) => { + * // => null, + * }); + * ``` + * + * It is possible to abort an ongoing request using an `AbortSignal`. 
If a + * request is aborted the callback is called with an `AbortError`: + * + * ```js + * import { readFile } from 'node:fs'; + * + * const controller = new AbortController(); + * const signal = controller.signal; + * readFile(fileInfo[0].name, { signal }, (err, buf) => { + * // ... + * }); + * // When you want to abort the request + * controller.abort(); + * ``` + * + * The `fs.readFile()` function buffers the entire file. To minimize memory costs, + * when possible prefer streaming via `fs.createReadStream()`. + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * @since v0.1.29 + * @param path filename or file descriptor + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding?: null | undefined; + flag?: string | undefined; + } & Abortable) + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding: BufferEncoding; + flag?: string | undefined; + } & Abortable) + | BufferEncoding, + callback: (err: NodeJS.ErrnoException | null, data: string) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | (ObjectEncodingOptions & { + flag?: string | undefined; + } & Abortable) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: string | Buffer) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + */ + export function readFile( + path: PathOrFileDescriptor, + callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void, + ): void; + export namespace readFile { + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. 
+ * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null, + ): Promise; + } + /** + * Returns the contents of the `path`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readFile}. + * + * If the `encoding` option is specified then this function returns a + * string. Otherwise it returns a buffer. + * + * Similar to {@link readFile}, when the path is a directory, the behavior of`fs.readFileSync()` is platform-specific. + * + * ```js + * import { readFileSync } from 'node:fs'; + * + * // macOS, Linux, and Windows + * readFileSync(''); + * // => [Error: EISDIR: illegal operation on a directory, read ] + * + * // FreeBSD + * readFileSync(''); // => + * ``` + * @since v0.1.8 + * @param path filename or file descriptor + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null, + ): Buffer; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding, + ): string; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null, + ): string | Buffer; + export type WriteFileOptions = + | ( + & ObjectEncodingOptions + & Abortable + & { + mode?: Mode | undefined; + flag?: string | undefined; + flush?: boolean | undefined; + } + ) + | BufferEncoding + | null; + /** + * When `file` is a filename, asynchronously writes data to the file, replacing the + * file if it already exists. `data` can be a string or a buffer. 
+ * + * When `file` is a file descriptor, the behavior is similar to calling`fs.write()` directly (which is recommended). See the notes below on using + * a file descriptor. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { writeFile } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, (err) => { + * if (err) throw err; + * console.log('The file has been saved!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { writeFile } from 'node:fs'; + * + * writeFile('message.txt', 'Hello Node.js', 'utf8', callback); + * ``` + * + * It is unsafe to use `fs.writeFile()` multiple times on the same file without + * waiting for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * Similarly to `fs.readFile` \- `fs.writeFile` is a convenience method that + * performs multiple `write` calls internally to write the buffer passed to it. + * For performance sensitive code consider using {@link createWriteStream}. + * + * It is possible to use an `AbortSignal` to cancel an `fs.writeFile()`. + * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, { signal }, (err) => { + * // When a request is aborted - the callback is called with an AbortError + * }); + * // When the request should be aborted + * controller.abort(); + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFile( + file: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options: WriteFileOptions, + callback: NoParamCallback, + ): void; + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function writeFile( + path: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + callback: NoParamCallback, + ): void; + export namespace writeFile { + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. 
+ * If `mode` is a string, it is parsed as an octal integer. + * If `flag` is not supplied, the default of `'w'` is used. + */ + function __promisify__( + path: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options?: WriteFileOptions, + ): Promise; + } + /** + * Returns `undefined`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writeFile}. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFileSync( + file: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options?: WriteFileOptions, + ): void; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { appendFile } from 'node:fs'; + * + * appendFile('message.txt', 'data to append', (err) => { + * if (err) throw err; + * console.log('The "data to append" was appended to file!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFile } from 'node:fs'; + * + * appendFile('message.txt', 'data to append', 'utf8', callback); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { open, close, appendFile } from 'node:fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('message.txt', 'a', (err, fd) => { + * if (err) throw err; + * + * try { + * appendFile(fd, 'data to append', 'utf8', (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * throw err; + * } + * }); + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFile( + path: PathOrFileDescriptor, + data: string | Uint8Array, + options: WriteFileOptions, + callback: NoParamCallback, + ): void; + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function appendFile(file: PathOrFileDescriptor, data: string | Uint8Array, callback: NoParamCallback): void; + export namespace appendFile { + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. + * If `mode` is a string, it is parsed as an octal integer. 
+ * If `flag` is not supplied, the default of `'a'` is used. + */ + function __promisify__( + file: PathOrFileDescriptor, + data: string | Uint8Array, + options?: WriteFileOptions, + ): Promise; + } + /** + * Synchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { appendFileSync } from 'node:fs'; + * + * try { + * appendFileSync('message.txt', 'data to append'); + * console.log('The "data to append" was appended to file!'); + * } catch (err) { + * // Handle the error + * } + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFileSync } from 'node:fs'; + * + * appendFileSync('message.txt', 'data to append', 'utf8'); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { openSync, closeSync, appendFileSync } from 'node:fs'; + * + * let fd; + * + * try { + * fd = openSync('message.txt', 'a'); + * appendFileSync(fd, 'data to append', 'utf8'); + * } catch (err) { + * // Handle the error + * } finally { + * if (fd !== undefined) + * closeSync(fd); + * } + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFileSync( + path: PathOrFileDescriptor, + data: string | Uint8Array, + options?: WriteFileOptions, + ): void; + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. If provided, it should be an object. The`options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and`fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and`fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). 
+ * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export interface WatchFileOptions { + bigint?: boolean | undefined; + persistent?: boolean | undefined; + interval?: number | undefined; + } + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. If provided, it should be an object. The`options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'node:fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and`fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and`fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). + * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint?: false | undefined; + }) + | undefined, + listener: StatsListener, + ): StatWatcher; + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint: true; + }) + | undefined, + listener: BigIntStatsListener, + ): StatWatcher; + /** + * Watch for changes on `filename`. The callback `listener` will be called each time the file is accessed. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function watchFile(filename: PathLike, listener: StatsListener): StatWatcher; + /** + * Stop watching for changes on `filename`. If `listener` is specified, only that + * particular listener is removed. Otherwise, _all_ listeners are removed, + * effectively stopping watching of `filename`. + * + * Calling `fs.unwatchFile()` with a filename that is not being watched is a + * no-op, not an error. + * + * Using {@link watch} is more efficient than `fs.watchFile()` and`fs.unwatchFile()`. 
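To illustrate the `watchFile()`/`unwatchFile()` pair declared above, a minimal sketch that polls a file and later removes its listener; the file name, interval, and timeout are placeholders:

```js
import { watchFile, unwatchFile } from 'node:fs';

// Poll message.txt once per second; log only real modifications.
const listener = (curr, prev) => {
  if (curr.mtimeMs !== prev.mtimeMs) {
    console.log(`modified at ${curr.mtime}`);
  }
};
watchFile('message.txt', { interval: 1000 }, listener);

// Later, remove exactly this listener (other listeners keep watching).
setTimeout(() => unwatchFile('message.txt', listener), 10_000);
```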
`fs.watch()` should be used instead of `fs.watchFile()`and `fs.unwatchFile()` when possible. + * @since v0.1.31 + * @param listener Optional, a listener previously attached using `fs.watchFile()` + */ + export function unwatchFile(filename: PathLike, listener?: StatsListener): void; + export function unwatchFile(filename: PathLike, listener?: BigIntStatsListener): void; + export interface WatchOptions extends Abortable { + encoding?: BufferEncoding | "buffer" | undefined; + persistent?: boolean | undefined; + recursive?: boolean | undefined; + } + export type WatchEventType = "rename" | "change"; + export type WatchListener = (event: WatchEventType, filename: T | null) => void; + export type StatsListener = (curr: Stats, prev: Stats) => void; + export type BigIntStatsListener = (curr: BigIntStats, prev: BigIntStats) => void; + /** + * Watch for changes on `filename`, where `filename` is either a file or a + * directory. + * + * The second argument is optional. If `options` is provided as a string, it + * specifies the `encoding`. Otherwise `options` should be passed as an object. + * + * The listener callback gets two arguments `(eventType, filename)`. `eventType`is either `'rename'` or `'change'`, and `filename` is the name of the file + * which triggered the event. + * + * On most platforms, `'rename'` is emitted whenever a filename appears or + * disappears in the directory. + * + * The listener callback is attached to the `'change'` event fired by `fs.FSWatcher`, but it is not the same thing as the `'change'` value of`eventType`. + * + * If a `signal` is passed, aborting the corresponding AbortController will close + * the returned `fs.FSWatcher`. + * @since v0.5.10 + * @param listener + */ + export function watch( + filename: PathLike, + options: + | (WatchOptions & { + encoding: "buffer"; + }) + | "buffer", + listener?: WatchListener, + ): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + export function watch( + filename: PathLike, + options?: WatchOptions | BufferEncoding | null, + listener?: WatchListener, + ): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + export function watch( + filename: PathLike, + options: WatchOptions | string, + listener?: WatchListener, + ): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. 
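Since `WatchOptions` above extends `Abortable`, an `AbortSignal` can be used to close the returned `FSWatcher`. A minimal sketch; the watched path and timeout are placeholders:

```js
import { watch } from 'node:fs';

// Watch the current directory; aborting the controller closes the watcher.
const ac = new AbortController();
const watcher = watch('.', { persistent: true, signal: ac.signal }, (eventType, filename) => {
  console.log(`${eventType}: ${filename}`);
});
watcher.on('error', (err) => console.error('watch error:', err));

// Stop watching after ten seconds.
setTimeout(() => ac.abort(), 10_000);
```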
If a URL is provided, it must use the `file:` protocol. + */ + export function watch(filename: PathLike, listener?: WatchListener): FSWatcher; + /** + * Test whether or not the given path exists by checking with the file system. + * Then call the `callback` argument with either true or false: + * + * ```js + * import { exists } from 'node:fs'; + * + * exists('/etc/passwd', (e) => { + * console.log(e ? 'it exists' : 'no passwd!'); + * }); + * ``` + * + * **The parameters for this callback are not consistent with other Node.js** + * **callbacks.** Normally, the first parameter to a Node.js callback is an `err`parameter, optionally followed by other parameters. The `fs.exists()` callback + * has only one boolean parameter. This is one reason `fs.access()` is recommended + * instead of `fs.exists()`. + * + * Using `fs.exists()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()`, or `fs.writeFile()` is not recommended. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file does not exist. + * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { exists, open, close } from 'node:fs'; + * + * exists('myfile', (e) => { + * if (e) { + * console.error('myfile already exists'); + * } else { + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { open, close, exists } from 'node:fs'; + * + * exists('myfile', (e) => { + * if (e) { + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } else { + * console.error('myfile does not exist'); + * } + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for existence and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the existence of a file only if the file won't be + * used directly, for example when its existence is a signal from another + * process. + * @since v0.0.2 + * @deprecated Since v1.0.0 - Use {@link stat} or {@link access} instead. + */ + export function exists(path: PathLike, callback: (exists: boolean) => void): void; + /** @deprecated */ + export namespace exists { + /** + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. 
+ */ + function __promisify__(path: PathLike): Promise; + } + /** + * Returns `true` if the path exists, `false` otherwise. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link exists}. + * + * `fs.exists()` is deprecated, but `fs.existsSync()` is not. The `callback`parameter to `fs.exists()` accepts parameters that are inconsistent with other + * Node.js callbacks. `fs.existsSync()` does not use a callback. + * + * ```js + * import { existsSync } from 'node:fs'; + * + * if (existsSync('/etc/passwd')) + * console.log('The path exists.'); + * ``` + * @since v0.1.21 + */ + export function existsSync(path: PathLike): boolean; + export namespace constants { + // File Access Constants + /** Constant for fs.access(). File is visible to the calling process. */ + const F_OK: number; + /** Constant for fs.access(). File can be read by the calling process. */ + const R_OK: number; + /** Constant for fs.access(). File can be written by the calling process. */ + const W_OK: number; + /** Constant for fs.access(). File can be executed by the calling process. */ + const X_OK: number; + // File Copy Constants + /** Constant for fs.copyFile. Flag indicating the destination file should not be overwritten if it already exists. */ + const COPYFILE_EXCL: number; + /** + * Constant for fs.copyFile. copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then a fallback copy mechanism is used. + */ + const COPYFILE_FICLONE: number; + /** + * Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then the operation will fail with an error. + */ + const COPYFILE_FICLONE_FORCE: number; + // File Open Constants + /** Constant for fs.open(). Flag indicating to open a file for read-only access. */ + const O_RDONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for write-only access. */ + const O_WRONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for read-write access. */ + const O_RDWR: number; + /** Constant for fs.open(). Flag indicating to create the file if it does not already exist. */ + const O_CREAT: number; + /** Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. */ + const O_EXCL: number; + /** + * Constant for fs.open(). Flag indicating that if path identifies a terminal device, + * opening the path shall not cause that terminal to become the controlling terminal for the process + * (if the process does not already have one). + */ + const O_NOCTTY: number; + /** Constant for fs.open(). Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. */ + const O_TRUNC: number; + /** Constant for fs.open(). Flag indicating that data will be appended to the end of the file. */ + const O_APPEND: number; + /** Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. */ + const O_DIRECTORY: number; + /** + * constant for fs.open(). + * Flag indicating reading accesses to the file system will no longer result in + * an update to the atime information associated with the file. + * This flag is available on Linux operating systems only. + */ + const O_NOATIME: number; + /** Constant for fs.open(). 
Flag indicating that the open should fail if the path is a symbolic link. */ + const O_NOFOLLOW: number; + /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. */ + const O_SYNC: number; + /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O with write operations waiting for data integrity. */ + const O_DSYNC: number; + /** Constant for fs.open(). Flag indicating to open the symbolic link itself rather than the resource it is pointing to. */ + const O_SYMLINK: number; + /** Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. */ + const O_DIRECT: number; + /** Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. */ + const O_NONBLOCK: number; + // File Type Constants + /** Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. */ + const S_IFMT: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. */ + const S_IFREG: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. */ + const S_IFDIR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. */ + const S_IFCHR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. */ + const S_IFBLK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. */ + const S_IFIFO: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. */ + const S_IFLNK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. */ + const S_IFSOCK: number; + // File Mode Constants + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. */ + const S_IRWXU: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. */ + const S_IRUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. */ + const S_IWUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. */ + const S_IXUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. */ + const S_IRWXG: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. */ + const S_IRGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. */ + const S_IWGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. */ + const S_IXGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. 
*/ + const S_IRWXO: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. */ + const S_IROTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. */ + const S_IWOTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. */ + const S_IXOTH: number; + /** + * When set, a memory file mapping is used to access the file. This flag + * is available on Windows operating systems only. On other operating systems, + * this flag is ignored. + */ + const UV_FS_O_FILEMAP: number; + } + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK`or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`,`fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * The final argument, `callback`, is a callback function that is invoked with + * a possible error argument. If any of the accessibility checks fail, the error + * argument will be an `Error` object. The following examples check if`package.json` exists, and if it is readable or writable. + * + * ```js + * import { access, constants } from 'node:fs'; + * + * const file = 'package.json'; + * + * // Check if the file exists in the current directory. + * access(file, constants.F_OK, (err) => { + * console.log(`${file} ${err ? 'does not exist' : 'exists'}`); + * }); + * + * // Check if the file is readable. + * access(file, constants.R_OK, (err) => { + * console.log(`${file} ${err ? 'is not readable' : 'is readable'}`); + * }); + * + * // Check if the file is writable. + * access(file, constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not writable' : 'is writable'}`); + * }); + * + * // Check if the file is readable and writable. + * access(file, constants.R_OK | constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not' : 'is'} readable and writable`); + * }); + * ``` + * + * Do not use `fs.access()` to check for the accessibility of a file before calling`fs.open()`, `fs.readFile()`, or `fs.writeFile()`. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file is not accessible. 
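The file-type constants declared above are bit masks over a POSIX-style `stats.mode`. A minimal sketch of checking whether a path is a regular file or a directory; the path is a placeholder, and `stats.isFile()` remains the portable alternative:

```js
import { statSync, constants } from 'node:fs';

// S_IFMT masks out the file-type bits of stats.mode; compare against S_IFREG/S_IFDIR.
const { mode } = statSync('message.txt');
const type = mode & constants.S_IFMT;
if (type === constants.S_IFREG) {
  console.log('regular file');
} else if (type === constants.S_IFDIR) {
  console.log('directory');
}
```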
+ * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'node:fs'; + * + * access('myfile', (err) => { + * if (!err) { + * console.error('myfile already exists'); + * return; + * } + * + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'node:fs'; + * access('myfile', (err) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for accessibility and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the accessibility of a file only if the file will not be + * used directly, for example when its accessibility is a signal from another + * process. + * + * On Windows, access-control policies (ACLs) on a directory may limit access to + * a file or directory. The `fs.access()` function, however, does not check the + * ACL and therefore may report that a path is accessible even if the ACL restricts + * the user from reading or writing to it. + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function access(path: PathLike, mode: number | undefined, callback: NoParamCallback): void; + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function access(path: PathLike, callback: NoParamCallback): void; + export namespace access { + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(path: PathLike, mode?: number): Promise; + } + /** + * Synchronously tests a user's permissions for the file or directory specified + * by `path`. The `mode` argument is an optional integer that specifies the + * accessibility checks to be performed. `mode` should be either the value`fs.constants.F_OK` or a mask consisting of the bitwise OR of any of`fs.constants.R_OK`, `fs.constants.W_OK`, and + * `fs.constants.X_OK` (e.g.`fs.constants.W_OK | fs.constants.R_OK`). 
Check `File access constants` for + * possible values of `mode`. + * + * If any of the accessibility checks fail, an `Error` will be thrown. Otherwise, + * the method will return `undefined`. + * + * ```js + * import { accessSync, constants } from 'node:fs'; + * + * try { + * accessSync('etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can read/write'); + * } catch (err) { + * console.error('no access!'); + * } + * ``` + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function accessSync(path: PathLike, mode?: number): void; + interface StreamOptions { + flags?: string | undefined; + encoding?: BufferEncoding | undefined; + fd?: number | promises.FileHandle | undefined; + mode?: number | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + signal?: AbortSignal | null | undefined; + highWaterMark?: number | undefined; + } + interface FSImplementation { + open?: (...args: any[]) => any; + close?: (...args: any[]) => any; + } + interface CreateReadStreamFSImplementation extends FSImplementation { + read: (...args: any[]) => any; + } + interface CreateWriteStreamFSImplementation extends FSImplementation { + write: (...args: any[]) => any; + writev?: (...args: any[]) => any; + } + interface ReadStreamOptions extends StreamOptions { + fs?: CreateReadStreamFSImplementation | null | undefined; + end?: number | undefined; + } + interface WriteStreamOptions extends StreamOptions { + fs?: CreateWriteStreamFSImplementation | null | undefined; + flush?: boolean | undefined; + } + /** + * Unlike the 16 KiB default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 KiB. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `fd` is specified and `start` is + * omitted or `undefined`, `fs.createReadStream()` reads sequentially from the + * current file position. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `fd` is specified, `ReadStream` will ignore the `path` argument and will use + * the specified file descriptor. This means that no `'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s should be passed to `net.Socket`. + * + * If `fd` points to a character device that only supports blocking reads + * (such as keyboard or sound card), read operations do not finish until data is + * available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option, it is possible to override the corresponding `fs`implementations for `open`, `read`, and `close`. When providing the `fs` option, + * an override for `read` is required. If no `fd` is provided, an override for`open` is also required. If `autoClose` is `true`, an override for `close` is + * also required. + * + * ```js + * import { createReadStream } from 'node:fs'; + * + * // Create a stream from some character device. 
+ * const stream = createReadStream('/dev/input/event0'); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * `mode` sets the file mode (permission and sticky bits), but only if the + * file was created. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { createReadStream } from 'node:fs'; + * + * createReadStream('sample.txt', { start: 90, end: 99 }); + * ``` + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createReadStream(path: PathLike, options?: BufferEncoding | ReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` option to be set to `r+` rather than the + * default `w`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'`the file descriptor will be closed automatically. If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option it is possible to override the corresponding `fs`implementations for `open`, `write`, `writev`, and `close`. Overriding `write()`without `writev()` can reduce + * performance as some optimizations (`_writev()`) + * will be disabled. When providing the `fs` option, overrides for at least one of`write` and `writev` are required. If no `fd` option is supplied, an override + * for `open` is also required. If `autoClose` is `true`, an override for `close`is also required. + * + * Like `fs.ReadStream`, if `fd` is specified, `fs.WriteStream` will ignore the`path` argument and will use the specified file descriptor. This means that no`'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s + * should be passed to `net.Socket`. + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createWriteStream(path: PathLike, options?: BufferEncoding | WriteStreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. 
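Combining `createReadStream()` and `createWriteStream()` as declared above gives a simple streaming copy. A minimal sketch; the file names and buffer size are placeholders:

```js
import { createReadStream, createWriteStream } from 'node:fs';

// Stream-copy source.txt to destination.txt; both streams close themselves
// automatically because autoClose defaults to true.
const source = createReadStream('source.txt', { highWaterMark: 64 * 1024 });
const destination = createWriteStream('destination.txt', { flags: 'w' });

source.on('error', (err) => console.error('read failed:', err));
destination.on('error', (err) => console.error('write failed:', err));
destination.on('finish', () => console.log('copy complete'));

source.pipe(destination);
```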
Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. No arguments other + * than a possible + * exception are given to the completion callback. + * @since v0.1.96 + */ + export function fdatasync(fd: number, callback: NoParamCallback): void; + export namespace fdatasync { + /** + * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. Returns `undefined`. + * @since v0.1.96 + */ + export function fdatasyncSync(fd: number): void; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. No arguments other than a possible exception are given to the + * callback function. Node.js makes no guarantees about the atomicity of the copy + * operation. If an error occurs after the destination file has been opened for + * writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFile, constants } from 'node:fs'; + * + * function callback(err) { + * if (err) throw err; + * console.log('source.txt was copied to destination.txt'); + * } + * + * // destination.txt will be created or overwritten by default. + * copyFile('source.txt', 'destination.txt', callback); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL, callback); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFile(src: PathLike, dest: PathLike, callback: NoParamCallback): void; + export function copyFile(src: PathLike, dest: PathLike, mode: number, callback: NoParamCallback): void; + export namespace copyFile { + function __promisify__(src: PathLike, dst: PathLike, mode?: number): Promise; + } + /** + * Synchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. Returns `undefined`. Node.js makes no guarantees about the + * atomicity of the copy operation. If an error occurs after the destination file + * has been opened for writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. 
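`fdatasync()` declared above flushes the file's data (though not all of its metadata) for an open descriptor. A minimal callback-style sketch; the file name and record are placeholders:

```js
import { open, write, fdatasync, close } from 'node:fs';

// Append a record, force the data to stable storage, then close the descriptor.
open('journal.log', 'a', (err, fd) => {
  if (err) throw err;
  write(fd, 'entry\n', (writeErr) => {
    if (writeErr) throw writeErr;
    fdatasync(fd, (syncErr) => {
      if (syncErr) throw syncErr;
      close(fd, (closeErr) => {
        if (closeErr) throw closeErr;
      });
    });
  });
});
```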
It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFileSync, constants } from 'node:fs'; + * + * // destination.txt will be created or overwritten by default. + * copyFileSync('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFileSync('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFileSync(src: PathLike, dest: PathLike, mode?: number): void; + /** + * Write an array of `ArrayBufferView`s to the file specified by `fd` using`writev()`. + * + * `position` is the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. + * + * The callback will be given three arguments: `err`, `bytesWritten`, and`buffers`. `bytesWritten` is how many bytes were written from `buffers`. + * + * If this method is `util.promisify()` ed, it returns a promise for an`Object` with `bytesWritten` and `buffers` properties. + * + * It is unsafe to use `fs.writev()` multiple times on the same file without + * waiting for the callback. For this scenario, use {@link createWriteStream}. + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + * @param [position='null'] + */ + export function writev( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export function writev( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export interface WriteVResult { + bytesWritten: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace writev { + function __promisify__( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position?: number, + ): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writev}. + * @since v12.9.0 + * @param [position='null'] + * @return The number of bytes written. + */ + export function writevSync(fd: number, buffers: readonly NodeJS.ArrayBufferView[], position?: number): number; + /** + * Read from a file specified by `fd` and write to an array of `ArrayBufferView`s + * using `readv()`. + * + * `position` is the offset from the beginning of the file from where data + * should be read. 
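A minimal sketch of the gather-write `writev()` declared above; the file name and buffer contents are placeholders:

```js
import { open, writev, close } from 'node:fs';
import { Buffer } from 'node:buffer';

// Write two buffers to the file with a single writev() call.
open('vectors.bin', 'w', (err, fd) => {
  if (err) throw err;
  const buffers = [Buffer.from('first '), Buffer.from('second')];
  writev(fd, buffers, (writeErr, bytesWritten) => {
    if (writeErr) throw writeErr;
    console.log(`${bytesWritten} bytes written`);
    close(fd, () => {});
  });
});
```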
If `typeof position !== 'number'`, the data will be read + * from the current position. + * + * The callback will be given three arguments: `err`, `bytesRead`, and`buffers`. `bytesRead` is how many bytes were read from the file. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffers` properties. + * @since v13.13.0, v12.17.0 + * @param [position='null'] + */ + export function readv( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export function readv( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export interface ReadVResult { + bytesRead: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace readv { + function __promisify__( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position?: number, + ): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readv}. + * @since v13.13.0, v12.17.0 + * @param [position='null'] + * @return The number of bytes read. + */ + export function readvSync(fd: number, buffers: readonly NodeJS.ArrayBufferView[], position?: number): number; + + export interface OpenAsBlobOptions { + /** + * An optional mime type for the blob. + * + * @default 'undefined' + */ + type?: string | undefined; + } + + /** + * Returns a `Blob` whose data is backed by the given file. + * + * The file must not be modified after the `Blob` is created. Any modifications + * will cause reading the `Blob` data to fail with a `DOMException` error. + * Synchronous stat operations on the file when the `Blob` is created, and before + * each read in order to detect whether the file data has been modified on disk. + * + * ```js + * import { openAsBlob } from 'node:fs'; + * + * const blob = await openAsBlob('the.file.txt'); + * const ab = await blob.arrayBuffer(); + * blob.stream(); + * ``` + * @since v19.8.0 + * @experimental + */ + export function openAsBlob(path: PathLike, options?: OpenAsBlobOptions): Promise; + + export interface OpenDirOptions { + /** + * @default 'utf8' + */ + encoding?: BufferEncoding | undefined; + /** + * Number of directory entries that are buffered + * internally when reading from the directory. Higher values lead to better + * performance but higher memory usage. + * @default 32 + */ + bufferSize?: number | undefined; + /** + * @default false + */ + recursive?: boolean; + } + /** + * Synchronously open a directory. See [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html). + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendirSync(path: PathLike, options?: OpenDirOptions): Dir; + /** + * Asynchronously open a directory. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for + * more details. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. 
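`opendirSync()` declared above returns an `fs.Dir` whose entries can be read one at a time. A minimal sketch; the directory path and buffer size are placeholders:

```js
import { opendirSync } from 'node:fs';

// Read directory entries one Dirent at a time, then release the handle.
const dir = opendirSync('.', { bufferSize: 64 });
try {
  let entry;
  while ((entry = dir.readSync()) !== null) {
    console.log(entry.isDirectory() ? `[dir] ${entry.name}` : entry.name);
  }
} finally {
  dir.closeSync();
}
```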
+ * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendir(path: PathLike, cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void): void; + export function opendir( + path: PathLike, + options: OpenDirOptions, + cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void, + ): void; + export namespace opendir { + function __promisify__(path: PathLike, options?: OpenDirOptions): Promise; + } + export interface BigIntStats extends StatsBase { + atimeNs: bigint; + mtimeNs: bigint; + ctimeNs: bigint; + birthtimeNs: bigint; + } + export interface BigIntOptions { + bigint: true; + } + export interface StatOptions { + bigint?: boolean | undefined; + } + export interface StatSyncOptions extends StatOptions { + throwIfNoEntry?: boolean | undefined; + } + interface CopyOptionsBase { + /** + * Dereference symlinks + * @default false + */ + dereference?: boolean; + /** + * When `force` is `false`, and the destination + * exists, throw an error. + * @default false + */ + errorOnExist?: boolean; + /** + * Overwrite existing file or directory. _The copy + * operation will ignore errors if you set this to false and the destination + * exists. Use the `errorOnExist` option to change this behavior. + * @default true + */ + force?: boolean; + /** + * Modifiers for copy operation. See `mode` flag of {@link copyFileSync()} + */ + mode?: number; + /** + * When `true` timestamps from `src` will + * be preserved. + * @default false + */ + preserveTimestamps?: boolean; + /** + * Copy directories recursively. + * @default false + */ + recursive?: boolean; + /** + * When true, path resolution for symlinks will be skipped + * @default false + */ + verbatimSymlinks?: boolean; + } + export interface CopyOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean | Promise; + } + export interface CopySyncOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean; + } + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + */ + export function cp( + source: string | URL, + destination: string | URL, + callback: (err: NodeJS.ErrnoException | null) => void, + ): void; + export function cp( + source: string | URL, + destination: string | URL, + opts: CopyOptions, + callback: (err: NodeJS.ErrnoException | null) => void, + ): void; + /** + * Synchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. 
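A minimal sketch of `cp()` with the `CopyOptions` declared above; the paths and the filter rule are placeholders:

```js
import { cp } from 'node:fs';

// Recursively copy a tree, silently skipping anything under node_modules.
cp('src-dir', 'dest-dir', {
  recursive: true,
  preserveTimestamps: true,
  filter: (source) => !source.includes('node_modules'),
}, (err) => {
  if (err) throw err;
  console.log('copy finished');
});
```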
+ */ + export function cpSync(source: string | URL, destination: string | URL, opts?: CopySyncOptions): void; +} +declare module "node:fs" { + export * from "fs"; +} diff --git a/node_modules/@types/node/fs/promises.d.ts b/node_modules/@types/node/fs/promises.d.ts new file mode 100644 index 0000000..7d8ef52 --- /dev/null +++ b/node_modules/@types/node/fs/promises.d.ts @@ -0,0 +1,1245 @@ +/** + * The `fs/promises` API provides asynchronous file system methods that return + * promises. + * + * The promise APIs use the underlying Node.js threadpool to perform file + * system operations off the event loop thread. These operations are not + * synchronized or threadsafe. Care must be taken when performing multiple + * concurrent modifications on the same file or data corruption may occur. + * @since v10.0.0 + */ +declare module "fs/promises" { + import { Abortable } from "node:events"; + import { Stream } from "node:stream"; + import { ReadableStream } from "node:stream/web"; + import { + BigIntStats, + BigIntStatsFs, + BufferEncodingOption, + constants as fsConstants, + CopyOptions, + Dir, + Dirent, + MakeDirectoryOptions, + Mode, + ObjectEncodingOptions, + OpenDirOptions, + OpenMode, + PathLike, + ReadStream, + ReadVResult, + RmDirOptions, + RmOptions, + StatFsOptions, + StatOptions, + Stats, + StatsFs, + TimeLike, + WatchEventType, + WatchOptions, + WriteStream, + WriteVResult, + } from "node:fs"; + import { Interface as ReadlineInterface } from "node:readline"; + interface FileChangeInfo { + eventType: WatchEventType; + filename: T | null; + } + interface FlagAndOpenMode { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + } + interface FileReadResult { + bytesRead: number; + buffer: T; + } + interface FileReadOptions { + /** + * @default `Buffer.alloc(0xffff)` + */ + buffer?: T; + /** + * @default 0 + */ + offset?: number | null; + /** + * @default `buffer.byteLength` + */ + length?: number | null; + position?: number | null; + } + interface CreateReadStreamOptions { + encoding?: BufferEncoding | null | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + end?: number | undefined; + highWaterMark?: number | undefined; + } + interface CreateWriteStreamOptions { + encoding?: BufferEncoding | null | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + highWaterMark?: number | undefined; + flush?: boolean | undefined; + } + interface ReadableWebStreamOptions { + /** + * Whether to open a normal or a `'bytes'` stream. + * @since v20.0.0 + */ + type?: "bytes" | undefined; + } + // TODO: Add `EventEmitter` close + interface FileHandle { + /** + * The numeric file descriptor managed by the {FileHandle} object. + * @since v10.0.0 + */ + readonly fd: number; + /** + * Alias of `filehandle.writeFile()`. + * + * When operating on file handles, the mode cannot be changed from what it was set + * to with `fsPromises.open()`. Therefore, this is equivalent to `filehandle.writeFile()`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + appendFile( + data: string | Uint8Array, + options?: + | (ObjectEncodingOptions & FlagAndOpenMode & { flush?: boolean | undefined }) + | BufferEncoding + | null, + ): Promise; + /** + * Changes the ownership of the file. A wrapper for [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html). + * @since v10.0.0 + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. 
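The `FileHandle` interface introduced above is obtained from `fsPromises.open()`. A minimal sketch of appending through a handle and always closing it; the file name and mode are placeholders:

```js
import { open } from 'node:fs/promises';

// Open for appending, write one line, and close the handle even on failure.
const handle = await open('notes.txt', 'a', 0o644);
try {
  await handle.appendFile('one more line\n', 'utf8');
} finally {
  await handle.close();
}
```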
+ * @return Fulfills with `undefined` upon success. + */ + chown(uid: number, gid: number): Promise; + /** + * Modifies the permissions on the file. See [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html). + * @since v10.0.0 + * @param mode the file mode bit mask. + * @return Fulfills with `undefined` upon success. + */ + chmod(mode: Mode): Promise; + /** + * Unlike the 16 KiB default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 KiB. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `start` is + * omitted or `undefined`, `filehandle.createReadStream()` reads sequentially from + * the current file position. The `encoding` can be any one of those accepted by `Buffer`. + * + * If the `FileHandle` points to a character device that only supports blocking + * reads (such as keyboard or sound card), read operations do not finish until data + * is available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const fd = await open('/dev/input/event0'); + * // Create a stream from some character device. + * const stream = fd.createReadStream(); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const fd = await open('sample.txt'); + * fd.createReadStream({ start: 90, end: 99 }); + * ``` + * @since v16.11.0 + */ + createReadStream(options?: CreateReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` `open` option to be set to `r+` rather than + * the default `r`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'`the file descriptor will be closed automatically. 
If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * @since v16.11.0 + */ + createWriteStream(options?: CreateWriteStreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. + * + * Unlike `filehandle.sync` this method does not flush modified metadata. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + datasync(): Promise; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + sync(): Promise; + /** + * Reads data from the file and stores that in the given buffer. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * @since v10.0.0 + * @param buffer A buffer that will be filled with the file data read. + * @param offset The location in the buffer at which to start filling. + * @param length The number of bytes to read. + * @param position The location where to begin reading data from the file. If `null`, data will be read from the current file position, and the position will be updated. If `position` is an + * integer, the current file position will remain unchanged. + * @return Fulfills upon success with an object with two properties: + */ + read( + buffer: T, + offset?: number | null, + length?: number | null, + position?: number | null, + ): Promise>; + read(options?: FileReadOptions): Promise>; + /** + * Returns a `ReadableStream` that may be used to read the files data. + * + * An error will be thrown if this method is called more than once or is called + * after the `FileHandle` is closed or closing. + * + * ```js + * import { + * open, + * } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const chunk of file.readableWebStream()) + * console.log(chunk); + * + * await file.close(); + * ``` + * + * While the `ReadableStream` will read the file to completion, it will not + * close the `FileHandle` automatically. User code must still call the`fileHandle.close()` method. + * @since v17.0.0 + * @experimental + */ + readableWebStream(options?: ReadableWebStreamOptions): ReadableStream; + /** + * Asynchronously reads the entire contents of a file. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support reading. + * + * If one or more `filehandle.read()` calls are made on a file handle and then a`filehandle.readFile()` call is made, the data will be read from the current + * position till the end of the file. It doesn't always read from the beginning + * of the file. + * @since v10.0.0 + * @return Fulfills upon a successful read with the contents of the file. If no encoding is specified (using `options.encoding`), the data is returned as a {Buffer} object. 
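A minimal sketch of `filehandle.read()` as declared above, reading a fixed range into a preallocated buffer; the file name and sizes are placeholders:

```js
import { open } from 'node:fs/promises';
import { Buffer } from 'node:buffer';

// Read up to 16 bytes from the start of the file without moving the file position.
const handle = await open('message.txt', 'r');
try {
  const buffer = Buffer.alloc(16);
  const { bytesRead } = await handle.read(buffer, 0, buffer.length, 0);
  console.log(`${bytesRead} bytes:`, buffer.subarray(0, bytesRead).toString('utf8'));
} finally {
  await handle.close();
}
```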
Otherwise, the + * data will be a string. + */ + readFile( + options?: { + encoding?: null | undefined; + flag?: OpenMode | undefined; + } | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. + * The `FileHandle` must have been opened for reading. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + readFile( + options: + | { + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. + * The `FileHandle` must have been opened for reading. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + readFile( + options?: + | (ObjectEncodingOptions & { + flag?: OpenMode | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Convenience method to create a `readline` interface and stream over the file. + * See `filehandle.createReadStream()` for the options. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const line of file.readLines()) { + * console.log(line); + * } + * ``` + * @since v18.11.0 + */ + readLines(options?: CreateReadStreamOptions): ReadlineInterface; + /** + * @since v10.0.0 + * @return Fulfills with an {fs.Stats} for the file. + */ + stat( + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + stat( + opts: StatOptions & { + bigint: true; + }, + ): Promise; + stat(opts?: StatOptions): Promise; + /** + * Truncates the file. + * + * If the file was larger than `len` bytes, only the first `len` bytes will be + * retained in the file. + * + * The following example retains only the first four bytes of the file: + * + * ```js + * import { open } from 'node:fs/promises'; + * + * let filehandle = null; + * try { + * filehandle = await open('temp.txt', 'r+'); + * await filehandle.truncate(4); + * } finally { + * await filehandle?.close(); + * } + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. + */ + truncate(len?: number): Promise; + /** + * Change the file system timestamps of the object referenced by the `FileHandle` then fulfills the promise with no arguments upon success. + * @since v10.0.0 + */ + utimes(atime: TimeLike, mtime: TimeLike): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists.`data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface), or an + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * The promise is fulfilled with no arguments upon success. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support writing. + * + * It is unsafe to use `filehandle.writeFile()` multiple times on the same file + * without waiting for the promise to be fulfilled (or rejected). 
+ * + * If one or more `filehandle.write()` calls are made on a file handle and then a`filehandle.writeFile()` call is made, the data will be written from the + * current position till the end of the file. It doesn't always write from the + * beginning of the file. + * @since v10.0.0 + */ + writeFile( + data: string | Uint8Array, + options?: + | (ObjectEncodingOptions & FlagAndOpenMode & Abortable & { flush?: boolean | undefined }) + | BufferEncoding + | null, + ): Promise; + /** + * Write `buffer` to the file. + * + * The promise is fulfilled with an object containing two properties: + * + * It is unsafe to use `filehandle.write()` multiple times on the same file + * without waiting for the promise to be fulfilled (or rejected). For this + * scenario, use `filehandle.createWriteStream()`. + * + * On Linux, positional writes do not work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v10.0.0 + * @param offset The start position from within `buffer` where the data to write begins. + * @param [length=buffer.byteLength - offset] The number of bytes from `buffer` to write. + * @param [position='null'] The offset from the beginning of the file where the data from `buffer` should be written. If `position` is not a `number`, the data will be written at the current + * position. See the POSIX pwrite(2) documentation for more detail. + */ + write( + buffer: TBuffer, + offset?: number | null, + length?: number | null, + position?: number | null, + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + write( + data: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + /** + * Write an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s to the file. + * + * The promise is fulfilled with an object containing a two properties: + * + * It is unsafe to call `writev()` multiple times on the same file without waiting + * for the promise to be fulfilled (or rejected). + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + * @param [position='null'] The offset from the beginning of the file where the data from `buffers` should be written. If `position` is not a `number`, the data will be written at the current + * position. + */ + writev(buffers: readonly NodeJS.ArrayBufferView[], position?: number): Promise; + /** + * Read from a file and write to an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s + * @since v13.13.0, v12.17.0 + * @param [position='null'] The offset from the beginning of the file where the data should be read from. If `position` is not a `number`, the data will be read from the current position. + * @return Fulfills upon success an object containing two properties: + */ + readv(buffers: readonly NodeJS.ArrayBufferView[], position?: number): Promise; + /** + * Closes the file handle after waiting for any pending operation on the handle to + * complete. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * let filehandle; + * try { + * filehandle = await open('thefile.txt', 'r'); + * } finally { + * await filehandle?.close(); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. 
+ */ + close(): Promise; + /** + * An alias for {@link FileHandle.close()}. + * @since v20.4.0 + */ + [Symbol.asyncDispose](): Promise; + } + const constants: typeof fsConstants; + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK`or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`,`fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * If the accessibility check is successful, the promise is fulfilled with no + * value. If any of the accessibility checks fail, the promise is rejected + * with an [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object. The following example checks if the file`/etc/passwd` can be read and + * written by the current process. + * + * ```js + * import { access, constants } from 'node:fs/promises'; + * + * try { + * await access('/etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can access'); + * } catch { + * console.error('cannot access'); + * } + * ``` + * + * Using `fsPromises.access()` to check for the accessibility of a file before + * calling `fsPromises.open()` is not recommended. Doing so introduces a race + * condition, since other processes may change the file's state between the two + * calls. Instead, user code should open/read/write the file directly and handle + * the error raised if the file is not accessible. + * @since v10.0.0 + * @param [mode=fs.constants.F_OK] + * @return Fulfills with `undefined` upon success. + */ + function access(path: PathLike, mode?: number): Promise; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. + * + * No guarantees are made about the atomicity of the copy operation. If an + * error occurs after the destination file has been opened for writing, an attempt + * will be made to remove the destination. + * + * ```js + * import { copyFile, constants } from 'node:fs/promises'; + * + * try { + * await copyFile('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.error('The file could not be copied'); + * } + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * try { + * await copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.error('The file could not be copied'); + * } + * ``` + * @since v10.0.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] Optional modifiers that specify the behavior of the copy operation. It is possible to create a mask consisting of the bitwise OR of two or more values (e.g. + * `fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`) + * @return Fulfills with `undefined` upon success. + */ + function copyFile(src: PathLike, dest: PathLike, mode?: number): Promise; + /** + * Opens a `FileHandle`. + * + * Refer to the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more detail. + * + * Some characters (`< > : " / \ | ? 
*`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * @since v10.0.0 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] Sets the file mode (permission and sticky bits) if the file is created. + * @return Fulfills with a {FileHandle} object. + */ + function open(path: PathLike, flags?: string | number, mode?: Mode): Promise; + /** + * Renames `oldPath` to `newPath`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rename(oldPath: PathLike, newPath: PathLike): Promise; + /** + * Truncates (shortens or extends the length) of the content at `path` to `len`bytes. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. + */ + function truncate(path: PathLike, len?: number): Promise; + /** + * Removes the directory identified by `path`. + * + * Using `fsPromises.rmdir()` on a file (not a directory) results in the + * promise being rejected with an `ENOENT` error on Windows and an `ENOTDIR`error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use `fsPromises.rm()` with options `{ recursive: true, force: true }`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rmdir(path: PathLike, options?: RmDirOptions): Promise; + /** + * Removes files and directories (modeled on the standard POSIX `rm` utility). + * @since v14.14.0 + * @return Fulfills with `undefined` upon success. + */ + function rm(path: PathLike, options?: RmOptions): Promise; + /** + * Asynchronously creates a directory. + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive`property indicating whether parent directories should be created. Calling`fsPromises.mkdir()` when `path` is a directory + * that exists results in a + * rejection only when `recursive` is false. + * + * ```js + * import { mkdir } from 'node:fs/promises'; + * + * try { + * const projectFolder = new URL('./test/project/', import.meta.url); + * const createDir = await mkdir(projectFolder, { recursive: true }); + * + * console.log(`created ${createDir}`); + * } catch (err) { + * console.error(err.message); + * } + * ``` + * @since v10.0.0 + * @return Upon success, fulfills with `undefined` if `recursive` is `false`, or the first directory path created if `recursive` is `true`. + */ + function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir(path: PathLike, options?: Mode | MakeDirectoryOptions | null): Promise; + /** + * Reads the contents of a directory. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames. If the `encoding` is set to `'buffer'`, the filenames returned + * will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the returned array will contain `fs.Dirent` objects. + * + * ```js + * import { readdir } from 'node:fs/promises'; + * + * try { + * const files = await readdir(path); + * for (const file of files) + * console.log(file); + * } catch (err) { + * console.error(err); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with an array of the names of the files in the directory excluding `'.'` and `'..'`. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Promise; + /** + * Reads the contents of the symbolic link referred to by `path`. See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more detail. The promise is + * fulfilled with the`linkString` upon success. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, the link path + * returned will be passed as a `Buffer` object. + * @since v10.0.0 + * @return Fulfills with the `linkString` upon success. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. 
If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | string | null): Promise; + /** + * Creates a symbolic link. + * + * The `type` argument is only used on Windows platforms and can be one of `'dir'`,`'file'`, or `'junction'`. If the `type` argument is not a string, Node.js will + * autodetect `target` type and use `'file'` or `'dir'`. If the `target` does not + * exist, `'file'` will be used. Windows junction points require the destination + * path to be absolute. When using `'junction'`, the `target` argument will + * automatically be normalized to absolute path. Junction points on NTFS volumes + * can only point to directories. + * @since v10.0.0 + * @param [type='null'] + * @return Fulfills with `undefined` upon success. + */ + function symlink(target: PathLike, path: PathLike, type?: string | null): Promise; + /** + * Equivalent to `fsPromises.stat()` unless `path` refers to a symbolic link, + * in which case the link itself is stat-ed, not the file that it refers to. + * Refer to the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) document for more detail. + * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given symbolic link `path`. + */ + function lstat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function lstat( + path: PathLike, + opts: StatOptions & { + bigint: true; + }, + ): Promise; + function lstat(path: PathLike, opts?: StatOptions): Promise; + /** + * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given `path`. + */ + function stat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function stat( + path: PathLike, + opts: StatOptions & { + bigint: true; + }, + ): Promise; + function stat(path: PathLike, opts?: StatOptions): Promise; + /** + * @since v19.6.0, v18.15.0 + * @return Fulfills with the {fs.StatFs} object for the given `path`. + */ + function statfs( + path: PathLike, + opts?: StatFsOptions & { + bigint?: false | undefined; + }, + ): Promise; + function statfs( + path: PathLike, + opts: StatFsOptions & { + bigint: true; + }, + ): Promise; + function statfs(path: PathLike, opts?: StatFsOptions): Promise; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function link(existingPath: PathLike, newPath: PathLike): Promise; + /** + * If `path` refers to a symbolic link, then the link is removed without affecting + * the file or directory to which that link refers. If the `path` refers to a file + * path that is not a symbolic link, the file is deleted. See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more detail. 
+ * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function unlink(path: PathLike): Promise; + /** + * Changes the permissions of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the permissions on a symbolic link. + * + * This method is only implemented on macOS. + * @deprecated Since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the ownership on a symbolic link. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchown(path: PathLike, uid: number, gid: number): Promise; + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, with the difference that if the path refers to a + * symbolic link, then the link is not dereferenced: instead, the timestamps of + * the symbolic link itself are changed. + * @since v14.5.0, v12.19.0 + * @return Fulfills with `undefined` upon success. + */ + function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Changes the ownership of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chown(path: PathLike, uid: number, gid: number): Promise; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time, `Date`s, or a + * numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity`, or`-Infinity`, an `Error` will be thrown. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Determines the actual location of `path` using the same semantics as the`fs.realpath.native()` function. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path. If the `encoding` is set to `'buffer'`, the path returned will be + * passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. + * @since v10.0.0 + * @return Fulfills with the resolved path upon success. + */ + function realpath(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function realpath(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. 
+ */ + function realpath( + path: PathLike, + options?: ObjectEncodingOptions | BufferEncoding | null, + ): Promise; + /** + * Creates a unique temporary directory. A unique directory name is generated by + * appending six random characters to the end of the provided `prefix`. Due to + * platform inconsistencies, avoid trailing `X` characters in `prefix`. Some + * platforms, notably the BSDs, can return more than six random characters, and + * replace trailing `X` characters in `prefix` with random characters. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'node:fs/promises'; + * import { join } from 'node:path'; + * import { tmpdir } from 'node:os'; + * + * try { + * await mkdtemp(join(tmpdir(), 'foo-')); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * The `fsPromises.mkdtemp()` method will append the six randomly selected + * characters directly to the `prefix` string. For instance, given a directory`/tmp`, if the intention is to create a temporary directory _within_`/tmp`, the`prefix` must end with a trailing + * platform-specific path separator + * (`require('node:path').sep`). + * @since v10.0.0 + * @return Fulfills with a string containing the file system path of the newly created temporary directory. + */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function mkdtemp(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists.`data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface), or an + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * If `options` is a string, then it specifies the encoding. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * Any specified `FileHandle` has to support writing. + * + * It is unsafe to use `fsPromises.writeFile()` multiple times on the same file + * without waiting for the promise to be settled. + * + * Similarly to `fsPromises.readFile` \- `fsPromises.writeFile` is a convenience + * method that performs multiple `write` calls internally to write the buffer + * passed to it. For performance sensitive code consider using `fs.createWriteStream()` or `filehandle.createWriteStream()`. + * + * It is possible to use an `AbortSignal` to cancel an `fsPromises.writeFile()`. 
+ * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'node:fs/promises'; + * import { Buffer } from 'node:buffer'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * const promise = writeFile('message.txt', data, { signal }); + * + * // Abort the request before the promise settles. + * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v10.0.0 + * @param file filename or `FileHandle` + * @return Fulfills with `undefined` upon success. + */ + function writeFile( + file: PathLike | FileHandle, + data: + | string + | NodeJS.ArrayBufferView + | Iterable + | AsyncIterable + | Stream, + options?: + | (ObjectEncodingOptions & { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + /** + * If all data is successfully written to the file, and `flush` + * is `true`, `filehandle.sync()` is used to flush the data. + * @default false + */ + flush?: boolean | undefined; + } & Abortable) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * The `path` may be specified as a `FileHandle` that has been opened + * for appending (using `fsPromises.open()`). + * @since v10.0.0 + * @param path filename or {FileHandle} + * @return Fulfills with `undefined` upon success. + */ + function appendFile( + path: PathLike | FileHandle, + data: string | Uint8Array, + options?: (ObjectEncodingOptions & FlagAndOpenMode & { flush?: boolean | undefined }) | BufferEncoding | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * + * If no encoding is specified (using `options.encoding`), the data is returned + * as a `Buffer` object. Otherwise, the data will be a string. + * + * If `options` is a string, then it specifies the encoding. + * + * When the `path` is a directory, the behavior of `fsPromises.readFile()` is + * platform-specific. On macOS, Linux, and Windows, the promise will be rejected + * with an error. On FreeBSD, a representation of the directory's contents will be + * returned. + * + * An example of reading a `package.json` file located in the same directory of the + * running code: + * + * ```js + * import { readFile } from 'node:fs/promises'; + * try { + * const filePath = new URL('./package.json', import.meta.url); + * const contents = await readFile(filePath, { encoding: 'utf8' }); + * console.log(contents); + * } catch (err) { + * console.error(err.message); + * } + * ``` + * + * It is possible to abort an ongoing `readFile` using an `AbortSignal`. If a + * request is aborted the promise returned is rejected with an `AbortError`: + * + * ```js + * import { readFile } from 'node:fs/promises'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const promise = readFile(fileName, { signal }); + * + * // Abort the request before the promise settles. 
+ * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * + * Any specified `FileHandle` has to support reading. + * @since v10.0.0 + * @param path filename or `FileHandle` + * @return Fulfills with the contents of the file. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | ({ + encoding?: null | undefined; + flag?: OpenMode | undefined; + } & Abortable) + | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options: + | ({ + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } & Abortable) + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | ( + & ObjectEncodingOptions + & Abortable + & { + flag?: OpenMode | undefined; + } + ) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronously open a directory for iterative scanning. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for more detail. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * + * Example using async iteration: + * + * ```js + * import { opendir } from 'node:fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + * @return Fulfills with an {fs.Dir}. + */ + function opendir(path: PathLike, options?: OpenDirOptions): Promise; + /** + * Returns an async iterator that watches for changes on `filename`, where `filename`is either a file or a directory. + * + * ```js + * const { watch } = require('node:fs/promises'); + * + * const ac = new AbortController(); + * const { signal } = ac; + * setTimeout(() => ac.abort(), 10000); + * + * (async () => { + * try { + * const watcher = watch(__filename, { signal }); + * for await (const event of watcher) + * console.log(event); + * } catch (err) { + * if (err.name === 'AbortError') + * return; + * throw err; + * } + * })(); + * ``` + * + * On most platforms, `'rename'` is emitted whenever a filename appears or + * disappears in the directory. + * + * All the `caveats` for `fs.watch()` also apply to `fsPromises.watch()`. 
+ * @since v15.9.0, v14.18.0 + * @return of objects with the properties: + */ + function watch( + filename: PathLike, + options: + | (WatchOptions & { + encoding: "buffer"; + }) + | "buffer", + ): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch(filename: PathLike, options?: WatchOptions | BufferEncoding): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch( + filename: PathLike, + options: WatchOptions | string, + ): AsyncIterable> | AsyncIterable>; + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + * @return Fulfills with `undefined` upon success. + */ + function cp(source: string | URL, destination: string | URL, opts?: CopyOptions): Promise; +} +declare module "node:fs/promises" { + export * from "fs/promises"; +} diff --git a/node_modules/@types/node/globals.d.ts b/node_modules/@types/node/globals.d.ts new file mode 100644 index 0000000..5f25006 --- /dev/null +++ b/node_modules/@types/node/globals.d.ts @@ -0,0 +1,411 @@ +export {}; // Make this a module + +// #region Fetch and friends +// Conditional type aliases, used at the end of this file. +// Will either be empty if lib-dom is included, or the undici version otherwise. +type _Request = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Request; +type _Response = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Response; +type _FormData = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").FormData; +type _Headers = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Headers; +type _RequestInit = typeof globalThis extends { onmessage: any } ? {} + : import("undici-types").RequestInit; +type _ResponseInit = typeof globalThis extends { onmessage: any } ? {} + : import("undici-types").ResponseInit; +type _File = typeof globalThis extends { onmessage: any } ? 
{} : import("node:buffer").File; +// #endregion Fetch and friends + +declare global { + // Declare "static" methods in Error + interface ErrorConstructor { + /** Create .stack property on a target object */ + captureStackTrace(targetObject: object, constructorOpt?: Function): void; + + /** + * Optional override for formatting stack traces + * + * @see https://v8.dev/docs/stack-trace-api#customizing-stack-traces + */ + prepareStackTrace?: ((err: Error, stackTraces: NodeJS.CallSite[]) => any) | undefined; + + stackTraceLimit: number; + } + + /*-----------------------------------------------* + * * + * GLOBAL * + * * + ------------------------------------------------*/ + + // For backwards compability + interface NodeRequire extends NodeJS.Require {} + interface RequireResolve extends NodeJS.RequireResolve {} + interface NodeModule extends NodeJS.Module {} + + var process: NodeJS.Process; + var console: Console; + + var __filename: string; + var __dirname: string; + + var require: NodeRequire; + var module: NodeModule; + + // Same as module.exports + var exports: any; + + /** + * Only available if `--expose-gc` is passed to the process. + */ + var gc: undefined | (() => void); + + // #region borrowed + // from https://github.com/microsoft/TypeScript/blob/38da7c600c83e7b31193a62495239a0fe478cb67/lib/lib.webworker.d.ts#L633 until moved to separate lib + /** A controller object that allows you to abort one or more DOM requests as and when desired. */ + interface AbortController { + /** + * Returns the AbortSignal object associated with this object. + */ + + readonly signal: AbortSignal; + /** + * Invoking this method will set this object's AbortSignal's aborted flag and signal to any observers that the associated activity is to be aborted. + */ + abort(reason?: any): void; + } + + /** A signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */ + interface AbortSignal extends EventTarget { + /** + * Returns true if this AbortSignal's AbortController has signaled to abort, and false otherwise. + */ + readonly aborted: boolean; + readonly reason: any; + onabort: null | ((this: AbortSignal, event: Event) => any); + throwIfAborted(): void; + } + + var AbortController: typeof globalThis extends { onmessage: any; AbortController: infer T } ? T + : { + prototype: AbortController; + new(): AbortController; + }; + + var AbortSignal: typeof globalThis extends { onmessage: any; AbortSignal: infer T } ? T + : { + prototype: AbortSignal; + new(): AbortSignal; + abort(reason?: any): AbortSignal; + timeout(milliseconds: number): AbortSignal; + }; + // #endregion borrowed + + // #region Disposable + interface SymbolConstructor { + /** + * A method that is used to release resources held by an object. Called by the semantics of the `using` statement. + */ + readonly dispose: unique symbol; + + /** + * A method that is used to asynchronously release resources held by an object. Called by the semantics of the `await using` statement. + */ + readonly asyncDispose: unique symbol; + } + + interface Disposable { + [Symbol.dispose](): void; + } + + interface AsyncDisposable { + [Symbol.asyncDispose](): PromiseLike; + } + // #endregion Disposable + + // #region ArrayLike.at() + interface RelativeIndexable { + /** + * Takes an integer value and returns the item at that index, + * allowing for positive and negative integers. + * Negative integers count back from the last item in the array. 
+ */ + at(index: number): T | undefined; + } + interface String extends RelativeIndexable {} + interface Array extends RelativeIndexable {} + interface ReadonlyArray extends RelativeIndexable {} + interface Int8Array extends RelativeIndexable {} + interface Uint8Array extends RelativeIndexable {} + interface Uint8ClampedArray extends RelativeIndexable {} + interface Int16Array extends RelativeIndexable {} + interface Uint16Array extends RelativeIndexable {} + interface Int32Array extends RelativeIndexable {} + interface Uint32Array extends RelativeIndexable {} + interface Float32Array extends RelativeIndexable {} + interface Float64Array extends RelativeIndexable {} + interface BigInt64Array extends RelativeIndexable {} + interface BigUint64Array extends RelativeIndexable {} + // #endregion ArrayLike.at() end + + /** + * @since v17.0.0 + * + * Creates a deep clone of an object. + */ + function structuredClone( + value: T, + transfer?: { transfer: ReadonlyArray }, + ): T; + + /*----------------------------------------------* + * * + * GLOBAL INTERFACES * + * * + *-----------------------------------------------*/ + namespace NodeJS { + interface CallSite { + /** + * Value of "this" + */ + getThis(): unknown; + + /** + * Type of "this" as a string. + * This is the name of the function stored in the constructor field of + * "this", if available. Otherwise the object's [[Class]] internal + * property. + */ + getTypeName(): string | null; + + /** + * Current function + */ + getFunction(): Function | undefined; + + /** + * Name of the current function, typically its name property. + * If a name property is not available an attempt will be made to try + * to infer a name from the function's context. + */ + getFunctionName(): string | null; + + /** + * Name of the property [of "this" or one of its prototypes] that holds + * the current function + */ + getMethodName(): string | null; + + /** + * Name of the script [if this function was defined in a script] + */ + getFileName(): string | undefined; + + /** + * Current line number [if this function was defined in a script] + */ + getLineNumber(): number | null; + + /** + * Current column number [if this function was defined in a script] + */ + getColumnNumber(): number | null; + + /** + * A call site object representing the location where eval was called + * [if this function was created using a call to eval] + */ + getEvalOrigin(): string | undefined; + + /** + * Is this a toplevel invocation, that is, is "this" the global object? + */ + isToplevel(): boolean; + + /** + * Does this call take place in code defined by a call to eval? + */ + isEval(): boolean; + + /** + * Is this call in native V8 code? + */ + isNative(): boolean; + + /** + * Is this a constructor call? + */ + isConstructor(): boolean; + + /** + * is this an async call (i.e. await, Promise.all(), or Promise.any())? + */ + isAsync(): boolean; + + /** + * is this an async call to Promise.all()? 
+ */ + isPromiseAll(): boolean; + + /** + * returns the index of the promise element that was followed in + * Promise.all() or Promise.any() for async stack traces, or null + * if the CallSite is not an async + */ + getPromiseIndex(): number | null; + + getScriptNameOrSourceURL(): string; + getScriptHash(): string; + + getEnclosingColumnNumber(): number; + getEnclosingLineNumber(): number; + getPosition(): number; + + toString(): string; + } + + interface ErrnoException extends Error { + errno?: number | undefined; + code?: string | undefined; + path?: string | undefined; + syscall?: string | undefined; + } + + interface ReadableStream extends EventEmitter { + readable: boolean; + read(size?: number): string | Buffer; + setEncoding(encoding: BufferEncoding): this; + pause(): this; + resume(): this; + isPaused(): boolean; + pipe(destination: T, options?: { end?: boolean | undefined }): T; + unpipe(destination?: WritableStream): this; + unshift(chunk: string | Uint8Array, encoding?: BufferEncoding): void; + wrap(oldStream: ReadableStream): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + + interface WritableStream extends EventEmitter { + writable: boolean; + write(buffer: Uint8Array | string, cb?: (err?: Error | null) => void): boolean; + write(str: string, encoding?: BufferEncoding, cb?: (err?: Error | null) => void): boolean; + end(cb?: () => void): this; + end(data: string | Uint8Array, cb?: () => void): this; + end(str: string, encoding?: BufferEncoding, cb?: () => void): this; + } + + interface ReadWriteStream extends ReadableStream, WritableStream {} + + interface RefCounted { + ref(): this; + unref(): this; + } + + type TypedArray = + | Uint8Array + | Uint8ClampedArray + | Uint16Array + | Uint32Array + | Int8Array + | Int16Array + | Int32Array + | BigUint64Array + | BigInt64Array + | Float32Array + | Float64Array; + type ArrayBufferView = TypedArray | DataView; + + interface Require { + (id: string): any; + resolve: RequireResolve; + cache: Dict; + /** + * @deprecated + */ + extensions: RequireExtensions; + main: Module | undefined; + } + + interface RequireResolve { + (id: string, options?: { paths?: string[] | undefined }): string; + paths(request: string): string[] | null; + } + + interface RequireExtensions extends Dict<(m: Module, filename: string) => any> { + ".js": (m: Module, filename: string) => any; + ".json": (m: Module, filename: string) => any; + ".node": (m: Module, filename: string) => any; + } + interface Module { + /** + * `true` if the module is running during the Node.js preload + */ + isPreloading: boolean; + exports: any; + require: Require; + id: string; + filename: string; + loaded: boolean; + /** @deprecated since v14.6.0 Please use `require.main` and `module.children` instead. */ + parent: Module | null | undefined; + children: Module[]; + /** + * @since v11.14.0 + * + * The directory name of the module. This is usually the same as the path.dirname() of the module.id. + */ + path: string; + paths: string[]; + } + + interface Dict { + [key: string]: T | undefined; + } + + interface ReadOnlyDict { + readonly [key: string]: T | undefined; + } + } + + interface RequestInit extends _RequestInit {} + + function fetch( + input: string | URL | globalThis.Request, + init?: RequestInit, + ): Promise; + + interface Request extends _Request {} + var Request: typeof globalThis extends { + onmessage: any; + Request: infer T; + } ? 
T + : typeof import("undici-types").Request; + + interface ResponseInit extends _ResponseInit {} + + interface Response extends _Response {} + var Response: typeof globalThis extends { + onmessage: any; + Response: infer T; + } ? T + : typeof import("undici-types").Response; + + interface FormData extends _FormData {} + var FormData: typeof globalThis extends { + onmessage: any; + FormData: infer T; + } ? T + : typeof import("undici-types").FormData; + + interface Headers extends _Headers {} + var Headers: typeof globalThis extends { + onmessage: any; + Headers: infer T; + } ? T + : typeof import("undici-types").Headers; + + interface File extends _File {} + var File: typeof globalThis extends { + onmessage: any; + File: infer T; + } ? T + : typeof import("node:buffer").File; +} diff --git a/node_modules/@types/node/globals.global.d.ts b/node_modules/@types/node/globals.global.d.ts new file mode 100644 index 0000000..ef1198c --- /dev/null +++ b/node_modules/@types/node/globals.global.d.ts @@ -0,0 +1 @@ +declare var global: typeof globalThis; diff --git a/node_modules/@types/node/http.d.ts b/node_modules/@types/node/http.d.ts new file mode 100644 index 0000000..98479fc --- /dev/null +++ b/node_modules/@types/node/http.d.ts @@ -0,0 +1,1889 @@ +/** + * To use the HTTP server and client one must `require('node:http')`. + * + * The HTTP interfaces in Node.js are designed to support many features + * of the protocol which have been traditionally difficult to use. + * In particular, large, possibly chunk-encoded, messages. The interface is + * careful to never buffer entire requests or responses, so the + * user is able to stream data. + * + * HTTP message headers are represented by an object like this: + * + * ```json + * { "content-length": "123", + * "content-type": "text/plain", + * "connection": "keep-alive", + * "host": "example.com", + * "accept": "*" } + * ``` + * + * Keys are lowercased. Values are not modified. + * + * In order to support the full spectrum of possible HTTP applications, the Node.js + * HTTP API is very low-level. It deals with stream handling and message + * parsing only. It parses a message into headers and body but it does not + * parse the actual headers or the body. + * + * See `message.headers` for details on how duplicate headers are handled. + * + * The raw headers as they were received are retained in the `rawHeaders`property, which is an array of `[key, value, key2, value2, ...]`. 
For + * example, the previous message header object might have a `rawHeaders`list like the following: + * + * ```js + * [ 'ConTent-Length', '123456', + * 'content-LENGTH', '123', + * 'content-type', 'text/plain', + * 'CONNECTION', 'keep-alive', + * 'Host', 'example.com', + * 'accepT', '*' ] + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/http.js) + */ +declare module "http" { + import * as stream from "node:stream"; + import { URL } from "node:url"; + import { LookupOptions } from "node:dns"; + import { EventEmitter } from "node:events"; + import { LookupFunction, Server as NetServer, Socket, TcpSocketConnectOpts } from "node:net"; + // incoming headers will never contain number + interface IncomingHttpHeaders extends NodeJS.Dict { + accept?: string | undefined; + "accept-language"?: string | undefined; + "accept-patch"?: string | undefined; + "accept-ranges"?: string | undefined; + "access-control-allow-credentials"?: string | undefined; + "access-control-allow-headers"?: string | undefined; + "access-control-allow-methods"?: string | undefined; + "access-control-allow-origin"?: string | undefined; + "access-control-expose-headers"?: string | undefined; + "access-control-max-age"?: string | undefined; + "access-control-request-headers"?: string | undefined; + "access-control-request-method"?: string | undefined; + age?: string | undefined; + allow?: string | undefined; + "alt-svc"?: string | undefined; + authorization?: string | undefined; + "cache-control"?: string | undefined; + connection?: string | undefined; + "content-disposition"?: string | undefined; + "content-encoding"?: string | undefined; + "content-language"?: string | undefined; + "content-length"?: string | undefined; + "content-location"?: string | undefined; + "content-range"?: string | undefined; + "content-type"?: string | undefined; + cookie?: string | undefined; + date?: string | undefined; + etag?: string | undefined; + expect?: string | undefined; + expires?: string | undefined; + forwarded?: string | undefined; + from?: string | undefined; + host?: string | undefined; + "if-match"?: string | undefined; + "if-modified-since"?: string | undefined; + "if-none-match"?: string | undefined; + "if-unmodified-since"?: string | undefined; + "last-modified"?: string | undefined; + location?: string | undefined; + origin?: string | undefined; + pragma?: string | undefined; + "proxy-authenticate"?: string | undefined; + "proxy-authorization"?: string | undefined; + "public-key-pins"?: string | undefined; + range?: string | undefined; + referer?: string | undefined; + "retry-after"?: string | undefined; + "sec-websocket-accept"?: string | undefined; + "sec-websocket-extensions"?: string | undefined; + "sec-websocket-key"?: string | undefined; + "sec-websocket-protocol"?: string | undefined; + "sec-websocket-version"?: string | undefined; + "set-cookie"?: string[] | undefined; + "strict-transport-security"?: string | undefined; + tk?: string | undefined; + trailer?: string | undefined; + "transfer-encoding"?: string | undefined; + upgrade?: string | undefined; + "user-agent"?: string | undefined; + vary?: string | undefined; + via?: string | undefined; + warning?: string | undefined; + "www-authenticate"?: string | undefined; + } + // outgoing headers allows numbers (as they are converted internally to strings) + type OutgoingHttpHeader = number | string | string[]; + interface OutgoingHttpHeaders extends NodeJS.Dict { + accept?: string | string[] | undefined; + "accept-charset"?: string | string[] | 
undefined; + "accept-encoding"?: string | string[] | undefined; + "accept-language"?: string | string[] | undefined; + "accept-ranges"?: string | undefined; + "access-control-allow-credentials"?: string | undefined; + "access-control-allow-headers"?: string | undefined; + "access-control-allow-methods"?: string | undefined; + "access-control-allow-origin"?: string | undefined; + "access-control-expose-headers"?: string | undefined; + "access-control-max-age"?: string | undefined; + "access-control-request-headers"?: string | undefined; + "access-control-request-method"?: string | undefined; + age?: string | undefined; + allow?: string | undefined; + authorization?: string | undefined; + "cache-control"?: string | undefined; + "cdn-cache-control"?: string | undefined; + connection?: string | string[] | undefined; + "content-disposition"?: string | undefined; + "content-encoding"?: string | undefined; + "content-language"?: string | undefined; + "content-length"?: string | number | undefined; + "content-location"?: string | undefined; + "content-range"?: string | undefined; + "content-security-policy"?: string | undefined; + "content-security-policy-report-only"?: string | undefined; + cookie?: string | string[] | undefined; + dav?: string | string[] | undefined; + dnt?: string | undefined; + date?: string | undefined; + etag?: string | undefined; + expect?: string | undefined; + expires?: string | undefined; + forwarded?: string | undefined; + from?: string | undefined; + host?: string | undefined; + "if-match"?: string | undefined; + "if-modified-since"?: string | undefined; + "if-none-match"?: string | undefined; + "if-range"?: string | undefined; + "if-unmodified-since"?: string | undefined; + "last-modified"?: string | undefined; + link?: string | string[] | undefined; + location?: string | undefined; + "max-forwards"?: string | undefined; + origin?: string | undefined; + prgama?: string | string[] | undefined; + "proxy-authenticate"?: string | string[] | undefined; + "proxy-authorization"?: string | undefined; + "public-key-pins"?: string | undefined; + "public-key-pins-report-only"?: string | undefined; + range?: string | undefined; + referer?: string | undefined; + "referrer-policy"?: string | undefined; + refresh?: string | undefined; + "retry-after"?: string | undefined; + "sec-websocket-accept"?: string | undefined; + "sec-websocket-extensions"?: string | string[] | undefined; + "sec-websocket-key"?: string | undefined; + "sec-websocket-protocol"?: string | string[] | undefined; + "sec-websocket-version"?: string | undefined; + server?: string | undefined; + "set-cookie"?: string | string[] | undefined; + "strict-transport-security"?: string | undefined; + te?: string | undefined; + trailer?: string | undefined; + "transfer-encoding"?: string | undefined; + "user-agent"?: string | undefined; + upgrade?: string | undefined; + "upgrade-insecure-requests"?: string | undefined; + vary?: string | undefined; + via?: string | string[] | undefined; + warning?: string | undefined; + "www-authenticate"?: string | string[] | undefined; + "x-content-type-options"?: string | undefined; + "x-dns-prefetch-control"?: string | undefined; + "x-frame-options"?: string | undefined; + "x-xss-protection"?: string | undefined; + } + interface ClientRequestArgs { + _defaultAgent?: Agent | undefined; + agent?: Agent | boolean | undefined; + auth?: string | null | undefined; + createConnection?: + | (( + options: ClientRequestArgs, + oncreate: (err: Error | null, socket: stream.Duplex) => void, + ) => 
stream.Duplex | null | undefined) + | undefined; + defaultPort?: number | string | undefined; + family?: number | undefined; + headers?: OutgoingHttpHeaders | undefined; + hints?: LookupOptions["hints"]; + host?: string | null | undefined; + hostname?: string | null | undefined; + insecureHTTPParser?: boolean | undefined; + localAddress?: string | undefined; + localPort?: number | undefined; + lookup?: LookupFunction | undefined; + /** + * @default 16384 + */ + maxHeaderSize?: number | undefined; + method?: string | undefined; + path?: string | null | undefined; + port?: number | string | null | undefined; + protocol?: string | null | undefined; + setHost?: boolean | undefined; + signal?: AbortSignal | undefined; + socketPath?: string | undefined; + timeout?: number | undefined; + uniqueHeaders?: Array | undefined; + joinDuplicateHeaders?: boolean; + } + interface ServerOptions< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > { + /** + * Specifies the `IncomingMessage` class to be used. Useful for extending the original `IncomingMessage`. + */ + IncomingMessage?: Request | undefined; + /** + * Specifies the `ServerResponse` class to be used. Useful for extending the original `ServerResponse`. + */ + ServerResponse?: Response | undefined; + /** + * Sets the timeout value in milliseconds for receiving the entire request from the client. + * @see Server.requestTimeout for more information. + * @default 300000 + * @since v18.0.0 + */ + requestTimeout?: number | undefined; + /** + * It joins the field line values of multiple headers in a request with `, ` instead of discarding the duplicates. + * @default false + * @since v18.14.0 + */ + joinDuplicateHeaders?: boolean; + /** + * The number of milliseconds of inactivity a server needs to wait for additional incoming data, + * after it has finished writing the last response, before a socket will be destroyed. + * @see Server.keepAliveTimeout for more information. + * @default 5000 + * @since v18.0.0 + */ + keepAliveTimeout?: number | undefined; + /** + * Sets the interval value in milliseconds to check for request and headers timeout in incomplete requests. + * @default 30000 + */ + connectionsCheckingInterval?: number | undefined; + /** + * Optionally overrides all `socket`s' `readableHighWaterMark` and `writableHighWaterMark`. + * This affects `highWaterMark` property of both `IncomingMessage` and `ServerResponse`. + * Default: @see stream.getDefaultHighWaterMark(). + * @since v20.1.0 + */ + highWaterMark?: number | undefined; + /** + * Use an insecure HTTP parser that accepts invalid HTTP headers when `true`. + * Using the insecure parser should be avoided. + * See --insecure-http-parser for more information. + * @default false + */ + insecureHTTPParser?: boolean | undefined; + /** + * Optionally overrides the value of + * `--max-http-header-size` for requests received by this server, i.e. + * the maximum length of request headers in bytes. + * @default 16384 + * @since v13.3.0 + */ + maxHeaderSize?: number | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default true + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. 
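+         *
+         * A minimal sketch of passing this option to `createServer()` (not part of the upstream Node.js docs; the port and handler are arbitrary):
+         *
+         * ```js
+         * import http from 'node:http';
+         *
+         * const server = http.createServer(
+         *   { keepAlive: true, keepAliveInitialDelay: 60_000 },
+         *   (req, res) => res.end('ok'),
+         * );
+         * server.listen(8080);
+         * ```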
+ * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. + * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + /** + * A list of response headers that should be sent only once. + * If the header's value is an array, the items will be joined using `; `. + */ + uniqueHeaders?: Array | undefined; + } + type RequestListener< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > = (req: InstanceType, res: InstanceType & { req: InstanceType }) => void; + /** + * @since v0.1.17 + */ + class Server< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > extends NetServer { + constructor(requestListener?: RequestListener); + constructor(options: ServerOptions, requestListener?: RequestListener); + /** + * Sets the timeout value for sockets, and emits a `'timeout'` event on + * the Server object, passing the socket as an argument, if a timeout + * occurs. + * + * If there is a `'timeout'` event listener on the Server object, then it + * will be called with the timed-out socket as an argument. + * + * By default, the Server does not timeout sockets. However, if a callback + * is assigned to the Server's `'timeout'` event, timeouts must be handled + * explicitly. + * @since v0.9.12 + * @param [msecs=0 (no timeout)] + */ + setTimeout(msecs?: number, callback?: () => void): this; + setTimeout(callback: () => void): this; + /** + * Limits maximum incoming headers count. If set to 0, no limit will be applied. + * @since v0.7.0 + */ + maxHeadersCount: number | null; + /** + * The maximum number of requests socket can handle + * before closing keep alive connection. + * + * A value of `0` will disable the limit. + * + * When the limit is reached it will set the `Connection` header value to `close`, + * but will not actually close the connection, subsequent requests sent + * after the limit is reached will get `503 Service Unavailable` as a response. + * @since v16.10.0 + */ + maxRequestsPerSocket: number | null; + /** + * The number of milliseconds of inactivity before a socket is presumed + * to have timed out. + * + * A value of `0` will disable the timeout behavior on incoming connections. + * + * The socket timeout logic is set up on connection, so changing this + * value only affects new connections to the server, not any existing connections. + * @since v0.9.12 + */ + timeout: number; + /** + * Limit the amount of time the parser will wait to receive the complete HTTP + * headers. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v11.3.0, v10.14.0 + */ + headersTimeout: number; + /** + * The number of milliseconds of inactivity a server needs to wait for additional + * incoming data, after it has finished writing the last response, before a socket + * will be destroyed. If the server receives new data before the keep-alive + * timeout has fired, it will reset the regular inactivity timeout, i.e.,`server.timeout`. 
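+         *
+         * As an illustrative sketch (not from the upstream docs; the port and timeout values are arbitrary):
+         *
+         * ```js
+         * import http from 'node:http';
+         *
+         * const server = http.createServer((req, res) => res.end('ok'));
+         * server.keepAliveTimeout = 10_000; // milliseconds
+         * // headersTimeout is commonly kept slightly above keepAliveTimeout
+         * server.headersTimeout = 11_000;
+         * server.listen(8080);
+         * ```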
+ * + * A value of `0` will disable the keep-alive timeout behavior on incoming + * connections. + * A value of `0` makes the http server behave similarly to Node.js versions prior + * to 8.0.0, which did not have a keep-alive timeout. + * + * The socket timeout logic is set up on connection, so changing this value only + * affects new connections to the server, not any existing connections. + * @since v8.0.0 + */ + keepAliveTimeout: number; + /** + * Sets the timeout value in milliseconds for receiving the entire request from + * the client. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v14.11.0 + */ + requestTimeout: number; + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request + * or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Socket) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "checkContinue", listener: RequestListener): this; + addListener(event: "checkExpectation", listener: RequestListener): this; + addListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + addListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + addListener(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + addListener(event: "request", listener: RequestListener): this; + addListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Socket): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit( + event: "checkContinue", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit( + event: "checkExpectation", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: "clientError", err: Error, socket: stream.Duplex): boolean; + emit(event: "connect", req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + emit(event: "dropRequest", req: InstanceType, socket: stream.Duplex): boolean; + emit( + event: "request", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: "upgrade", req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Socket) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "checkContinue", listener: RequestListener): this; + on(event: "checkExpectation", listener: 
RequestListener): this; + on(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + on(event: "connect", listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + on(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + on(event: "request", listener: RequestListener): this; + on(event: "upgrade", listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Socket) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "checkContinue", listener: RequestListener): this; + once(event: "checkExpectation", listener: RequestListener): this; + once(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + once( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + once(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + once(event: "request", listener: RequestListener): this; + once( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Socket) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "checkContinue", listener: RequestListener): this; + prependListener(event: "checkExpectation", listener: RequestListener): this; + prependListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + prependListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener( + event: "dropRequest", + listener: (req: InstanceType, socket: stream.Duplex) => void, + ): this; + prependListener(event: "request", listener: RequestListener): this; + prependListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Socket) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "checkContinue", listener: RequestListener): this; + prependOnceListener(event: "checkExpectation", listener: RequestListener): this; + prependOnceListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + prependOnceListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener( + event: "dropRequest", + listener: (req: InstanceType, socket: stream.Duplex) => void, + ): this; + prependOnceListener(event: "request", listener: RequestListener): this; + prependOnceListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => 
void, + ): this; + } + /** + * This class serves as the parent class of {@link ClientRequest} and {@link ServerResponse}. It is an abstract outgoing message from + * the perspective of the participants of an HTTP transaction. + * @since v0.1.17 + */ + class OutgoingMessage extends stream.Writable { + readonly req: Request; + chunkedEncoding: boolean; + shouldKeepAlive: boolean; + useChunkedEncodingByDefault: boolean; + sendDate: boolean; + /** + * @deprecated Use `writableEnded` instead. + */ + finished: boolean; + /** + * Read-only. `true` if the headers were sent, otherwise `false`. + * @since v0.9.3 + */ + readonly headersSent: boolean; + /** + * Alias of `outgoingMessage.socket`. + * @since v0.3.0 + * @deprecated Since v15.12.0,v14.17.1 - Use `socket` instead. + */ + readonly connection: Socket | null; + /** + * Reference to the underlying socket. Usually, users will not want to access + * this property. + * + * After calling `outgoingMessage.end()`, this property will be nulled. + * @since v0.3.0 + */ + readonly socket: Socket | null; + constructor(); + /** + * Once a socket is associated with the message and is connected,`socket.setTimeout()` will be called with `msecs` as the first parameter. + * @since v0.9.12 + * @param callback Optional function to be called when a timeout occurs. Same as binding to the `timeout` event. + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * Sets a single header value. If the header already exists in the to-be-sent + * headers, its value will be replaced. Use an array of strings to send multiple + * headers with the same name. + * @since v0.4.0 + * @param name Header name + * @param value Header value + */ + setHeader(name: string, value: number | string | readonly string[]): this; + /** + * Append a single header value for the header object. + * + * If the value is an array, this is equivalent of calling this method multiple + * times. + * + * If there were no previous value for the header, this is equivalent of calling `outgoingMessage.setHeader(name, value)`. + * + * Depending of the value of `options.uniqueHeaders` when the client request or the + * server were created, this will end up in the header being sent multiple times or + * a single time with values joined using `; `. + * @since v18.3.0, v16.17.0 + * @param name Header name + * @param value Header value + */ + appendHeader(name: string, value: string | readonly string[]): this; + /** + * Gets the value of the HTTP header with the given name. If that header is not + * set, the returned value will be `undefined`. + * @since v0.4.0 + * @param name Name of header + */ + getHeader(name: string): number | string | string[] | undefined; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow + * copy is used, array values may be mutated without additional calls to + * various header-related HTTP module methods. The keys of the returned + * object are the header names and the values are the respective header + * values. All header names are lowercase. + * + * The object returned by the `outgoingMessage.getHeaders()` method does + * not prototypically inherit from the JavaScript `Object`. This means that + * typical `Object` methods such as `obj.toString()`, `obj.hasOwnProperty()`, + * and others are not defined and will not work. 
+ * + * ```js + * outgoingMessage.setHeader('Foo', 'bar'); + * outgoingMessage.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = outgoingMessage.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v7.7.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns an array containing the unique names of the current outgoing headers. + * All names are lowercase. + * @since v7.7.0 + */ + getHeaderNames(): string[]; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name is case-insensitive. + * + * ```js + * const hasContentType = outgoingMessage.hasHeader('content-type'); + * ``` + * @since v7.7.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that is queued for implicit sending. + * + * ```js + * outgoingMessage.removeHeader('Content-Encoding'); + * ``` + * @since v0.4.0 + * @param name Header name + */ + removeHeader(name: string): void; + /** + * Adds HTTP trailers (headers but at the end of the message) to the message. + * + * Trailers will **only** be emitted if the message is chunked encoded. If not, + * the trailers will be silently discarded. + * + * HTTP requires the `Trailer` header to be sent to emit trailers, + * with a list of header field names in its value, e.g. + * + * ```js + * message.writeHead(200, { 'Content-Type': 'text/plain', + * 'Trailer': 'Content-MD5' }); + * message.write(fileData); + * message.addTrailers({ 'Content-MD5': '7895bf4b8828b55ceaf47747b4bca667' }); + * message.end(); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v0.3.0 + */ + addTrailers(headers: OutgoingHttpHeaders | ReadonlyArray<[string, string]>): void; + /** + * Flushes the message headers. + * + * For efficiency reason, Node.js normally buffers the message headers + * until `outgoingMessage.end()` is called or the first chunk of message data + * is written. It then tries to pack the headers and data into a single TCP + * packet. + * + * It is usually desired (it saves a TCP round-trip), but not when the first + * data is not sent until possibly much later. `outgoingMessage.flushHeaders()`bypasses the optimization and kickstarts the message. + * @since v1.6.0 + */ + flushHeaders(): void; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v0.1.17 + */ + class ServerResponse extends OutgoingMessage { + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v0.4.0 + */ + statusCode: number; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status message that will be sent to the client when + * the headers get flushed. If this is left as `undefined` then the standard + * message for the status code will be used. + * + * ```js + * response.statusMessage = 'Not found'; + * ``` + * + * After response header was sent to the client, this property indicates the + * status message which was sent out. 
+ * @since v0.11.8 + */ + statusMessage: string; + /** + * If set to `true`, Node.js will check whether the `Content-Length`header value and the size of the body, in bytes, are equal. + * Mismatching the `Content-Length` header value will result + * in an `Error` being thrown, identified by `code:``'ERR_HTTP_CONTENT_LENGTH_MISMATCH'`. + * @since v18.10.0, v16.18.0 + */ + strictContentLength: boolean; + constructor(req: Request); + assignSocket(socket: Socket): void; + detachSocket(socket: Socket): void; + /** + * Sends an HTTP/1.1 100 Continue message to the client, indicating that + * the request body should be sent. See the `'checkContinue'` event on`Server`. + * @since v0.3.0 + */ + writeContinue(callback?: () => void): void; + /** + * Sends an HTTP/1.1 103 Early Hints message to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. + * The `hints` is an object containing the values of headers to be sent with + * early hints message. The optional `callback` argument will be called when + * the response message has been written. + * + * **Example** + * + * ```js + * const earlyHintsLink = '; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '; rel=preload; as=style', + * '; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * 'x-trace-id': 'id for diagnostics', + * }); + * + * const earlyHintsCallback = () => console.log('early hints message sent'); + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * }, earlyHintsCallback); + * ``` + * @since v18.11.0 + * @param hints An object containing the values of headers + * @param callback Will be called when the response message has been written + */ + writeEarlyHints(hints: Record, callback?: () => void): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. + * Optionally one can give a human-readable `statusMessage` as the second + * argument. + * + * `headers` may be an `Array` where the keys and values are in the same list. + * It is _not_ a list of tuples. So, the even-numbered offsets are key values, + * and the odd-numbered offsets are the associated values. The array is in the same + * format as `request.rawHeaders`. + * + * Returns a reference to the `ServerResponse`, so that calls can be chained. + * + * ```js + * const body = 'hello world'; + * response + * .writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain', + * }) + * .end(body); + * ``` + * + * This method must only be called once on a message and it must + * be called before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * If this method is called and `response.setHeader()` has not been called, + * it will directly write the supplied header values onto the network channel + * without caching internally, and the `response.getHeader()` on the header + * will not yield the expected result. 
If progressive population of headers is + * desired with potential future retrieval and modification, use `response.setHeader()` instead. + * + * ```js + * // Returns content-type = text/plain + * const server = http.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain' }); + * res.end('ok'); + * }); + * ``` + * + * `Content-Length` is read in bytes, not characters. Use `Buffer.byteLength()` to determine the length of the body in bytes. Node.js + * will check whether `Content-Length` and the length of the body which has + * been transmitted are equal or not. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a \[`Error`\]\[\] being thrown. + * @since v0.1.30 + */ + writeHead( + statusCode: number, + statusMessage?: string, + headers?: OutgoingHttpHeaders | OutgoingHttpHeader[], + ): this; + writeHead(statusCode: number, headers?: OutgoingHttpHeaders | OutgoingHttpHeader[]): this; + /** + * Sends a HTTP/1.1 102 Processing message to the client, indicating that + * the request body should be sent. + * @since v10.0.0 + */ + writeProcessing(): void; + } + interface InformationEvent { + statusCode: number; + statusMessage: string; + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + headers: IncomingHttpHeaders; + rawHeaders: string[]; + } + /** + * This object is created internally and returned from {@link request}. It + * represents an _in-progress_ request whose header has already been queued. The + * header is still mutable using the `setHeader(name, value)`,`getHeader(name)`, `removeHeader(name)` API. The actual header will + * be sent along with the first data chunk or when calling `request.end()`. + * + * To get the response, add a listener for `'response'` to the request object.`'response'` will be emitted from the request object when the response + * headers have been received. The `'response'` event is executed with one + * argument which is an instance of {@link IncomingMessage}. + * + * During the `'response'` event, one can add listeners to the + * response object; particularly to listen for the `'data'` event. + * + * If no `'response'` handler is added, then the response will be + * entirely discarded. However, if a `'response'` event handler is added, + * then the data from the response object **must** be consumed, either by + * calling `response.read()` whenever there is a `'readable'` event, or + * by adding a `'data'` handler, or by calling the `.resume()` method. + * Until the data is consumed, the `'end'` event will not fire. Also, until + * the data is read it will consume memory that can eventually lead to a + * 'process out of memory' error. + * + * For backward compatibility, `res` will only emit `'error'` if there is an`'error'` listener registered. + * + * Set `Content-Length` header to limit the response body size. + * If `response.strictContentLength` is set to `true`, mismatching the`Content-Length` header value will result in an `Error` being thrown, + * identified by `code:``'ERR_HTTP_CONTENT_LENGTH_MISMATCH'`. + * + * `Content-Length` value should be in bytes, not characters. Use `Buffer.byteLength()` to determine the length of the body in bytes. + * @since v0.1.17 + */ + class ClientRequest extends OutgoingMessage { + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. 
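+         *
+         * A hedged sketch (not upstream documentation; the URL is arbitrary, and `destroyed` is the non-deprecated check noted below):
+         *
+         * ```js
+         * import http from 'node:http';
+         *
+         * const req = http.get('http://localhost:8080/', (res) => res.resume());
+         * req.on('error', (err) => {
+         *   if (req.destroyed) {
+         *     console.error('request torn down before completing:', err.code);
+         *   }
+         * });
+         * // destroying before a socket is assigned emits 'error' (ECONNRESET) and then 'close'
+         * req.destroy();
+         * ```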
+ * @since v0.11.14 + * @deprecated Since v17.0.0,v16.12.0 - Check `destroyed` instead. + */ + aborted: boolean; + /** + * The request host. + * @since v14.5.0, v12.19.0 + */ + host: string; + /** + * The request protocol. + * @since v14.5.0, v12.19.0 + */ + protocol: string; + /** + * When sending request through a keep-alive enabled agent, the underlying socket + * might be reused. But if server closes connection at unfortunate time, client + * may run into a 'ECONNRESET' error. + * + * ```js + * import http from 'node:http'; + * + * // Server has a 5 seconds keep-alive timeout by default + * http + * .createServer((req, res) => { + * res.write('hello\n'); + * res.end(); + * }) + * .listen(3000); + * + * setInterval(() => { + * // Adapting a keep-alive agent + * http.get('http://localhost:3000', { agent }, (res) => { + * res.on('data', (data) => { + * // Do nothing + * }); + * }); + * }, 5000); // Sending request on 5s interval so it's easy to hit idle timeout + * ``` + * + * By marking a request whether it reused socket or not, we can do + * automatic error retry base on it. + * + * ```js + * import http from 'node:http'; + * const agent = new http.Agent({ keepAlive: true }); + * + * function retriableRequest() { + * const req = http + * .get('http://localhost:3000', { agent }, (res) => { + * // ... + * }) + * .on('error', (err) => { + * // Check if retry is needed + * if (req.reusedSocket && err.code === 'ECONNRESET') { + * retriableRequest(); + * } + * }); + * } + * + * retriableRequest(); + * ``` + * @since v13.0.0, v12.16.0 + */ + reusedSocket: boolean; + /** + * Limits maximum response headers count. If set to 0, no limit will be applied. + */ + maxHeadersCount: number; + constructor(url: string | URL | ClientRequestArgs, cb?: (res: IncomingMessage) => void); + /** + * The request method. + * @since v0.1.97 + */ + method: string; + /** + * The request path. + * @since v0.4.0 + */ + path: string; + /** + * Marks the request as aborting. Calling this will cause remaining data + * in the response to be dropped and the socket to be destroyed. + * @since v0.3.8 + * @deprecated Since v14.1.0,v13.14.0 - Use `destroy` instead. + */ + abort(): void; + onSocket(socket: Socket): void; + /** + * Once a socket is assigned to this request and is connected `socket.setTimeout()` will be called. + * @since v0.5.9 + * @param timeout Milliseconds before a request times out. + * @param callback Optional function to be called when a timeout occurs. Same as binding to the `'timeout'` event. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Once a socket is assigned to this request and is connected `socket.setNoDelay()` will be called. + * @since v0.5.9 + */ + setNoDelay(noDelay?: boolean): void; + /** + * Once a socket is assigned to this request and is connected `socket.setKeepAlive()` will be called. + * @since v0.5.9 + */ + setSocketKeepAlive(enable?: boolean, initialDelay?: number): void; + /** + * Returns an array containing the unique names of the current outgoing raw + * headers. Header names are returned with their exact casing being set. 
+ * + * ```js + * request.setHeader('Foo', 'bar'); + * request.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = request.getRawHeaderNames(); + * // headerNames === ['Foo', 'Set-Cookie'] + * ``` + * @since v15.13.0, v14.17.0 + */ + getRawHeaderNames(): string[]; + /** + * @deprecated + */ + addListener(event: "abort", listener: () => void): this; + addListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: "continue", listener: () => void): this; + addListener(event: "information", listener: (info: InformationEvent) => void): this; + addListener(event: "response", listener: (response: IncomingMessage) => void): this; + addListener(event: "socket", listener: (socket: Socket) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + on(event: "abort", listener: () => void): this; + on(event: "connect", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: "continue", listener: () => void): this; + on(event: "information", listener: (info: InformationEvent) => void): this; + on(event: "response", listener: (response: IncomingMessage) => void): this; + on(event: "socket", listener: (socket: Socket) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: "upgrade", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + once(event: "abort", listener: () => void): this; + once(event: "connect", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: "continue", listener: () => void): this; + once(event: "information", listener: (info: InformationEvent) => void): this; + once(event: "response", listener: (response: IncomingMessage) => void): this; + once(event: "socket", listener: (socket: Socket) => void): this; + once(event: "timeout", listener: () => void): this; + once(event: "upgrade", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, 
listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependListener(event: "abort", listener: () => void): this; + prependListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: "continue", listener: () => void): this; + prependListener(event: "information", listener: (info: InformationEvent) => void): this; + prependListener(event: "response", listener: (response: IncomingMessage) => void): this; + prependListener(event: "socket", listener: (socket: Socket) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependOnceListener(event: "abort", listener: () => void): this; + prependOnceListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: "continue", listener: () => void): this; + prependOnceListener(event: "information", listener: (info: InformationEvent) => void): this; + prependOnceListener(event: "response", listener: (response: IncomingMessage) => void): this; + prependOnceListener(event: "socket", listener: (socket: Socket) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * An `IncomingMessage` object is created by {@link Server} or {@link ClientRequest} and passed as the first argument to the `'request'` and `'response'` event respectively. It may be used to + * access response + * status, headers, and data. + * + * Different from its `socket` value which is a subclass of `stream.Duplex`, the`IncomingMessage` itself extends `stream.Readable` and is created separately to + * parse and emit the incoming HTTP headers and payload, as the underlying socket + * may be reused multiple times in case of keep-alive. + * @since v0.1.17 + */ + class IncomingMessage extends stream.Readable { + constructor(socket: Socket); + /** + * The `message.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + * @deprecated Since v17.0.0,v16.12.0 - Check `message.destroyed` from stream.Readable. 
+ */ + aborted: boolean; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. + * Probably either `'1.1'` or `'1.0'`. + * + * Also `message.httpVersionMajor` is the first integer and`message.httpVersionMinor` is the second. + * @since v0.1.1 + */ + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + /** + * The `message.complete` property will be `true` if a complete HTTP message has + * been received and successfully parsed. + * + * This property is particularly useful as a means of determining if a client or + * server fully transmitted a message before a connection was terminated: + * + * ```js + * const req = http.request({ + * host: '127.0.0.1', + * port: 8080, + * method: 'POST', + * }, (res) => { + * res.resume(); + * res.on('end', () => { + * if (!res.complete) + * console.error( + * 'The connection was terminated while the message was still being sent'); + * }); + * }); + * ``` + * @since v0.3.0 + */ + complete: boolean; + /** + * Alias for `message.socket`. + * @since v0.1.90 + * @deprecated Since v16.0.0 - Use `socket`. + */ + connection: Socket; + /** + * The `net.Socket` object associated with the connection. + * + * With HTTPS support, use `request.socket.getPeerCertificate()` to obtain the + * client's authentication details. + * + * This property is guaranteed to be an instance of the `net.Socket` class, + * a subclass of `stream.Duplex`, unless the user specified a socket + * type other than `net.Socket` or internally nulled. + * @since v0.3.0 + */ + socket: Socket; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.headers); + * ``` + * + * Duplicates in raw headers are handled in the following ways, depending on the + * header name: + * + * * Duplicates of `age`, `authorization`, `content-length`, `content-type`,`etag`, `expires`, `from`, `host`, `if-modified-since`, `if-unmodified-since`,`last-modified`, `location`, + * `max-forwards`, `proxy-authorization`, `referer`,`retry-after`, `server`, or `user-agent` are discarded. + * To allow duplicate values of the headers listed above to be joined, + * use the option `joinDuplicateHeaders` in {@link request} and {@link createServer}. See RFC 9110 Section 5.3 for more + * information. + * * `set-cookie` is always an array. Duplicates are added to the array. + * * For duplicate `cookie` headers, the values are joined together with `; `. + * * For all other headers, the values are joined together with `, `. + * @since v0.1.5 + */ + headers: IncomingHttpHeaders; + /** + * Similar to `message.headers`, but there is no join logic and the values are + * always arrays of strings, even for headers received just once. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': ['curl/7.22.0'], + * // host: ['127.0.0.1:8000'], + * // accept: ['*'] } + * console.log(request.headersDistinct); + * ``` + * @since v18.3.0, v16.17.0 + */ + headersDistinct: NodeJS.Dict; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. 
+ * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v0.11.6 + */ + rawHeaders: string[]; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v0.3.0 + */ + trailers: NodeJS.Dict; + /** + * Similar to `message.trailers`, but there is no join logic and the values are + * always arrays of strings, even for headers received just once. + * Only populated at the `'end'` event. + * @since v18.3.0, v16.17.0 + */ + trailersDistinct: NodeJS.Dict; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v0.11.6 + */ + rawTrailers: string[]; + /** + * Calls `message.socket.setTimeout(msecs, callback)`. + * @since v0.5.9 + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * **Only valid for request obtained from {@link Server}.** + * + * The request method as a string. Read only. Examples: `'GET'`, `'DELETE'`. + * @since v0.1.1 + */ + method?: string | undefined; + /** + * **Only valid for request obtained from {@link Server}.** + * + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. Take the following request: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * To parse the URL into its parts: + * + * ```js + * new URL(request.url, `http://${request.headers.host}`); + * ``` + * + * When `request.url` is `'/status?name=ryan'` and `request.headers.host` is`'localhost:3000'`: + * + * ```console + * $ node + * > new URL(request.url, `http://${request.headers.host}`) + * URL { + * href: 'http://localhost:3000/status?name=ryan', + * origin: 'http://localhost:3000', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'localhost:3000', + * hostname: 'localhost', + * port: '3000', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v0.1.90 + */ + url?: string | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The 3-digit HTTP response status code. E.G. `404`. + * @since v0.1.1 + */ + statusCode?: number | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The HTTP response status message (reason phrase). E.G. `OK` or `Internal Server Error`. + * @since v0.11.10 + */ + statusMessage?: string | undefined; + /** + * Calls `destroy()` on the socket that received the `IncomingMessage`. If `error`is provided, an `'error'` event is emitted on the socket and `error` is passed + * as an argument to any listeners on the event. + * @since v0.3.0 + */ + destroy(error?: Error): this; + } + interface AgentOptions extends Partial { + /** + * Keep sockets around in a pool to be used by other requests in the future. Default = false + */ + keepAlive?: boolean | undefined; + /** + * When using HTTP KeepAlive, how often to send TCP KeepAlive packets over sockets being kept alive. Default = 1000. + * Only relevant if keepAlive is set to true. + */ + keepAliveMsecs?: number | undefined; + /** + * Maximum number of sockets to allow per host. 
Default for Node 0.10 is 5, default for Node 0.12 is Infinity + */ + maxSockets?: number | undefined; + /** + * Maximum number of sockets allowed for all hosts in total. Each request will use a new socket until the maximum is reached. Default: Infinity. + */ + maxTotalSockets?: number | undefined; + /** + * Maximum number of sockets to leave open in a free state. Only relevant if keepAlive is set to true. Default = 256. + */ + maxFreeSockets?: number | undefined; + /** + * Socket timeout in milliseconds. This will set the timeout after the socket is connected. + */ + timeout?: number | undefined; + /** + * Scheduling strategy to apply when picking the next free socket to use. + * @default `lifo` + */ + scheduling?: "fifo" | "lifo" | undefined; + } + /** + * An `Agent` is responsible for managing connection persistence + * and reuse for HTTP clients. It maintains a queue of pending requests + * for a given host and port, reusing a single socket connection for each + * until the queue is empty, at which time the socket is either destroyed + * or put into a pool where it is kept to be used again for requests to the + * same host and port. Whether it is destroyed or pooled depends on the`keepAlive` `option`. + * + * Pooled connections have TCP Keep-Alive enabled for them, but servers may + * still close idle connections, in which case they will be removed from the + * pool and a new connection will be made when a new HTTP request is made for + * that host and port. Servers may also refuse to allow multiple requests + * over the same connection, in which case the connection will have to be + * remade for every request and cannot be pooled. The `Agent` will still make + * the requests to that server, but each one will occur over a new connection. + * + * When a connection is closed by the client or the server, it is removed + * from the pool. Any unused sockets in the pool will be unrefed so as not + * to keep the Node.js process running when there are no outstanding requests. + * (see `socket.unref()`). + * + * It is good practice, to `destroy()` an `Agent` instance when it is no + * longer in use, because unused sockets consume OS resources. + * + * Sockets are removed from an agent when the socket emits either + * a `'close'` event or an `'agentRemove'` event. When intending to keep one + * HTTP request open for a long time without keeping it in the agent, something + * like the following may be done: + * + * ```js + * http.get(options, (res) => { + * // Do stuff + * }).on('socket', (socket) => { + * socket.emit('agentRemove'); + * }); + * ``` + * + * An agent may also be used for an individual request. By providing`{agent: false}` as an option to the `http.get()` or `http.request()`functions, a one-time use `Agent` with default options + * will be used + * for the client connection. + * + * `agent:false`: + * + * ```js + * http.get({ + * hostname: 'localhost', + * port: 80, + * path: '/', + * agent: false, // Create a new agent just for this one request + * }, (res) => { + * // Do stuff with response + * }); + * ``` + * @since v0.3.4 + */ + class Agent extends EventEmitter { + /** + * By default set to 256. For agents with `keepAlive` enabled, this + * sets the maximum number of sockets that will be left open in the free + * state. + * @since v0.11.7 + */ + maxFreeSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open per origin. Origin is the returned value of `agent.getName()`. 
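+         *
+         * An illustrative sketch (not from the upstream docs; the URL and limits are arbitrary):
+         *
+         * ```js
+         * import http from 'node:http';
+         *
+         * const agent = new http.Agent({ keepAlive: true, maxSockets: 10 });
+         * for (let i = 0; i < 50; i++) {
+         *   // at most 10 requests run concurrently per origin; the rest queue in the agent
+         *   http.get('http://localhost:3000/', { agent }, (res) => res.resume());
+         * }
+         * ```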
+ * @since v0.3.6 + */ + maxSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open. Unlike `maxSockets`, this parameter applies across all origins. + * @since v14.5.0, v12.19.0 + */ + maxTotalSockets: number; + /** + * An object which contains arrays of sockets currently awaiting use by + * the agent when `keepAlive` is enabled. Do not modify. + * + * Sockets in the `freeSockets` list will be automatically destroyed and + * removed from the array on `'timeout'`. + * @since v0.11.4 + */ + readonly freeSockets: NodeJS.ReadOnlyDict; + /** + * An object which contains arrays of sockets currently in use by the + * agent. Do not modify. + * @since v0.3.6 + */ + readonly sockets: NodeJS.ReadOnlyDict; + /** + * An object which contains queues of requests that have not yet been assigned to + * sockets. Do not modify. + * @since v0.5.9 + */ + readonly requests: NodeJS.ReadOnlyDict; + constructor(opts?: AgentOptions); + /** + * Destroy any sockets that are currently in use by the agent. + * + * It is usually not necessary to do this. However, if using an + * agent with `keepAlive` enabled, then it is best to explicitly shut down + * the agent when it is no longer needed. Otherwise, + * sockets might stay open for quite a long time before the server + * terminates them. + * @since v0.11.4 + */ + destroy(): void; + } + const METHODS: string[]; + const STATUS_CODES: { + [errorCode: number]: string | undefined; + [errorCode: string]: string | undefined; + }; + /** + * Returns a new instance of {@link Server}. + * + * The `requestListener` is a function which is automatically + * added to the `'request'` event. + * + * ```js + * import http from 'node:http'; + * + * // Create a local server to receive data from + * const server = http.createServer((req, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * + * ```js + * import http from 'node:http'; + * + * // Create a local server to receive data from + * const server = http.createServer(); + * + * // Listen to the request event + * server.on('request', (request, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * @since v0.1.13 + */ + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + >(requestListener?: RequestListener): Server; + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + >( + options: ServerOptions, + requestListener?: RequestListener, + ): Server; + // although RequestOptions are passed as ClientRequestArgs to ClientRequest directly, + // create interface RequestOptions would make the naming more clear to developers + interface RequestOptions extends ClientRequestArgs {} + /** + * `options` in `socket.connect()` are also supported. + * + * Node.js maintains several connections per server to make HTTP requests. + * This function allows one to transparently issue requests. + * + * `url` can be a string or a `URL` object. If `url` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. 
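+     *
+     * For example, a small sketch of the `URL` form (not from the upstream docs; host and path are arbitrary):
+     *
+     * ```js
+     * import http from 'node:http';
+     *
+     * const req = http.request(new URL('http://localhost:8080/status'), { method: 'HEAD' }, (res) => {
+     *   console.log(res.statusCode);
+     *   res.resume();
+     * });
+     * req.end();
+     * ```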
+ * + * If both `url` and `options` are specified, the objects are merged, with the`options` properties taking precedence. + * + * The optional `callback` parameter will be added as a one-time listener for + * the `'response'` event. + * + * `http.request()` returns an instance of the {@link ClientRequest} class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * import http from 'node:http'; + * import { Buffer } from 'node:buffer'; + * + * const postData = JSON.stringify({ + * 'msg': 'Hello World!', + * }); + * + * const options = { + * hostname: 'www.google.com', + * port: 80, + * path: '/upload', + * method: 'POST', + * headers: { + * 'Content-Type': 'application/json', + * 'Content-Length': Buffer.byteLength(postData), + * }, + * }; + * + * const req = http.request(options, (res) => { + * console.log(`STATUS: ${res.statusCode}`); + * console.log(`HEADERS: ${JSON.stringify(res.headers)}`); + * res.setEncoding('utf8'); + * res.on('data', (chunk) => { + * console.log(`BODY: ${chunk}`); + * }); + * res.on('end', () => { + * console.log('No more data in response.'); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(`problem with request: ${e.message}`); + * }); + * + * // Write data to request body + * req.write(postData); + * req.end(); + * ``` + * + * In the example `req.end()` was called. With `http.request()` one + * must always call `req.end()` to signify the end of the request - + * even if there is no data being written to the request body. + * + * If any error is encountered during the request (be that with DNS resolution, + * TCP level errors, or actual HTTP parse errors) an `'error'` event is emitted + * on the returned request object. As with all `'error'` events, if no listeners + * are registered the error will be thrown. + * + * There are a few special headers that should be noted. + * + * * Sending a 'Connection: keep-alive' will notify Node.js that the connection to + * the server should be persisted until the next request. + * * Sending a 'Content-Length' header will disable the default chunked encoding. + * * Sending an 'Expect' header will immediately send the request headers. + * Usually, when sending 'Expect: 100-continue', both a timeout and a listener + * for the `'continue'` event should be set. See RFC 2616 Section 8.2.3 for more + * information. + * * Sending an Authorization header will override using the `auth` option + * to compute basic authentication. + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('http://abc:xyz@example.com'); + * + * const req = http.request(options, (res) => { + * // ... 
+ * }); + * ``` + * + * In a successful request, the following events will be emitted in the following + * order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * (`'data'` will not be emitted at all if the response body is empty, for + * instance, in most redirects) + * * `'end'` on the `res` object + * * `'close'` + * + * In the case of a connection error, the following events will be emitted: + * + * * `'socket'` + * * `'error'` + * * `'close'` + * + * In the case of a premature connection close before the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * In the case of a premature connection close after the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (connection closed here) + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'` + * * `'close'` + * * `'close'` on the `res` object + * + * If `req.destroy()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * + * If `req.destroy()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * + * If `req.destroy()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.destroy()` called here) + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message `'Error: aborted'`and code `'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * * `'close'` on the `res` object + * + * If `req.abort()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.abort()` called here) + * * `'abort'` + * * `'close'` + * + * If `req.abort()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.abort()` called here) + * * `'abort'` + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * If `req.abort()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.abort()` called here) + * * `'abort'` + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'`. 
+ * * `'close'` + * * `'close'` on the `res` object + * + * Setting the `timeout` option or using the `setTimeout()` function will + * not abort the request or do anything besides add a `'timeout'` event. + * + * Passing an `AbortSignal` and then calling `abort()` on the corresponding`AbortController` will behave the same way as calling `.destroy()` on the + * request. Specifically, the `'error'` event will be emitted with an error with + * the message `'AbortError: The operation was aborted'`, the code `'ABORT_ERR'`and the `cause`, if one was provided. + * @since v0.3.6 + */ + function request(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: IncomingMessage) => void, + ): ClientRequest; + /** + * Since most requests are GET requests without bodies, Node.js provides this + * convenience method. The only difference between this method and {@link request} is that it sets the method to GET by default and calls `req.end()`automatically. The callback must take care to + * consume the response + * data for reasons stated in {@link ClientRequest} section. + * + * The `callback` is invoked with a single argument that is an instance of {@link IncomingMessage}. + * + * JSON fetching example: + * + * ```js + * http.get('http://localhost:8000/', (res) => { + * const { statusCode } = res; + * const contentType = res.headers['content-type']; + * + * let error; + * // Any 2xx status code signals a successful response but + * // here we're only checking for 200. + * if (statusCode !== 200) { + * error = new Error('Request Failed.\n' + + * `Status Code: ${statusCode}`); + * } else if (!/^application\/json/.test(contentType)) { + * error = new Error('Invalid content-type.\n' + + * `Expected application/json but received ${contentType}`); + * } + * if (error) { + * console.error(error.message); + * // Consume response data to free up memory + * res.resume(); + * return; + * } + * + * res.setEncoding('utf8'); + * let rawData = ''; + * res.on('data', (chunk) => { rawData += chunk; }); + * res.on('end', () => { + * try { + * const parsedData = JSON.parse(rawData); + * console.log(parsedData); + * } catch (e) { + * console.error(e.message); + * } + * }); + * }).on('error', (e) => { + * console.error(`Got error: ${e.message}`); + * }); + * + * // Create a local server to receive data from + * const server = http.createServer((req, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the method set to GET by default. + */ + function get(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function get(url: string | URL, options: RequestOptions, callback?: (res: IncomingMessage) => void): ClientRequest; + /** + * Performs the low-level validations on the provided `name` that are done when`res.setHeader(name, value)` is called. + * + * Passing illegal value as `name` will result in a `TypeError` being thrown, + * identified by `code: 'ERR_INVALID_HTTP_TOKEN'`. + * + * It is not necessary to use this method before passing headers to an HTTP request + * or response. The HTTP module will automatically validate such headers. 
+ * + * Example: + * + * ```js + * import { validateHeaderName } from 'node:http'; + * + * try { + * validateHeaderName(''); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code); // --> 'ERR_INVALID_HTTP_TOKEN' + * console.error(err.message); // --> 'Header name must be a valid HTTP token [""]' + * } + * ``` + * @since v14.3.0 + * @param [label='Header name'] Label for error message. + */ + function validateHeaderName(name: string): void; + /** + * Performs the low-level validations on the provided `value` that are done when`res.setHeader(name, value)` is called. + * + * Passing illegal value as `value` will result in a `TypeError` being thrown. + * + * * Undefined value error is identified by `code: 'ERR_HTTP_INVALID_HEADER_VALUE'`. + * * Invalid value character error is identified by `code: 'ERR_INVALID_CHAR'`. + * + * It is not necessary to use this method before passing headers to an HTTP request + * or response. The HTTP module will automatically validate such headers. + * + * Examples: + * + * ```js + * import { validateHeaderValue } from 'node:http'; + * + * try { + * validateHeaderValue('x-my-header', undefined); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code === 'ERR_HTTP_INVALID_HEADER_VALUE'); // --> true + * console.error(err.message); // --> 'Invalid value "undefined" for header "x-my-header"' + * } + * + * try { + * validateHeaderValue('x-my-header', 'oʊmɪɡə'); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code === 'ERR_INVALID_CHAR'); // --> true + * console.error(err.message); // --> 'Invalid character in header content ["x-my-header"]' + * } + * ``` + * @since v14.3.0 + * @param name Header name + * @param value Header value + */ + function validateHeaderValue(name: string, value: string): void; + /** + * Set the maximum number of idle HTTP parsers. + * @since v18.8.0, v16.18.0 + * @param [max=1000] + */ + function setMaxIdleHTTPParsers(max: number): void; + let globalAgent: Agent; + /** + * Read-only property specifying the maximum allowed size of HTTP headers in bytes. + * Defaults to 16KB. Configurable using the `--max-http-header-size` CLI option. + */ + const maxHeaderSize: number; +} +declare module "node:http" { + export * from "http"; +} diff --git a/node_modules/@types/node/http2.d.ts b/node_modules/@types/node/http2.d.ts new file mode 100644 index 0000000..c3b3e8e --- /dev/null +++ b/node_modules/@types/node/http2.d.ts @@ -0,0 +1,2382 @@ +/** + * The `node:http2` module provides an implementation of the [HTTP/2](https://tools.ietf.org/html/rfc7540) protocol. 
+ * It can be accessed using: + * + * ```js + * const http2 = require('node:http2'); + * ``` + * @since v8.4.0 + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/http2.js) + */ +declare module "http2" { + import EventEmitter = require("node:events"); + import * as fs from "node:fs"; + import * as net from "node:net"; + import * as stream from "node:stream"; + import * as tls from "node:tls"; + import * as url from "node:url"; + import { + IncomingHttpHeaders as Http1IncomingHttpHeaders, + IncomingMessage, + OutgoingHttpHeaders, + ServerResponse, + } from "node:http"; + export { OutgoingHttpHeaders } from "node:http"; + export interface IncomingHttpStatusHeader { + ":status"?: number | undefined; + } + export interface IncomingHttpHeaders extends Http1IncomingHttpHeaders { + ":path"?: string | undefined; + ":method"?: string | undefined; + ":authority"?: string | undefined; + ":scheme"?: string | undefined; + } + // Http2Stream + export interface StreamPriorityOptions { + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + silent?: boolean | undefined; + } + export interface StreamState { + localWindowSize?: number | undefined; + state?: number | undefined; + localClose?: number | undefined; + remoteClose?: number | undefined; + sumDependencyWeight?: number | undefined; + weight?: number | undefined; + } + export interface ServerStreamResponseOptions { + endStream?: boolean | undefined; + waitForTrailers?: boolean | undefined; + } + export interface StatOptions { + offset: number; + length: number; + } + export interface ServerStreamFileResponseOptions { + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + statCheck?(stats: fs.Stats, headers: OutgoingHttpHeaders, statOptions: StatOptions): void | boolean; + waitForTrailers?: boolean | undefined; + offset?: number | undefined; + length?: number | undefined; + } + export interface ServerStreamFileResponseOptionsWithError extends ServerStreamFileResponseOptions { + onError?(err: NodeJS.ErrnoException): void; + } + export interface Http2Stream extends stream.Duplex { + /** + * Set to `true` if the `Http2Stream` instance was aborted abnormally. When set, + * the `'aborted'` event will have been emitted. + * @since v8.4.0 + */ + readonly aborted: boolean; + /** + * This property shows the number of characters currently buffered to be written. + * See `net.Socket.bufferSize` for details. + * @since v11.2.0, v10.16.0 + */ + readonly bufferSize: number; + /** + * Set to `true` if the `Http2Stream` instance has been closed. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Set to `true` if the `Http2Stream` instance has been destroyed and is no longer + * usable. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Set to `true` if the `END_STREAM` flag was set in the request or response + * HEADERS frame received, indicating that no additional data should be received + * and the readable side of the `Http2Stream` will be closed. + * @since v10.11.0 + */ + readonly endAfterHeaders: boolean; + /** + * The numeric stream identifier of this `Http2Stream` instance. Set to `undefined`if the stream identifier has not yet been assigned. + * @since v8.4.0 + */ + readonly id?: number | undefined; + /** + * Set to `true` if the `Http2Stream` instance has not yet been assigned a + * numeric stream identifier. 
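+ *
+ * A minimal sketch (the URL and path are illustrative only):
+ *
+ * ```js
+ * const http2 = require('node:http2');
+ * const clientSession = http2.connect('http://localhost:8000');
+ * const req = clientSession.request({ ':path': '/' });
+ * // The stream may not have been assigned a numeric id yet at this point.
+ * console.log(req.pending); // possibly true until the stream is opened
+ * ```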
+ * @since v9.4.0 + */ + readonly pending: boolean; + /** + * Set to the `RST_STREAM` `error code` reported when the `Http2Stream` is + * destroyed after either receiving an `RST_STREAM` frame from the connected peer, + * calling `http2stream.close()`, or `http2stream.destroy()`. Will be`undefined` if the `Http2Stream` has not been closed. + * @since v8.4.0 + */ + readonly rstCode: number; + /** + * An object containing the outbound headers sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentHeaders: OutgoingHttpHeaders; + /** + * An array of objects containing the outbound informational (additional) headers + * sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentInfoHeaders?: OutgoingHttpHeaders[] | undefined; + /** + * An object containing the outbound trailers sent for this `HttpStream`. + * @since v9.5.0 + */ + readonly sentTrailers?: OutgoingHttpHeaders | undefined; + /** + * A reference to the `Http2Session` instance that owns this `Http2Stream`. The + * value will be `undefined` after the `Http2Stream` instance is destroyed. + * @since v8.4.0 + */ + readonly session: Http2Session | undefined; + /** + * Provides miscellaneous information about the current state of the`Http2Stream`. + * + * A current state of this `Http2Stream`. + * @since v8.4.0 + */ + readonly state: StreamState; + /** + * Closes the `Http2Stream` instance by sending an `RST_STREAM` frame to the + * connected HTTP/2 peer. + * @since v8.4.0 + * @param [code=http2.constants.NGHTTP2_NO_ERROR] Unsigned 32-bit integer identifying the error code. + * @param callback An optional function registered to listen for the `'close'` event. + */ + close(code?: number, callback?: () => void): void; + /** + * Updates the priority for this `Http2Stream` instance. + * @since v8.4.0 + */ + priority(options: StreamPriorityOptions): void; + /** + * ```js + * const http2 = require('node:http2'); + * const client = http2.connect('http://example.org:8000'); + * const { NGHTTP2_CANCEL } = http2.constants; + * const req = client.request({ ':path': '/' }); + * + * // Cancel the stream if there's no activity after 5 seconds + * req.setTimeout(5000, () => req.close(NGHTTP2_CANCEL)); + * ``` + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Sends a trailing `HEADERS` frame to the connected HTTP/2 peer. This method + * will cause the `Http2Stream` to be immediately closed and must only be + * called after the `'wantTrailers'` event has been emitted. When sending a + * request or sending a response, the `options.waitForTrailers` option must be set + * in order to keep the `Http2Stream` open after the final `DATA` frame so that + * trailers can be sent. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond(undefined, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ xyz: 'abc' }); + * }); + * stream.end('Hello World'); + * }); + * ``` + * + * The HTTP/1 specification forbids trailers from containing HTTP/2 pseudo-header + * fields (e.g. `':method'`, `':path'`, etc). 
+ * @since v10.0.0 + */ + sendTrailers(headers: OutgoingHttpHeaders): void; + addListener(event: "aborted", listener: () => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: "streamClosed", listener: (code: number) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: "wantTrailers", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "aborted"): boolean; + emit(event: "close"): boolean; + emit(event: "data", chunk: Buffer | string): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "frameError", frameType: number, errorCode: number): boolean; + emit(event: "pipe", src: stream.Readable): boolean; + emit(event: "unpipe", src: stream.Readable): boolean; + emit(event: "streamClosed", code: number): boolean; + emit(event: "timeout"): boolean; + emit(event: "trailers", trailers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "wantTrailers"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "aborted", listener: () => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: "streamClosed", listener: (code: number) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + on(event: "wantTrailers", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "aborted", listener: () => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: "streamClosed", listener: (code: number) => void): 
this; + once(event: "timeout", listener: () => void): this; + once(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + once(event: "wantTrailers", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "aborted", listener: () => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "streamClosed", listener: (code: number) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: "wantTrailers", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "aborted", listener: () => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "streamClosed", listener: (code: number) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: "wantTrailers", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Stream extends Http2Stream { + addListener(event: "continue", listener: () => {}): this; + addListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + addListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + addListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "continue"): boolean; + emit(event: "headers", headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; + emit(event: "push", headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "response", headers: IncomingHttpHeaders & IncomingHttpStatusHeader, 
flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "continue", listener: () => {}): this; + on( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + on(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + on( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "continue", listener: () => {}): this; + once( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + once(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + once( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "continue", listener: () => {}): this; + prependListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "continue", listener: () => {}): this; + prependOnceListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependOnceListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ServerHttp2Stream extends Http2Stream { + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * Read-only property mapped to the `SETTINGS_ENABLE_PUSH` flag of the remote + * client's most recent `SETTINGS` frame. Will be `true` if the remote peer + * accepts push streams, `false` otherwise. Settings are the same for every`Http2Stream` in the same `Http2Session`. + * @since v8.4.0 + */ + readonly pushAllowed: boolean; + /** + * Sends an additional informational `HEADERS` frame to the connected HTTP/2 peer. + * @since v8.4.0 + */ + additionalHeaders(headers: OutgoingHttpHeaders): void; + /** + * Initiates a push stream. The callback is invoked with the new `Http2Stream`instance created for the push stream passed as the second argument, or an`Error` passed as the first argument. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.pushStream({ ':path': '/' }, (err, pushStream, headers) => { + * if (err) throw err; + * pushStream.respond({ ':status': 200 }); + * pushStream.end('some pushed data'); + * }); + * stream.end('some data'); + * }); + * ``` + * + * Setting the weight of a push stream is not allowed in the `HEADERS` frame. 
Pass + * a `weight` value to `http2stream.priority` with the `silent` option set to`true` to enable server-side bandwidth balancing between concurrent streams. + * + * Calling `http2stream.pushStream()` from within a pushed stream is not permitted + * and will throw an error. + * @since v8.4.0 + * @param callback Callback that is called once the push stream has been initiated. + */ + pushStream( + headers: OutgoingHttpHeaders, + callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void, + ): void; + pushStream( + headers: OutgoingHttpHeaders, + options?: StreamPriorityOptions, + callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void, + ): void; + /** + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.end('some data'); + * }); + * ``` + * + * Initiates a response. When the `options.waitForTrailers` option is set, the`'wantTrailers'` event will be emitted immediately after queuing the last chunk + * of payload data to be sent. The `http2stream.sendTrailers()` method can then be + * used to sent trailing header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * stream.end('some data'); + * }); + * ``` + * @since v8.4.0 + */ + respond(headers?: OutgoingHttpHeaders, options?: ServerStreamResponseOptions): void; + /** + * Initiates a response whose data is read from the given file descriptor. No + * validation is performed on the given file descriptor. If an error occurs while + * attempting to read data using the file descriptor, the `Http2Stream` will be + * closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR` code. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * ```js + * const http2 = require('node:http2'); + * const fs = require('node:fs'); + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8', + * }; + * stream.respondWithFD(fd, headers); + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given fd. If the `statCheck` function is provided, the`http2stream.respondWithFD()` method will perform an `fs.fstat()` call to + * collect details on the provided file descriptor. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. 
+ * + * The file descriptor or `FileHandle` is not closed when the stream is closed, + * so it will need to be closed manually once it is no longer needed. + * Using the same file descriptor concurrently for multiple streams + * is not supported and may result in data loss. Re-using a file descriptor + * after a stream has finished is supported. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code _must_ call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('node:http2'); + * const fs = require('node:fs'); + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8', + * }; + * stream.respondWithFD(fd, headers, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * @since v8.4.0 + * @param fd A readable file descriptor. + */ + respondWithFD( + fd: number | fs.promises.FileHandle, + headers?: OutgoingHttpHeaders, + options?: ServerStreamFileResponseOptions, + ): void; + /** + * Sends a regular file as the response. The `path` must specify a regular file + * or an `'error'` event will be emitted on the `Http2Stream` object. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given file: + * + * If an error occurs while attempting to read the file data, the `Http2Stream`will be closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR`code. If the `onError` callback is + * defined, then it will be called. Otherwise + * the stream will be destroyed. + * + * Example using a file path: + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * headers['last-modified'] = stat.mtime.toUTCString(); + * } + * + * function onError(err) { + * // stream.respond() can throw if the stream has been destroyed by + * // the other side. + * try { + * if (err.code === 'ENOENT') { + * stream.respond({ ':status': 404 }); + * } else { + * stream.respond({ ':status': 500 }); + * } + * } catch (err) { + * // Perform actual error handling. + * console.error(err); + * } + * stream.end(); + * } + * + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck, onError }); + * }); + * ``` + * + * The `options.statCheck` function may also be used to cancel the send operation + * by returning `false`. 
For instance, a conditional request may check the stat + * results to determine if the file has been modified to return an appropriate`304` response: + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * // Check the stat here... + * stream.respond({ ':status': 304 }); + * return false; // Cancel the send operation + * } + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck }); + * }); + * ``` + * + * The `content-length` header field will be automatically set. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. + * + * The `options.onError` function may also be used to handle all the errors + * that could happen before the delivery of the file is initiated. The + * default behavior is to destroy the stream. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * }); + * ``` + * @since v8.4.0 + */ + respondWithFile( + path: string, + headers?: OutgoingHttpHeaders, + options?: ServerStreamFileResponseOptionsWithError, + ): void; + } + // Http2Session + export interface Settings { + headerTableSize?: number | undefined; + enablePush?: boolean | undefined; + initialWindowSize?: number | undefined; + maxFrameSize?: number | undefined; + maxConcurrentStreams?: number | undefined; + maxHeaderListSize?: number | undefined; + enableConnectProtocol?: boolean | undefined; + } + export interface ClientSessionRequestOptions { + endStream?: boolean | undefined; + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + waitForTrailers?: boolean | undefined; + signal?: AbortSignal | undefined; + } + export interface SessionState { + effectiveLocalWindowSize?: number | undefined; + effectiveRecvDataLength?: number | undefined; + nextStreamID?: number | undefined; + localWindowSize?: number | undefined; + lastProcStreamID?: number | undefined; + remoteWindowSize?: number | undefined; + outboundQueueSize?: number | undefined; + deflateDynamicTableSize?: number | undefined; + inflateDynamicTableSize?: number | undefined; + } + export interface Http2Session extends EventEmitter { + /** + * Value will be `undefined` if the `Http2Session` is not yet connected to a + * socket, `h2c` if the `Http2Session` is not connected to a `TLSSocket`, or + * will return the value of the connected `TLSSocket`'s own `alpnProtocol`property. 
+ * @since v9.4.0 + */ + readonly alpnProtocol?: string | undefined; + /** + * Will be `true` if this `Http2Session` instance has been closed, otherwise`false`. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Will be `true` if this `Http2Session` instance is still connecting, will be set + * to `false` before emitting `connect` event and/or calling the `http2.connect`callback. + * @since v10.0.0 + */ + readonly connecting: boolean; + /** + * Will be `true` if this `Http2Session` instance has been destroyed and must no + * longer be used, otherwise `false`. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Value is `undefined` if the `Http2Session` session socket has not yet been + * connected, `true` if the `Http2Session` is connected with a `TLSSocket`, + * and `false` if the `Http2Session` is connected to any other kind of socket + * or stream. + * @since v9.4.0 + */ + readonly encrypted?: boolean | undefined; + /** + * A prototype-less object describing the current local settings of this`Http2Session`. The local settings are local to _this_`Http2Session` instance. + * @since v8.4.0 + */ + readonly localSettings: Settings; + /** + * If the `Http2Session` is connected to a `TLSSocket`, the `originSet` property + * will return an `Array` of origins for which the `Http2Session` may be + * considered authoritative. + * + * The `originSet` property is only available when using a secure TLS connection. + * @since v9.4.0 + */ + readonly originSet?: string[] | undefined; + /** + * Indicates whether the `Http2Session` is currently waiting for acknowledgment of + * a sent `SETTINGS` frame. Will be `true` after calling the`http2session.settings()` method. Will be `false` once all sent `SETTINGS`frames have been acknowledged. + * @since v8.4.0 + */ + readonly pendingSettingsAck: boolean; + /** + * A prototype-less object describing the current remote settings of this`Http2Session`. The remote settings are set by the _connected_ HTTP/2 peer. + * @since v8.4.0 + */ + readonly remoteSettings: Settings; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * limits available methods to ones safe to use with HTTP/2. + * + * `destroy`, `emit`, `end`, `pause`, `read`, `resume`, and `write` will throw + * an error with code `ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for more information. + * + * `setTimeout` method will be called on this `Http2Session`. + * + * All other interactions will be routed directly to the socket. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * Provides miscellaneous information about the current state of the`Http2Session`. + * + * An object describing the current status of this `Http2Session`. + * @since v8.4.0 + */ + readonly state: SessionState; + /** + * The `http2session.type` will be equal to`http2.constants.NGHTTP2_SESSION_SERVER` if this `Http2Session` instance is a + * server, and `http2.constants.NGHTTP2_SESSION_CLIENT` if the instance is a + * client. + * @since v8.4.0 + */ + readonly type: number; + /** + * Gracefully closes the `Http2Session`, allowing any existing streams to + * complete on their own and preventing new `Http2Stream` instances from being + * created. Once closed, `http2session.destroy()`_might_ be called if there + * are no open `Http2Stream` instances. + * + * If specified, the `callback` function is registered as a handler for the`'close'` event. 
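+ *
+ * A minimal sketch of a graceful shutdown (the URL is illustrative only):
+ *
+ * ```js
+ * const http2 = require('node:http2');
+ * const session = http2.connect('http://localhost:8000');
+ * // Allow any in-flight streams to finish, then close the session.
+ * session.close(() => console.log('session closed'));
+ * ```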
+ * @since v9.4.0 + */ + close(callback?: () => void): void; + /** + * Immediately terminates the `Http2Session` and the associated `net.Socket` or`tls.TLSSocket`. + * + * Once destroyed, the `Http2Session` will emit the `'close'` event. If `error`is not undefined, an `'error'` event will be emitted immediately before the`'close'` event. + * + * If there are any remaining open `Http2Streams` associated with the`Http2Session`, those will also be destroyed. + * @since v8.4.0 + * @param error An `Error` object if the `Http2Session` is being destroyed due to an error. + * @param code The HTTP/2 error code to send in the final `GOAWAY` frame. If unspecified, and `error` is not undefined, the default is `INTERNAL_ERROR`, otherwise defaults to `NO_ERROR`. + */ + destroy(error?: Error, code?: number): void; + /** + * Transmits a `GOAWAY` frame to the connected peer _without_ shutting down the`Http2Session`. + * @since v9.4.0 + * @param code An HTTP/2 error code + * @param lastStreamID The numeric ID of the last processed `Http2Stream` + * @param opaqueData A `TypedArray` or `DataView` instance containing additional data to be carried within the `GOAWAY` frame. + */ + goaway(code?: number, lastStreamID?: number, opaqueData?: NodeJS.ArrayBufferView): void; + /** + * Sends a `PING` frame to the connected HTTP/2 peer. A `callback` function must + * be provided. The method will return `true` if the `PING` was sent, `false`otherwise. + * + * The maximum number of outstanding (unacknowledged) pings is determined by the`maxOutstandingPings` configuration option. The default maximum is 10. + * + * If provided, the `payload` must be a `Buffer`, `TypedArray`, or `DataView`containing 8 bytes of data that will be transmitted with the `PING` and + * returned with the ping acknowledgment. + * + * The callback will be invoked with three arguments: an error argument that will + * be `null` if the `PING` was successfully acknowledged, a `duration` argument + * that reports the number of milliseconds elapsed since the ping was sent and the + * acknowledgment was received, and a `Buffer` containing the 8-byte `PING`payload. + * + * ```js + * session.ping(Buffer.from('abcdefgh'), (err, duration, payload) => { + * if (!err) { + * console.log(`Ping acknowledged in ${duration} milliseconds`); + * console.log(`With payload '${payload.toString()}'`); + * } + * }); + * ``` + * + * If the `payload` argument is not specified, the default payload will be the + * 64-bit timestamp (little endian) marking the start of the `PING` duration. + * @since v8.9.3 + * @param payload Optional ping payload. + */ + ping(callback: (err: Error | null, duration: number, payload: Buffer) => void): boolean; + ping( + payload: NodeJS.ArrayBufferView, + callback: (err: Error | null, duration: number, payload: Buffer) => void, + ): boolean; + /** + * Calls `ref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + ref(): void; + /** + * Sets the local endpoint's window size. + * The `windowSize` is the total window size to set, not + * the delta. 
+ * + * ```js + * const http2 = require('node:http2'); + * + * const server = http2.createServer(); + * const expectedWindowSize = 2 ** 20; + * server.on('connect', (session) => { + * + * // Set local window size to be 2 ** 20 + * session.setLocalWindowSize(expectedWindowSize); + * }); + * ``` + * @since v15.3.0, v14.18.0 + */ + setLocalWindowSize(windowSize: number): void; + /** + * Used to set a callback function that is called when there is no activity on + * the `Http2Session` after `msecs` milliseconds. The given `callback` is + * registered as a listener on the `'timeout'` event. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Updates the current local settings for this `Http2Session` and sends a new`SETTINGS` frame to the connected HTTP/2 peer. + * + * Once called, the `http2session.pendingSettingsAck` property will be `true`while the session is waiting for the remote peer to acknowledge the new + * settings. + * + * The new settings will not become effective until the `SETTINGS` acknowledgment + * is received and the `'localSettings'` event is emitted. It is possible to send + * multiple `SETTINGS` frames while acknowledgment is still pending. + * @since v8.4.0 + * @param callback Callback that is called once the session is connected or right away if the session is already connected. + */ + settings( + settings: Settings, + callback?: (err: Error | null, settings: Settings, duration: number) => void, + ): void; + /** + * Calls `unref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + unref(): void; + addListener(event: "close", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + addListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + addListener(event: "localSettings", listener: (settings: Settings) => void): this; + addListener(event: "ping", listener: () => void): this; + addListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "frameError", frameType: number, errorCode: number, streamID: number): boolean; + emit(event: "goaway", errorCode: number, lastStreamID: number, opaqueData?: Buffer): boolean; + emit(event: "localSettings", settings: Settings): boolean; + emit(event: "ping"): boolean; + emit(event: "remoteSettings", settings: Settings): boolean; + emit(event: "timeout"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; + on(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void): this; + on(event: "localSettings", listener: (settings: Settings) => void): this; + on(event: "ping", listener: () => void): this; + on(event: "remoteSettings", listener: (settings: Settings) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 
"close", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; + once(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void): this; + once(event: "localSettings", listener: (settings: Settings) => void): this; + once(event: "ping", listener: () => void): this; + once(event: "remoteSettings", listener: (settings: Settings) => void): this; + once(event: "timeout", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + prependListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + prependListener(event: "localSettings", listener: (settings: Settings) => void): this; + prependListener(event: "ping", listener: () => void): this; + prependListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + prependOnceListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + prependOnceListener(event: "localSettings", listener: (settings: Settings) => void): this; + prependOnceListener(event: "ping", listener: () => void): this; + prependOnceListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Session extends Http2Session { + /** + * For HTTP/2 Client `Http2Session` instances only, the `http2session.request()`creates and returns an `Http2Stream` instance that can be used to send an + * HTTP/2 request to the connected server. + * + * When a `ClientHttp2Session` is first created, the socket may not yet be + * connected. if `clienthttp2session.request()` is called during this time, the + * actual request will be deferred until the socket is ready to go. + * If the `session` is closed before the actual request be executed, an`ERR_HTTP2_GOAWAY_SESSION` is thrown. + * + * This method is only available if `http2session.type` is equal to`http2.constants.NGHTTP2_SESSION_CLIENT`. + * + * ```js + * const http2 = require('node:http2'); + * const clientSession = http2.connect('https://localhost:1234'); + * const { + * HTTP2_HEADER_PATH, + * HTTP2_HEADER_STATUS, + * } = http2.constants; + * + * const req = clientSession.request({ [HTTP2_HEADER_PATH]: '/' }); + * req.on('response', (headers) => { + * console.log(headers[HTTP2_HEADER_STATUS]); + * req.on('data', (chunk) => { // .. }); + * req.on('end', () => { // .. 
}); + * }); + * ``` + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * is emitted immediately after queuing the last chunk of payload data to be sent. + * The `http2stream.sendTrailers()` method can then be called to send trailing + * headers to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * When `options.signal` is set with an `AbortSignal` and then `abort` on the + * corresponding `AbortController` is called, the request will emit an `'error'`event with an `AbortError` error. + * + * The `:method` and `:path` pseudo-headers are not specified within `headers`, + * they respectively default to: + * + * * `:method` \= `'GET'` + * * `:path` \= `/` + * @since v8.4.0 + */ + request(headers?: OutgoingHttpHeaders, options?: ClientSessionRequestOptions): ClientHttp2Stream; + addListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + addListener(event: "origin", listener: (origins: string[]) => void): this; + addListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + addListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "altsvc", alt: string, origin: string, stream: number): boolean; + emit(event: "origin", origins: readonly string[]): boolean; + emit(event: "connect", session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit( + event: "stream", + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + on(event: "origin", listener: (origins: string[]) => void): this; + on(event: "connect", listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + once(event: "origin", listener: (origins: string[]) => void): this; + once( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + once( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + prependListener(event: "origin", listener: (origins: string[]) => void): this; + prependListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & 
IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + prependOnceListener(event: "origin", listener: (origins: string[]) => void): this; + prependOnceListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependOnceListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface AlternativeServiceOptions { + origin: number | string | url.URL; + } + export interface ServerHttp2Session extends Http2Session { + readonly server: Http2Server | Http2SecureServer; + /** + * Submits an `ALTSVC` frame (as defined by [RFC 7838](https://tools.ietf.org/html/rfc7838)) to the connected client. + * + * ```js + * const http2 = require('node:http2'); + * + * const server = http2.createServer(); + * server.on('session', (session) => { + * // Set altsvc for origin https://example.org:80 + * session.altsvc('h2=":8000"', 'https://example.org:80'); + * }); + * + * server.on('stream', (stream) => { + * // Set altsvc for a specific stream + * stream.session.altsvc('h2=":8000"', stream.id); + * }); + * ``` + * + * Sending an `ALTSVC` frame with a specific stream ID indicates that the alternate + * service is associated with the origin of the given `Http2Stream`. + * + * The `alt` and origin string _must_ contain only ASCII bytes and are + * strictly interpreted as a sequence of ASCII bytes. The special value `'clear'`may be passed to clear any previously set alternative service for a given + * domain. + * + * When a string is passed for the `originOrStream` argument, it will be parsed as + * a URL and the origin will be derived. For instance, the origin for the + * HTTP URL `'https://example.org/foo/bar'` is the ASCII string`'https://example.org'`. An error will be thrown if either the given string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as`originOrStream`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. + * @since v9.4.0 + * @param alt A description of the alternative service configuration as defined by `RFC 7838`. + * @param originOrStream Either a URL string specifying the origin (or an `Object` with an `origin` property) or the numeric identifier of an active `Http2Stream` as given by the + * `http2stream.id` property. + */ + altsvc(alt: string, originOrStream: number | string | url.URL | AlternativeServiceOptions): void; + /** + * Submits an `ORIGIN` frame (as defined by [RFC 8336](https://tools.ietf.org/html/rfc8336)) to the connected client + * to advertise the set of origins for which the server is capable of providing + * authoritative responses. 
+ * + * ```js + * const http2 = require('node:http2'); + * const options = getSecureOptionsSomehow(); + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * server.on('session', (session) => { + * session.origin('https://example.com', 'https://example.org'); + * }); + * ``` + * + * When a string is passed as an `origin`, it will be parsed as a URL and the + * origin will be derived. For instance, the origin for the HTTP URL`'https://example.org/foo/bar'` is the ASCII string`'https://example.org'`. An error will be thrown if either the given + * string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as + * an `origin`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. + * + * Alternatively, the `origins` option may be used when creating a new HTTP/2 + * server using the `http2.createSecureServer()` method: + * + * ```js + * const http2 = require('node:http2'); + * const options = getSecureOptionsSomehow(); + * options.origins = ['https://example.com', 'https://example.org']; + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * ``` + * @since v10.12.0 + * @param origins One or more URL Strings passed as separate arguments. + */ + origin( + ...origins: Array< + | string + | url.URL + | { + origin: string; + } + > + ): void; + addListener( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "connect", session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "connect", listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + 
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + // Http2Server + export interface SessionOptions { + maxDeflateDynamicTableSize?: number | undefined; + maxSessionMemory?: number | undefined; + maxHeaderListPairs?: number | undefined; + maxOutstandingPings?: number | undefined; + maxSendHeaderBlockLength?: number | undefined; + paddingStrategy?: number | undefined; + peerMaxConcurrentStreams?: number | undefined; + settings?: Settings | undefined; + /** + * Specifies a timeout in milliseconds that + * a server should wait when an [`'unknownProtocol'`][] is emitted. If the + * socket has not been destroyed by that time the server will destroy it. + * @default 100000 + */ + unknownProtocolTimeout?: number | undefined; + selectPadding?(frameLen: number, maxFrameLen: number): number; + } + export interface ClientSessionOptions extends SessionOptions { + maxReservedRemoteStreams?: number | undefined; + createConnection?: ((authority: url.URL, option: SessionOptions) => stream.Duplex) | undefined; + protocol?: "http:" | "https:" | undefined; + } + export interface ServerSessionOptions extends SessionOptions { + Http1IncomingMessage?: typeof IncomingMessage | undefined; + Http1ServerResponse?: typeof ServerResponse | undefined; + Http2ServerRequest?: typeof Http2ServerRequest | undefined; + Http2ServerResponse?: typeof Http2ServerResponse | undefined; + } + export interface SecureClientSessionOptions extends ClientSessionOptions, tls.ConnectionOptions {} + export interface SecureServerSessionOptions extends ServerSessionOptions, tls.TlsOptions {} + export interface ServerOptions extends ServerSessionOptions {} + export interface SecureServerOptions extends SecureServerSessionOptions { + allowHTTP1?: boolean | undefined; + origins?: string[] | undefined; + } + interface HTTP2ServerCommon { + setTimeout(msec?: number, callback?: () => void): this; + /** + * Throws ERR_HTTP2_INVALID_SETTING_VALUE for invalid settings values. + * Throws ERR_INVALID_ARG_TYPE for invalid settings argument. 
+ */ + updateSettings(settings: Settings): void; + } + export interface Http2Server extends net.Server, HTTP2ServerCommon { + addListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + addListener(event: "sessionError", listener: (err: Error) => void): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "checkContinue", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "request", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "session", session: ServerHttp2Session): boolean; + emit(event: "sessionError", err: Error): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "timeout"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + on(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: "session", listener: (session: ServerHttp2Session) => void): this; + on(event: "sessionError", listener: (err: Error) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: "timeout", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + once(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: "session", listener: (session: ServerHttp2Session) => void): this; + once(event: "sessionError", listener: (err: Error) => void): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: "timeout", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependListener(event: "sessionError", listener: (err: Error) => void): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener( + event: "request", + 
listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependOnceListener(event: "sessionError", listener: (err: Error) => void): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface Http2SecureServer extends tls.Server, HTTP2ServerCommon { + addListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + addListener(event: "sessionError", listener: (err: Error) => void): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "checkContinue", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "request", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "session", session: ServerHttp2Session): boolean; + emit(event: "sessionError", err: Error): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "timeout"): boolean; + emit(event: "unknownProtocol", socket: tls.TLSSocket): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + on(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: "session", listener: (session: ServerHttp2Session) => void): this; + on(event: "sessionError", listener: (err: Error) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: "timeout", listener: () => void): this; + on(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + once(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: "session", listener: (session: ServerHttp2Session) => void): this; + once(event: "sessionError", listener: (err: Error) => void): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: "timeout", listener: () => void): this; + once(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "checkContinue", + listener: (request: 
Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependListener(event: "sessionError", listener: (err: Error) => void): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependOnceListener(event: "sessionError", listener: (err: Error) => void): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * A `Http2ServerRequest` object is created by {@link Server} or {@link SecureServer} and passed as the first argument to the `'request'` event. It may be used to access a request status, + * headers, and + * data. + * @since v8.4.0 + */ + export class Http2ServerRequest extends stream.Readable { + constructor( + stream: ServerHttp2Stream, + headers: IncomingHttpHeaders, + options: stream.ReadableOptions, + rawHeaders: readonly string[], + ); + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + */ + readonly aborted: boolean; + /** + * The request authority pseudo header field. Because HTTP/2 allows requests + * to set either `:authority` or `host`, this value is derived from`req.headers[':authority']` if present. Otherwise, it is derived from`req.headers['host']`. + * @since v8.4.0 + */ + readonly authority: string; + /** + * See `request.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * The `request.complete` property will be `true` if the request has + * been completed, aborted, or destroyed. + * @since v12.10.0 + */ + readonly complete: boolean; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.headers); + * ``` + * + * See `HTTP/2 Headers Object`. + * + * In HTTP/2, the request path, host name, protocol, and method are represented as + * special headers prefixed with the `:` character (e.g. `':path'`). These special + * headers will be included in the `request.headers` object. Care must be taken not + * to inadvertently modify these special headers or errors may occur. 
For instance, + * removing all headers from the request will cause errors to occur: + * + * ```js + * removeAllHeaders(request.headers); + * assert(request.url); // Fails because the :path header has been removed + * ``` + * @since v8.4.0 + */ + readonly headers: IncomingHttpHeaders; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. Returns`'2.0'`. + * + * Also `message.httpVersionMajor` is the first integer and`message.httpVersionMinor` is the second. + * @since v8.4.0 + */ + readonly httpVersion: string; + readonly httpVersionMinor: number; + readonly httpVersionMajor: number; + /** + * The request method as a string. Read-only. Examples: `'GET'`, `'DELETE'`. + * @since v8.4.0 + */ + readonly method: string; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. + * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v8.4.0 + */ + readonly rawHeaders: string[]; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly rawTrailers: string[]; + /** + * The request scheme pseudo header field indicating the scheme + * portion of the target URL. + * @since v8.4.0 + */ + readonly scheme: string; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `request.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on`request.stream`. + * + * `setTimeout` method will be called on `request.stream.session`. + * + * `pause`, `read`, `resume`, and `write` will throw an error with code`ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. With TLS support, + * use `request.socket.getPeerCertificate()` to obtain the client's + * authentication details. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the request. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly trailers: IncomingHttpHeaders; + /** + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. 
If the request is: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * Then `request.url` will be: + * + * ```js + * '/status?name=ryan' + * ``` + * + * To parse the url into its parts, `new URL()` can be used: + * + * ```console + * $ node + * > new URL('/status?name=ryan', 'http://example.com') + * URL { + * href: 'http://example.com/status?name=ryan', + * origin: 'http://example.com', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'example.com', + * hostname: 'example.com', + * port: '', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v8.4.0 + */ + url: string; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream` s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'`events, timed out sockets must be handled explicitly. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + read(size?: number): Buffer | string | null; + addListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "aborted", hadError: boolean, code: number): boolean; + emit(event: "close"): boolean; + emit(event: "data", chunk: Buffer | string): boolean; + emit(event: "end"): boolean; + emit(event: "readable"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => 
void): this; + prependOnceListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v8.4.0 + */ + export class Http2ServerResponse extends stream.Writable { + constructor(stream: ServerHttp2Stream); + /** + * See `response.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * Boolean value that indicates whether the response has completed. Starts + * as `false`. After `response.end()` executes, the value will be `true`. + * @since v8.4.0 + * @deprecated Since v13.4.0,v12.16.0 - Use `writableEnded`. + */ + readonly finished: boolean; + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * A reference to the original HTTP2 `request` object. + * @since v15.7.0 + */ + readonly req: Http2ServerRequest; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `response.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on`response.stream`. + * + * `setTimeout` method will be called on `response.stream.session`. + * + * `pause`, `read`, `resume`, and `write` will throw an error with code`ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer((req, res) => { + * const ip = req.socket.remoteAddress; + * const port = req.socket.remotePort; + * res.end(`Your IP address is ${ip} and your source port is ${port}.`); + * }).listen(3000); + * ``` + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the response. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * When true, the Date header will be automatically generated and sent in + * the response if it is not already present in the headers. Defaults to true. + * + * This should only be disabled for testing; HTTP requires the Date header + * in responses. + * @since v8.4.0 + */ + sendDate: boolean; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v8.4.0 + */ + statusCode: number; + /** + * Status message is not supported by HTTP/2 (RFC 7540 8.1.2.4). It returns + * an empty string. 
+ * @since v8.4.0 + */ + statusMessage: ""; + /** + * This method adds HTTP trailing headers (a header but at the end of the + * message) to the response. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + addTrailers(trailers: OutgoingHttpHeaders): void; + /** + * This method signals to the server that all of the response headers and body + * have been sent; that server should consider this message complete. + * The method, `response.end()`, MUST be called on each response. + * + * If `data` is specified, it is equivalent to calling `response.write(data, encoding)` followed by `response.end(callback)`. + * + * If `callback` is specified, it will be called when the response stream + * is finished. + * @since v8.4.0 + */ + end(callback?: () => void): this; + end(data: string | Uint8Array, callback?: () => void): this; + end(data: string | Uint8Array, encoding: BufferEncoding, callback?: () => void): this; + /** + * Reads out a header that has already been queued but not sent to the client. + * The name is case-insensitive. + * + * ```js + * const contentType = response.getHeader('content-type'); + * ``` + * @since v8.4.0 + */ + getHeader(name: string): string; + /** + * Returns an array containing the unique names of the current outgoing headers. + * All header names are lowercase. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = response.getHeaderNames(); + * // headerNames === ['foo', 'set-cookie'] + * ``` + * @since v8.4.0 + */ + getHeaderNames(): string[]; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow copy + * is used, array values may be mutated without additional calls to various + * header-related http module methods. The keys of the returned object are the + * header names and the values are the respective header values. All header names + * are lowercase. + * + * The object returned by the `response.getHeaders()` method _does not_prototypically inherit from the JavaScript `Object`. This means that typical`Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = response.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v8.4.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name matching is case-insensitive. + * + * ```js + * const hasContentType = response.hasHeader('content-type'); + * ``` + * @since v8.4.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that has been queued for implicit sending. + * + * ```js + * response.removeHeader('Content-Encoding'); + * ``` + * @since v8.4.0 + */ + removeHeader(name: string): void; + /** + * Sets a single header value for implicit headers. If this header already exists + * in the to-be-sent headers, its value will be replaced. Use an array of strings + * here to send multiple headers with the same name. 
+ * + * ```js + * response.setHeader('Content-Type', 'text/html; charset=utf-8'); + * ``` + * + * or + * + * ```js + * response.setHeader('Set-Cookie', ['type=ninja', 'language=javascript']); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * @since v8.4.0 + */ + setHeader(name: string, value: number | string | readonly string[]): void; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream` s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'`events, timed out sockets must be handled explicitly. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * If this method is called and `response.writeHead()` has not been called, + * it will switch to implicit header mode and flush the implicit headers. + * + * This sends a chunk of the response body. This method may + * be called multiple times to provide successive parts of the body. + * + * In the `node:http` module, the response body is omitted when the + * request is a HEAD request. Similarly, the `204` and `304` responses _must not_ include a message body. + * + * `chunk` can be a string or a buffer. If `chunk` is a string, + * the second parameter specifies how to encode it into a byte stream. + * By default the `encoding` is `'utf8'`. `callback` will be called when this chunk + * of data is flushed. + * + * This is the raw HTTP body and has nothing to do with higher-level multi-part + * body encodings that may be used. + * + * The first time `response.write()` is called, it will send the buffered + * header information and the first chunk of the body to the client. The second + * time `response.write()` is called, Node.js assumes data will be streamed, + * and sends the new data separately. That is, the response is buffered up to the + * first chunk of the body. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is free again. + * @since v8.4.0 + */ + write(chunk: string | Uint8Array, callback?: (err: Error) => void): boolean; + write(chunk: string | Uint8Array, encoding: BufferEncoding, callback?: (err: Error) => void): boolean; + /** + * Sends a status `100 Continue` to the client, indicating that the request body + * should be sent. See the `'checkContinue'` event on `Http2Server` and`Http2SecureServer`. + * @since v8.4.0 + */ + writeContinue(): void; + /** + * Sends a status `103 Early Hints` to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. 
+ * The `hints` is an object containing the values of headers to be sent with + * early hints message. + * + * **Example** + * + * ```js + * const earlyHintsLink = '; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '; rel=preload; as=style', + * '; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * }); + * ``` + * @since v18.11.0 + */ + writeEarlyHints(hints: Record): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. + * + * Returns a reference to the `Http2ServerResponse`, so that calls can be chained. + * + * For compatibility with `HTTP/1`, a human-readable `statusMessage` may be + * passed as the second argument. However, because the `statusMessage` has no + * meaning within HTTP/2, the argument will have no effect and a process warning + * will be emitted. + * + * ```js + * const body = 'hello world'; + * response.writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain; charset=utf-8', + * }); + * ``` + * + * `Content-Length` is given in bytes not characters. The`Buffer.byteLength()` API may be used to determine the number of bytes in a + * given encoding. On outbound messages, Node.js does not check if Content-Length + * and the length of the body being transmitted are equal or not. However, when + * receiving messages, Node.js will automatically reject messages when the`Content-Length` does not match the actual payload size. + * + * This method may be called at most one time on a message before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + writeHead(statusCode: number, headers?: OutgoingHttpHeaders): this; + writeHead(statusCode: number, statusMessage: string, headers?: OutgoingHttpHeaders): this; + /** + * Call `http2stream.pushStream()` with the given headers, and wrap the + * given `Http2Stream` on a newly created `Http2ServerResponse` as the callback + * parameter if successful. When `Http2ServerRequest` is closed, the callback is + * called with an error `ERR_HTTP2_INVALID_STREAM`. 
+ * @since v8.4.0 + * @param headers An object describing the headers + * @param callback Called once `http2stream.pushStream()` is finished, or either when the attempt to create the pushed `Http2Stream` has failed or has been rejected, or the state of + * `Http2ServerRequest` is closed prior to calling the `http2stream.pushStream()` method + */ + createPushResponse( + headers: OutgoingHttpHeaders, + callback: (err: Error | null, res: Http2ServerResponse) => void, + ): void; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "drain"): boolean; + emit(event: "error", error: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pipe", src: stream.Readable): boolean; + emit(event: "unpipe", src: stream.Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export namespace constants { + const NGHTTP2_SESSION_SERVER: number; + const NGHTTP2_SESSION_CLIENT: number; + const NGHTTP2_STREAM_STATE_IDLE: number; + const NGHTTP2_STREAM_STATE_OPEN: number; + const NGHTTP2_STREAM_STATE_RESERVED_LOCAL: number; + const NGHTTP2_STREAM_STATE_RESERVED_REMOTE: number; + const 
NGHTTP2_STREAM_STATE_HALF_CLOSED_LOCAL: number; + const NGHTTP2_STREAM_STATE_HALF_CLOSED_REMOTE: number; + const NGHTTP2_STREAM_STATE_CLOSED: number; + const NGHTTP2_NO_ERROR: number; + const NGHTTP2_PROTOCOL_ERROR: number; + const NGHTTP2_INTERNAL_ERROR: number; + const NGHTTP2_FLOW_CONTROL_ERROR: number; + const NGHTTP2_SETTINGS_TIMEOUT: number; + const NGHTTP2_STREAM_CLOSED: number; + const NGHTTP2_FRAME_SIZE_ERROR: number; + const NGHTTP2_REFUSED_STREAM: number; + const NGHTTP2_CANCEL: number; + const NGHTTP2_COMPRESSION_ERROR: number; + const NGHTTP2_CONNECT_ERROR: number; + const NGHTTP2_ENHANCE_YOUR_CALM: number; + const NGHTTP2_INADEQUATE_SECURITY: number; + const NGHTTP2_HTTP_1_1_REQUIRED: number; + const NGHTTP2_ERR_FRAME_SIZE_ERROR: number; + const NGHTTP2_FLAG_NONE: number; + const NGHTTP2_FLAG_END_STREAM: number; + const NGHTTP2_FLAG_END_HEADERS: number; + const NGHTTP2_FLAG_ACK: number; + const NGHTTP2_FLAG_PADDED: number; + const NGHTTP2_FLAG_PRIORITY: number; + const DEFAULT_SETTINGS_HEADER_TABLE_SIZE: number; + const DEFAULT_SETTINGS_ENABLE_PUSH: number; + const DEFAULT_SETTINGS_INITIAL_WINDOW_SIZE: number; + const DEFAULT_SETTINGS_MAX_FRAME_SIZE: number; + const MAX_MAX_FRAME_SIZE: number; + const MIN_MAX_FRAME_SIZE: number; + const MAX_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_DEFAULT_WEIGHT: number; + const NGHTTP2_SETTINGS_HEADER_TABLE_SIZE: number; + const NGHTTP2_SETTINGS_ENABLE_PUSH: number; + const NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS: number; + const NGHTTP2_SETTINGS_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_SETTINGS_MAX_FRAME_SIZE: number; + const NGHTTP2_SETTINGS_MAX_HEADER_LIST_SIZE: number; + const PADDING_STRATEGY_NONE: number; + const PADDING_STRATEGY_MAX: number; + const PADDING_STRATEGY_CALLBACK: number; + const HTTP2_HEADER_STATUS: string; + const HTTP2_HEADER_METHOD: string; + const HTTP2_HEADER_AUTHORITY: string; + const HTTP2_HEADER_SCHEME: string; + const HTTP2_HEADER_PATH: string; + const HTTP2_HEADER_ACCEPT_CHARSET: string; + const HTTP2_HEADER_ACCEPT_ENCODING: string; + const HTTP2_HEADER_ACCEPT_LANGUAGE: string; + const HTTP2_HEADER_ACCEPT_RANGES: string; + const HTTP2_HEADER_ACCEPT: string; + const HTTP2_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN: string; + const HTTP2_HEADER_AGE: string; + const HTTP2_HEADER_ALLOW: string; + const HTTP2_HEADER_AUTHORIZATION: string; + const HTTP2_HEADER_CACHE_CONTROL: string; + const HTTP2_HEADER_CONNECTION: string; + const HTTP2_HEADER_CONTENT_DISPOSITION: string; + const HTTP2_HEADER_CONTENT_ENCODING: string; + const HTTP2_HEADER_CONTENT_LANGUAGE: string; + const HTTP2_HEADER_CONTENT_LENGTH: string; + const HTTP2_HEADER_CONTENT_LOCATION: string; + const HTTP2_HEADER_CONTENT_MD5: string; + const HTTP2_HEADER_CONTENT_RANGE: string; + const HTTP2_HEADER_CONTENT_TYPE: string; + const HTTP2_HEADER_COOKIE: string; + const HTTP2_HEADER_DATE: string; + const HTTP2_HEADER_ETAG: string; + const HTTP2_HEADER_EXPECT: string; + const HTTP2_HEADER_EXPIRES: string; + const HTTP2_HEADER_FROM: string; + const HTTP2_HEADER_HOST: string; + const HTTP2_HEADER_IF_MATCH: string; + const HTTP2_HEADER_IF_MODIFIED_SINCE: string; + const HTTP2_HEADER_IF_NONE_MATCH: string; + const HTTP2_HEADER_IF_RANGE: string; + const HTTP2_HEADER_IF_UNMODIFIED_SINCE: string; + const HTTP2_HEADER_LAST_MODIFIED: string; + const HTTP2_HEADER_LINK: string; + const HTTP2_HEADER_LOCATION: string; + const HTTP2_HEADER_MAX_FORWARDS: string; + const HTTP2_HEADER_PREFER: string; + const HTTP2_HEADER_PROXY_AUTHENTICATE: string; + const 
HTTP2_HEADER_PROXY_AUTHORIZATION: string; + const HTTP2_HEADER_RANGE: string; + const HTTP2_HEADER_REFERER: string; + const HTTP2_HEADER_REFRESH: string; + const HTTP2_HEADER_RETRY_AFTER: string; + const HTTP2_HEADER_SERVER: string; + const HTTP2_HEADER_SET_COOKIE: string; + const HTTP2_HEADER_STRICT_TRANSPORT_SECURITY: string; + const HTTP2_HEADER_TRANSFER_ENCODING: string; + const HTTP2_HEADER_TE: string; + const HTTP2_HEADER_UPGRADE: string; + const HTTP2_HEADER_USER_AGENT: string; + const HTTP2_HEADER_VARY: string; + const HTTP2_HEADER_VIA: string; + const HTTP2_HEADER_WWW_AUTHENTICATE: string; + const HTTP2_HEADER_HTTP2_SETTINGS: string; + const HTTP2_HEADER_KEEP_ALIVE: string; + const HTTP2_HEADER_PROXY_CONNECTION: string; + const HTTP2_METHOD_ACL: string; + const HTTP2_METHOD_BASELINE_CONTROL: string; + const HTTP2_METHOD_BIND: string; + const HTTP2_METHOD_CHECKIN: string; + const HTTP2_METHOD_CHECKOUT: string; + const HTTP2_METHOD_CONNECT: string; + const HTTP2_METHOD_COPY: string; + const HTTP2_METHOD_DELETE: string; + const HTTP2_METHOD_GET: string; + const HTTP2_METHOD_HEAD: string; + const HTTP2_METHOD_LABEL: string; + const HTTP2_METHOD_LINK: string; + const HTTP2_METHOD_LOCK: string; + const HTTP2_METHOD_MERGE: string; + const HTTP2_METHOD_MKACTIVITY: string; + const HTTP2_METHOD_MKCALENDAR: string; + const HTTP2_METHOD_MKCOL: string; + const HTTP2_METHOD_MKREDIRECTREF: string; + const HTTP2_METHOD_MKWORKSPACE: string; + const HTTP2_METHOD_MOVE: string; + const HTTP2_METHOD_OPTIONS: string; + const HTTP2_METHOD_ORDERPATCH: string; + const HTTP2_METHOD_PATCH: string; + const HTTP2_METHOD_POST: string; + const HTTP2_METHOD_PRI: string; + const HTTP2_METHOD_PROPFIND: string; + const HTTP2_METHOD_PROPPATCH: string; + const HTTP2_METHOD_PUT: string; + const HTTP2_METHOD_REBIND: string; + const HTTP2_METHOD_REPORT: string; + const HTTP2_METHOD_SEARCH: string; + const HTTP2_METHOD_TRACE: string; + const HTTP2_METHOD_UNBIND: string; + const HTTP2_METHOD_UNCHECKOUT: string; + const HTTP2_METHOD_UNLINK: string; + const HTTP2_METHOD_UNLOCK: string; + const HTTP2_METHOD_UPDATE: string; + const HTTP2_METHOD_UPDATEREDIRECTREF: string; + const HTTP2_METHOD_VERSION_CONTROL: string; + const HTTP_STATUS_CONTINUE: number; + const HTTP_STATUS_SWITCHING_PROTOCOLS: number; + const HTTP_STATUS_PROCESSING: number; + const HTTP_STATUS_OK: number; + const HTTP_STATUS_CREATED: number; + const HTTP_STATUS_ACCEPTED: number; + const HTTP_STATUS_NON_AUTHORITATIVE_INFORMATION: number; + const HTTP_STATUS_NO_CONTENT: number; + const HTTP_STATUS_RESET_CONTENT: number; + const HTTP_STATUS_PARTIAL_CONTENT: number; + const HTTP_STATUS_MULTI_STATUS: number; + const HTTP_STATUS_ALREADY_REPORTED: number; + const HTTP_STATUS_IM_USED: number; + const HTTP_STATUS_MULTIPLE_CHOICES: number; + const HTTP_STATUS_MOVED_PERMANENTLY: number; + const HTTP_STATUS_FOUND: number; + const HTTP_STATUS_SEE_OTHER: number; + const HTTP_STATUS_NOT_MODIFIED: number; + const HTTP_STATUS_USE_PROXY: number; + const HTTP_STATUS_TEMPORARY_REDIRECT: number; + const HTTP_STATUS_PERMANENT_REDIRECT: number; + const HTTP_STATUS_BAD_REQUEST: number; + const HTTP_STATUS_UNAUTHORIZED: number; + const HTTP_STATUS_PAYMENT_REQUIRED: number; + const HTTP_STATUS_FORBIDDEN: number; + const HTTP_STATUS_NOT_FOUND: number; + const HTTP_STATUS_METHOD_NOT_ALLOWED: number; + const HTTP_STATUS_NOT_ACCEPTABLE: number; + const HTTP_STATUS_PROXY_AUTHENTICATION_REQUIRED: number; + const HTTP_STATUS_REQUEST_TIMEOUT: number; + const HTTP_STATUS_CONFLICT: number; + 
const HTTP_STATUS_GONE: number; + const HTTP_STATUS_LENGTH_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_FAILED: number; + const HTTP_STATUS_PAYLOAD_TOO_LARGE: number; + const HTTP_STATUS_URI_TOO_LONG: number; + const HTTP_STATUS_UNSUPPORTED_MEDIA_TYPE: number; + const HTTP_STATUS_RANGE_NOT_SATISFIABLE: number; + const HTTP_STATUS_EXPECTATION_FAILED: number; + const HTTP_STATUS_TEAPOT: number; + const HTTP_STATUS_MISDIRECTED_REQUEST: number; + const HTTP_STATUS_UNPROCESSABLE_ENTITY: number; + const HTTP_STATUS_LOCKED: number; + const HTTP_STATUS_FAILED_DEPENDENCY: number; + const HTTP_STATUS_UNORDERED_COLLECTION: number; + const HTTP_STATUS_UPGRADE_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_REQUIRED: number; + const HTTP_STATUS_TOO_MANY_REQUESTS: number; + const HTTP_STATUS_REQUEST_HEADER_FIELDS_TOO_LARGE: number; + const HTTP_STATUS_UNAVAILABLE_FOR_LEGAL_REASONS: number; + const HTTP_STATUS_INTERNAL_SERVER_ERROR: number; + const HTTP_STATUS_NOT_IMPLEMENTED: number; + const HTTP_STATUS_BAD_GATEWAY: number; + const HTTP_STATUS_SERVICE_UNAVAILABLE: number; + const HTTP_STATUS_GATEWAY_TIMEOUT: number; + const HTTP_STATUS_HTTP_VERSION_NOT_SUPPORTED: number; + const HTTP_STATUS_VARIANT_ALSO_NEGOTIATES: number; + const HTTP_STATUS_INSUFFICIENT_STORAGE: number; + const HTTP_STATUS_LOOP_DETECTED: number; + const HTTP_STATUS_BANDWIDTH_LIMIT_EXCEEDED: number; + const HTTP_STATUS_NOT_EXTENDED: number; + const HTTP_STATUS_NETWORK_AUTHENTICATION_REQUIRED: number; + } + /** + * This symbol can be set as a property on the HTTP/2 headers object with + * an array value in order to provide a list of headers considered sensitive. + */ + export const sensitiveHeaders: symbol; + /** + * Returns an object containing the default settings for an `Http2Session`instance. This method returns a new object instance every time it is called + * so instances returned may be safely modified for use. + * @since v8.4.0 + */ + export function getDefaultSettings(): Settings; + /** + * Returns a `Buffer` instance containing serialized representation of the given + * HTTP/2 settings as specified in the [HTTP/2](https://tools.ietf.org/html/rfc7540) specification. This is intended + * for use with the `HTTP2-Settings` header field. + * + * ```js + * const http2 = require('node:http2'); + * + * const packed = http2.getPackedSettings({ enablePush: false }); + * + * console.log(packed.toString('base64')); + * // Prints: AAIAAAAA + * ``` + * @since v8.4.0 + */ + export function getPackedSettings(settings: Settings): Buffer; + /** + * Returns a `HTTP/2 Settings Object` containing the deserialized settings from + * the given `Buffer` as generated by `http2.getPackedSettings()`. + * @since v8.4.0 + * @param buf The packed settings. + */ + export function getUnpackedSettings(buf: Uint8Array): Settings; + /** + * Returns a `net.Server` instance that creates and manages `Http2Session`instances. + * + * Since there are no browsers known that support [unencrypted HTTP/2](https://http2.github.io/faq/#does-http2-require-encryption), the use of {@link createSecureServer} is necessary when + * communicating + * with browser clients. + * + * ```js + * const http2 = require('node:http2'); + * + * // Create an unencrypted HTTP/2 server. + * // Since there are no browsers known that support + * // unencrypted HTTP/2, the use of `http2.createSecureServer()` + * // is necessary when communicating with browser clients. 
+ * const server = http2.createServer(); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200, + * }); + * stream.end('
<h1>Hello World</h1>
'); + * }); + * + * server.listen(8000); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createServer( + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2Server; + export function createServer( + options: ServerOptions, + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2Server; + /** + * Returns a `tls.Server` instance that creates and manages `Http2Session`instances. + * + * ```js + * const http2 = require('node:http2'); + * const fs = require('node:fs'); + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem'), + * }; + * + * // Create a secure HTTP/2 server + * const server = http2.createSecureServer(options); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200, + * }); + * stream.end('
<h1>Hello World</h1>
'); + * }); + * + * server.listen(8443); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createSecureServer( + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2SecureServer; + export function createSecureServer( + options: SecureServerOptions, + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2SecureServer; + /** + * Returns a `ClientHttp2Session` instance. + * + * ```js + * const http2 = require('node:http2'); + * const client = http2.connect('https://localhost:1234'); + * + * // Use the client + * + * client.close(); + * ``` + * @since v8.4.0 + * @param authority The remote HTTP/2 server to connect to. This must be in the form of a minimal, valid URL with the `http://` or `https://` prefix, host name, and IP port (if a non-default port + * is used). Userinfo (user ID and password), path, querystring, and fragment details in the URL will be ignored. + * @param listener Will be registered as a one-time listener of the {@link 'connect'} event. + */ + export function connect( + authority: string | url.URL, + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): ClientHttp2Session; + export function connect( + authority: string | url.URL, + options?: ClientSessionOptions | SecureClientSessionOptions, + listener?: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): ClientHttp2Session; +} +declare module "node:http2" { + export * from "http2"; +} diff --git a/node_modules/@types/node/https.d.ts b/node_modules/@types/node/https.d.ts new file mode 100644 index 0000000..36ae5b2 --- /dev/null +++ b/node_modules/@types/node/https.d.ts @@ -0,0 +1,550 @@ +/** + * HTTPS is the HTTP protocol over TLS/SSL. In Node.js this is implemented as a + * separate module. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/https.js) + */ +declare module "https" { + import { Duplex } from "node:stream"; + import * as tls from "node:tls"; + import * as http from "node:http"; + import { URL } from "node:url"; + type ServerOptions< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > = tls.SecureContextOptions & tls.TlsOptions & http.ServerOptions; + type RequestOptions = + & http.RequestOptions + & tls.SecureContextOptions + & { + checkServerIdentity?: typeof tls.checkServerIdentity | undefined; + rejectUnauthorized?: boolean | undefined; // Defaults to true + servername?: string | undefined; // SNI TLS Extension + }; + interface AgentOptions extends http.AgentOptions, tls.ConnectionOptions { + rejectUnauthorized?: boolean | undefined; + maxCachedSessions?: number | undefined; + } + /** + * An `Agent` object for HTTPS similar to `http.Agent`. See {@link request} for more information. + * @since v0.4.5 + */ + class Agent extends http.Agent { + constructor(options?: AgentOptions); + options: AgentOptions; + } + interface Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > extends http.Server {} + /** + * See `http.Server` for more information. 
+ * @since v0.3.4 + */ + class Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > extends tls.Server { + constructor(requestListener?: http.RequestListener); + constructor( + options: ServerOptions, + requestListener?: http.RequestListener, + ); + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + addListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + addListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + addListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + addListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Duplex) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "checkContinue", listener: http.RequestListener): this; + addListener(event: "checkExpectation", listener: http.RequestListener): this; + addListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + addListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + addListener(event: "request", listener: http.RequestListener): this; + addListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: "keylog", line: Buffer, tlsSocket: tls.TLSSocket): boolean; + emit( + event: "newSession", + sessionId: Buffer, + sessionData: Buffer, + callback: (err: Error, resp: Buffer) => void, + ): boolean; + emit( + event: "OCSPRequest", + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ): boolean; + emit(event: "resumeSession", sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void): boolean; + emit(event: "secureConnection", tlsSocket: tls.TLSSocket): boolean; + emit(event: "tlsClientError", err: Error, tlsSocket: tls.TLSSocket): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Duplex): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit( + event: "checkContinue", + req: InstanceType, + res: InstanceType & { + req: InstanceType; + }, + ): boolean; + emit( + event: "checkExpectation", + req: InstanceType, + res: InstanceType & { + req: InstanceType; + }, + ): boolean; + emit(event: "clientError", err: Error, socket: Duplex): boolean; + emit(event: "connect", req: InstanceType, socket: Duplex, head: Buffer): boolean; + emit( + event: 
"request", + req: InstanceType, + res: InstanceType & { + req: InstanceType; + }, + ): boolean; + emit(event: "upgrade", req: InstanceType, socket: Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + on( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + on( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + on( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + on(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + on(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Duplex) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "checkContinue", listener: http.RequestListener): this; + on(event: "checkExpectation", listener: http.RequestListener): this; + on(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + on(event: "connect", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + on(event: "request", listener: http.RequestListener): this; + on(event: "upgrade", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + once( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + once( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + once( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + once(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + once(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Duplex) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "checkContinue", listener: http.RequestListener): this; + once(event: "checkExpectation", listener: http.RequestListener): this; + once(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + once(event: "connect", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: "request", listener: http.RequestListener): this; + once(event: "upgrade", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + 
prependListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Duplex) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "checkContinue", listener: http.RequestListener): this; + prependListener(event: "checkExpectation", listener: http.RequestListener): this; + prependListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + prependListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependListener(event: "request", listener: http.RequestListener): this; + prependListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + prependOnceListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependOnceListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependOnceListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Duplex) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "checkContinue", listener: http.RequestListener): this; + prependOnceListener(event: "checkExpectation", listener: http.RequestListener): this; + prependOnceListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + prependOnceListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: "request", listener: http.RequestListener): this; + prependOnceListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + } + /** + * ```js + * // curl -k https://localhost:8000/ + * const https = require('node:https'); + * const fs = require('node:fs'); + * + * const options = { + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * }; + * + * https.createServer(options, (req, res) => { + * 
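+ *   // `req` and `res` are the same http.IncomingMessage / http.ServerResponse
+ *   // pair an http.Server hands out; only the transport (a tls.TLSSocket) differs.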
res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * + * Or + * + * ```js + * const https = require('node:https'); + * const fs = require('node:fs'); + * + * const options = { + * pfx: fs.readFileSync('test/fixtures/test_cert.pfx'), + * passphrase: 'sample', + * }; + * + * https.createServer(options, (req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * @since v0.3.4 + * @param options Accepts `options` from `createServer`, `createSecureContext` and `createServer`. + * @param requestListener A listener to be added to the `'request'` event. + */ + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >(requestListener?: http.RequestListener): Server; + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >( + options: ServerOptions, + requestListener?: http.RequestListener, + ): Server; + /** + * Makes a request to a secure web server. + * + * The following additional `options` from `tls.connect()` are also accepted:`ca`, `cert`, `ciphers`, `clientCertEngine`, `crl`, `dhparam`, `ecdhCurve`,`honorCipherOrder`, `key`, `passphrase`, + * `pfx`, `rejectUnauthorized`,`secureOptions`, `secureProtocol`, `servername`, `sessionIdContext`,`highWaterMark`. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * `https.request()` returns an instance of the `http.ClientRequest` class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * const https = require('node:https'); + * + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * }; + * + * const req = https.request(options, (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(e); + * }); + * req.end(); + * ``` + * + * Example using options from `tls.connect()`: + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * }; + * options.agent = new https.Agent(options); + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Alternatively, opt out of connection pooling by not using an `Agent`. + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * agent: false, + * }; + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('https://abc:xyz@example.com'); + * + * const req = https.request(options, (res) => { + * // ... 
+ * }); + * ``` + * + * Example pinning on certificate fingerprint, or the public key (similar to`pin-sha256`): + * + * ```js + * const tls = require('node:tls'); + * const https = require('node:https'); + * const crypto = require('node:crypto'); + * + * function sha256(s) { + * return crypto.createHash('sha256').update(s).digest('base64'); + * } + * const options = { + * hostname: 'github.com', + * port: 443, + * path: '/', + * method: 'GET', + * checkServerIdentity: function(host, cert) { + * // Make sure the certificate is issued to the host we are connected to + * const err = tls.checkServerIdentity(host, cert); + * if (err) { + * return err; + * } + * + * // Pin the public key, similar to HPKP pin-sha256 pinning + * const pubkey256 = 'pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU='; + * if (sha256(cert.pubkey) !== pubkey256) { + * const msg = 'Certificate verification error: ' + + * `The public key of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // Pin the exact certificate, rather than the pub key + * const cert256 = '25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:' + + * 'D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16'; + * if (cert.fingerprint256 !== cert256) { + * const msg = 'Certificate verification error: ' + + * `The certificate of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // This loop is informational only. + * // Print the certificate and public key fingerprints of all certs in the + * // chain. Its common to pin the public key of the issuer on the public + * // internet, while pinning the public key of the service in sensitive + * // environments. + * do { + * console.log('Subject Common Name:', cert.subject.CN); + * console.log(' Certificate SHA256 fingerprint:', cert.fingerprint256); + * + * hash = crypto.createHash('sha256'); + * console.log(' Public key ping-sha256:', sha256(cert.pubkey)); + * + * lastprint256 = cert.fingerprint256; + * cert = cert.issuerCertificate; + * } while (cert.fingerprint256 !== lastprint256); + * + * }, + * }; + * + * options.agent = new https.Agent(options); + * const req = https.request(options, (res) => { + * console.log('All OK. Server matched our pinned cert or public key'); + * console.log('statusCode:', res.statusCode); + * // Print the HPKP values + * console.log('headers:', res.headers['public-key-pins']); + * + * res.on('data', (d) => {}); + * }); + * + * req.on('error', (e) => { + * console.error(e.message); + * }); + * req.end(); + * ``` + * + * Outputs for example: + * + * ```text + * Subject Common Name: github.com + * Certificate SHA256 fingerprint: 25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16 + * Public key ping-sha256: pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU= + * Subject Common Name: DigiCert SHA2 Extended Validation Server CA + * Certificate SHA256 fingerprint: 40:3E:06:2A:26:53:05:91:13:28:5B:AF:80:A0:D4:AE:42:2C:84:8C:9F:78:FA:D0:1F:C9:4B:C5:B8:7F:EF:1A + * Public key ping-sha256: RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho= + * Subject Common Name: DigiCert High Assurance EV Root CA + * Certificate SHA256 fingerprint: 74:31:E5:F4:C3:C1:CE:46:90:77:4F:0B:61:E0:54:40:88:3B:A9:A0:1E:D0:0B:A6:AB:D7:80:6E:D3:B1:18:CF + * Public key ping-sha256: WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18= + * All OK. 
Server matched our pinned cert or public key + * statusCode: 200 + * headers: max-age=0; pin-sha256="WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18="; pin-sha256="RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho="; + * pin-sha256="k2v657xBsOVe1PQRwOsHsw3bsGT2VzIqz5K+59sNQws="; pin-sha256="K87oWBWM9UZfyddvDfoxL+8lpNyoUB2ptGtn0fv6G2Q="; pin-sha256="IQBnNBEiFuhj+8x6X8XLgh01V9Ic5/V3IRQLNFFc7v4="; + * pin-sha256="iie1VXtL7HzAMF+/PVPR9xzT80kQxdZeJ+zduCB3uj0="; pin-sha256="LvRiGEjRqfzurezaWuj8Wie2gyHMrW5Q06LspMnox7A="; includeSubDomains + * ``` + * @since v0.3.6 + * @param options Accepts all `options` from `request`, with some differences in default values: + */ + function request( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + /** + * Like `http.get()` but for HTTPS. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * ```js + * const https = require('node:https'); + * + * https.get('https://encrypted.google.com/', (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * + * }).on('error', (e) => { + * console.error(e); + * }); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the `method` always set to `GET`. + */ + function get( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function get( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + let globalAgent: Agent; +} +declare module "node:https" { + export * from "https"; +} diff --git a/node_modules/@types/node/index.d.ts b/node_modules/@types/node/index.d.ts new file mode 100644 index 0000000..a596cad --- /dev/null +++ b/node_modules/@types/node/index.d.ts @@ -0,0 +1,88 @@ +/** + * License for programmatically and manually incorporated + * documentation aka. `JSDoc` from https://github.com/nodejs/node/tree/master/doc + * + * Copyright Node.js contributors. All rights reserved. + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. 
+ */ + +// NOTE: These definitions support NodeJS and TypeScript 4.9+. + +// Reference required types from the default lib: +/// +/// +/// +/// + +// Base definitions for all NodeJS modules that are not specific to any version of TypeScript: +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// + +/// diff --git a/node_modules/@types/node/inspector.d.ts b/node_modules/@types/node/inspector.d.ts new file mode 100644 index 0000000..3927b81 --- /dev/null +++ b/node_modules/@types/node/inspector.d.ts @@ -0,0 +1,2747 @@ +// Type definitions for inspector + +// These definitions are auto-generated. +// Please see https://github.com/DefinitelyTyped/DefinitelyTyped/pull/19330 +// for more information. + + +/** + * The `node:inspector` module provides an API for interacting with the V8 + * inspector. + * + * It can be accessed using: + * + * ```js + * import * as inspector from 'node:inspector/promises'; + * ``` + * + * or + * + * ```js + * import * as inspector from 'node:inspector'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/inspector.js) + */ +declare module 'inspector' { + import EventEmitter = require('node:events'); + interface InspectorNotification { + method: string; + params: T; + } + namespace Schema { + /** + * Description of the protocol domain. + */ + interface Domain { + /** + * Domain name. + */ + name: string; + /** + * Domain version. + */ + version: string; + } + interface GetDomainsReturnType { + /** + * List of supported domains. + */ + domains: Domain[]; + } + } + namespace Runtime { + /** + * Unique script identifier. + */ + type ScriptId = string; + /** + * Unique object identifier. + */ + type RemoteObjectId = string; + /** + * Primitive value which cannot be JSON-stringified. + */ + type UnserializableValue = string; + /** + * Mirror object referencing original JavaScript object. + */ + interface RemoteObject { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * Object class (constructor) name. Specified for object type values only. + */ + className?: string | undefined; + /** + * Remote object value in case of primitive values or JSON values (if it was requested). + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified does not have value, but gets this property. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * Unique object identifier (for non-primitive values). + */ + objectId?: RemoteObjectId | undefined; + /** + * Preview containing abbreviated property values. Specified for object type values only. + * @experimental + */ + preview?: ObjectPreview | undefined; + /** + * @experimental + */ + customPreview?: CustomPreview | undefined; + } + /** + * @experimental + */ + interface CustomPreview { + header: string; + hasBody: boolean; + formatterObjectId: RemoteObjectId; + bindRemoteObjectFunctionId: RemoteObjectId; + configObjectId?: RemoteObjectId | undefined; + } + /** + * Object containing abbreviated remote object value. + * @experimental + */ + interface ObjectPreview { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. 
+ */ + subtype?: string | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * True iff some of the properties or entries of the original object did not fit. + */ + overflow: boolean; + /** + * List of the properties. + */ + properties: PropertyPreview[]; + /** + * List of the entries. Specified for map and set subtype values only. + */ + entries?: EntryPreview[] | undefined; + } + /** + * @experimental + */ + interface PropertyPreview { + /** + * Property name. + */ + name: string; + /** + * Object type. Accessor means that the property itself is an accessor property. + */ + type: string; + /** + * User-friendly property value string. + */ + value?: string | undefined; + /** + * Nested value preview. + */ + valuePreview?: ObjectPreview | undefined; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + } + /** + * @experimental + */ + interface EntryPreview { + /** + * Preview of the key. Specified for map-like collection entries. + */ + key?: ObjectPreview | undefined; + /** + * Preview of the value. + */ + value: ObjectPreview; + } + /** + * Object property descriptor. + */ + interface PropertyDescriptor { + /** + * Property name or symbol description. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + /** + * True if the value associated with the property may be changed (data descriptors only). + */ + writable?: boolean | undefined; + /** + * A function which serves as a getter for the property, or undefined if there is no getter (accessor descriptors only). + */ + get?: RemoteObject | undefined; + /** + * A function which serves as a setter for the property, or undefined if there is no setter (accessor descriptors only). + */ + set?: RemoteObject | undefined; + /** + * True if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object. + */ + configurable: boolean; + /** + * True if this property shows up during enumeration of the properties on the corresponding object. + */ + enumerable: boolean; + /** + * True if the result was thrown during the evaluation. + */ + wasThrown?: boolean | undefined; + /** + * True if the property is owned for the object. + */ + isOwn?: boolean | undefined; + /** + * Property symbol object, if the property is of the symbol type. + */ + symbol?: RemoteObject | undefined; + } + /** + * Object internal property descriptor. This property isn't normally visible in JavaScript code. + */ + interface InternalPropertyDescriptor { + /** + * Conventional property name. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + } + /** + * Represents function call argument. Either remote object id objectId, primitive value, unserializable primitive value or neither of (for undefined) them should be specified. + */ + interface CallArgument { + /** + * Primitive value or serializable javascript object. + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * Remote object handle. + */ + objectId?: RemoteObjectId | undefined; + } + /** + * Id of an execution context. + */ + type ExecutionContextId = number; + /** + * Description of an isolated world. + */ + interface ExecutionContextDescription { + /** + * Unique id of the execution context. 
It can be used to specify in which execution context script evaluation should be performed. + */ + id: ExecutionContextId; + /** + * Execution context origin. + */ + origin: string; + /** + * Human readable name describing given context. + */ + name: string; + /** + * Embedder-specific auxiliary data. + */ + auxData?: {} | undefined; + } + /** + * Detailed information about exception (or error) that was thrown during script compilation or execution. + */ + interface ExceptionDetails { + /** + * Exception id. + */ + exceptionId: number; + /** + * Exception text, which should be used together with exception object when available. + */ + text: string; + /** + * Line number of the exception location (0-based). + */ + lineNumber: number; + /** + * Column number of the exception location (0-based). + */ + columnNumber: number; + /** + * Script ID of the exception location. + */ + scriptId?: ScriptId | undefined; + /** + * URL of the exception location, to be used when the script was not reported. + */ + url?: string | undefined; + /** + * JavaScript stack trace if available. + */ + stackTrace?: StackTrace | undefined; + /** + * Exception object if available. + */ + exception?: RemoteObject | undefined; + /** + * Identifier of the context where exception happened. + */ + executionContextId?: ExecutionContextId | undefined; + } + /** + * Number of milliseconds since epoch. + */ + type Timestamp = number; + /** + * Stack entry for runtime errors and assertions. + */ + interface CallFrame { + /** + * JavaScript function name. + */ + functionName: string; + /** + * JavaScript script id. + */ + scriptId: ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * JavaScript script line number (0-based). + */ + lineNumber: number; + /** + * JavaScript script column number (0-based). + */ + columnNumber: number; + } + /** + * Call frames for assertions or error messages. + */ + interface StackTrace { + /** + * String label of this stack trace. For async traces this may be a name of the function that initiated the async call. + */ + description?: string | undefined; + /** + * JavaScript function name. + */ + callFrames: CallFrame[]; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + */ + parent?: StackTrace | undefined; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + * @experimental + */ + parentId?: StackTraceId | undefined; + } + /** + * Unique identifier of current debugger. + * @experimental + */ + type UniqueDebuggerId = string; + /** + * If debuggerId is set stack trace comes from another debugger and can be resolved there. This allows to track cross-debugger calls. See Runtime.StackTrace and Debugger.paused for usages. + * @experimental + */ + interface StackTraceId { + id: string; + debuggerId?: UniqueDebuggerId | undefined; + } + interface EvaluateParameterType { + /** + * Expression to evaluate. + */ + expression: string; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Specifies in which execution context to perform evaluation. 
If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + contextId?: ExecutionContextId | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface AwaitPromiseParameterType { + /** + * Identifier of the promise. + */ + promiseObjectId: RemoteObjectId; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + } + interface CallFunctionOnParameterType { + /** + * Declaration of the function to call. + */ + functionDeclaration: string; + /** + * Identifier of the object to call function on. Either objectId or executionContextId should be specified. + */ + objectId?: RemoteObjectId | undefined; + /** + * Call arguments. All call arguments must belong to the same JavaScript world as the target object. + */ + arguments?: CallArgument[] | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + /** + * Specifies execution context which global object will be used to call function on. Either executionContextId or objectId should be specified. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. If objectGroup is not specified and objectId is, objectGroup will be inherited from object. + */ + objectGroup?: string | undefined; + } + interface GetPropertiesParameterType { + /** + * Identifier of the object to return properties for. + */ + objectId: RemoteObjectId; + /** + * If true, returns properties belonging only to the element itself, not to its prototype chain. + */ + ownProperties?: boolean | undefined; + /** + * If true, returns accessor properties (with getter/setter) only; internal properties are not returned either. + * @experimental + */ + accessorPropertiesOnly?: boolean | undefined; + /** + * Whether preview should be generated for the results. + * @experimental + */ + generatePreview?: boolean | undefined; + } + interface ReleaseObjectParameterType { + /** + * Identifier of the object to release. + */ + objectId: RemoteObjectId; + } + interface ReleaseObjectGroupParameterType { + /** + * Symbolic object group name. 
+ */ + objectGroup: string; + } + interface SetCustomObjectFormatterEnabledParameterType { + enabled: boolean; + } + interface CompileScriptParameterType { + /** + * Expression to compile. + */ + expression: string; + /** + * Source url to be set for the script. + */ + sourceURL: string; + /** + * Specifies whether the compiled script should be persisted. + */ + persistScript: boolean; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface RunScriptParameterType { + /** + * Id of the script to run. + */ + scriptId: ScriptId; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface QueryObjectsParameterType { + /** + * Identifier of the prototype to return objects for. + */ + prototypeObjectId: RemoteObjectId; + } + interface GlobalLexicalScopeNamesParameterType { + /** + * Specifies in which execution context to lookup global scope variables. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface EvaluateReturnType { + /** + * Evaluation result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface AwaitPromiseReturnType { + /** + * Promise result. Will contain rejected value if promise was rejected. + */ + result: RemoteObject; + /** + * Exception details if stack strace is available. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CallFunctionOnReturnType { + /** + * Call result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface GetPropertiesReturnType { + /** + * Object properties. + */ + result: PropertyDescriptor[]; + /** + * Internal object properties (only of the element itself). + */ + internalProperties?: InternalPropertyDescriptor[] | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CompileScriptReturnType { + /** + * Id of the script. + */ + scriptId?: ScriptId | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface RunScriptReturnType { + /** + * Run result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface QueryObjectsReturnType { + /** + * Array with objects. 
+ */ + objects: RemoteObject; + } + interface GlobalLexicalScopeNamesReturnType { + names: string[]; + } + interface ExecutionContextCreatedEventDataType { + /** + * A newly created execution context. + */ + context: ExecutionContextDescription; + } + interface ExecutionContextDestroyedEventDataType { + /** + * Id of the destroyed context + */ + executionContextId: ExecutionContextId; + } + interface ExceptionThrownEventDataType { + /** + * Timestamp of the exception. + */ + timestamp: Timestamp; + exceptionDetails: ExceptionDetails; + } + interface ExceptionRevokedEventDataType { + /** + * Reason describing why exception was revoked. + */ + reason: string; + /** + * The id of revoked exception, as reported in exceptionThrown. + */ + exceptionId: number; + } + interface ConsoleAPICalledEventDataType { + /** + * Type of the call. + */ + type: string; + /** + * Call arguments. + */ + args: RemoteObject[]; + /** + * Identifier of the context where the call was made. + */ + executionContextId: ExecutionContextId; + /** + * Call timestamp. + */ + timestamp: Timestamp; + /** + * Stack trace captured when the call was made. + */ + stackTrace?: StackTrace | undefined; + /** + * Console context descriptor for calls on non-default console context (not console.*): 'anonymous#unique-logger-id' for call on unnamed context, 'name#unique-logger-id' for call on named context. + * @experimental + */ + context?: string | undefined; + } + interface InspectRequestedEventDataType { + object: RemoteObject; + hints: {}; + } + } + namespace Debugger { + /** + * Breakpoint identifier. + */ + type BreakpointId = string; + /** + * Call frame identifier. + */ + type CallFrameId = string; + /** + * Location in the source code. + */ + interface Location { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + } + /** + * Location in the source code. + * @experimental + */ + interface ScriptPosition { + lineNumber: number; + columnNumber: number; + } + /** + * JavaScript call frame. Array of call frames form the call stack. + */ + interface CallFrame { + /** + * Call frame identifier. This identifier is only valid while the virtual machine is paused. + */ + callFrameId: CallFrameId; + /** + * Name of the JavaScript function called on this call frame. + */ + functionName: string; + /** + * Location in the source code. + */ + functionLocation?: Location | undefined; + /** + * Location in the source code. + */ + location: Location; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Scope chain for this call frame. + */ + scopeChain: Scope[]; + /** + * this object for this call frame. + */ + this: Runtime.RemoteObject; + /** + * The value being returned, if the function is at return point. + */ + returnValue?: Runtime.RemoteObject | undefined; + } + /** + * Scope description. + */ + interface Scope { + /** + * Scope type. + */ + type: string; + /** + * Object representing the scope. For global and with scopes it represents the actual object; for the rest of the scopes, it is artificial transient object enumerating scope variables as its properties. 
+ */ + object: Runtime.RemoteObject; + name?: string | undefined; + /** + * Location in the source code where scope starts + */ + startLocation?: Location | undefined; + /** + * Location in the source code where scope ends + */ + endLocation?: Location | undefined; + } + /** + * Search match for resource. + */ + interface SearchMatch { + /** + * Line number in resource content. + */ + lineNumber: number; + /** + * Line with match content. + */ + lineContent: string; + } + interface BreakLocation { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + type?: string | undefined; + } + interface SetBreakpointsActiveParameterType { + /** + * New value for breakpoints active state. + */ + active: boolean; + } + interface SetSkipAllPausesParameterType { + /** + * New value for skip pauses state. + */ + skip: boolean; + } + interface SetBreakpointByUrlParameterType { + /** + * Line number to set breakpoint at. + */ + lineNumber: number; + /** + * URL of the resources to set breakpoint on. + */ + url?: string | undefined; + /** + * Regex pattern for the URLs of the resources to set breakpoints on. Either url or urlRegex must be specified. + */ + urlRegex?: string | undefined; + /** + * Script hash of the resources to set breakpoint on. + */ + scriptHash?: string | undefined; + /** + * Offset in the line to set breakpoint at. + */ + columnNumber?: number | undefined; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface SetBreakpointParameterType { + /** + * Location to set breakpoint in. + */ + location: Location; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface RemoveBreakpointParameterType { + breakpointId: BreakpointId; + } + interface GetPossibleBreakpointsParameterType { + /** + * Start of range to search possible breakpoint locations in. + */ + start: Location; + /** + * End of range to search possible breakpoint locations in (excluding). When not specified, end of scripts is used as end of range. + */ + end?: Location | undefined; + /** + * Only consider locations which are in the same (non-nested) function as start. + */ + restrictToFunction?: boolean | undefined; + } + interface ContinueToLocationParameterType { + /** + * Location to continue to. + */ + location: Location; + targetCallFrames?: string | undefined; + } + interface PauseOnAsyncCallParameterType { + /** + * Debugger will pause when async call with given stack trace is started. + */ + parentStackTraceId: Runtime.StackTraceId; + } + interface StepIntoParameterType { + /** + * Debugger will issue additional Debugger.paused notification if any async task is scheduled before next pause. + * @experimental + */ + breakOnAsyncCall?: boolean | undefined; + } + interface GetStackTraceParameterType { + stackTraceId: Runtime.StackTraceId; + } + interface SearchInContentParameterType { + /** + * Id of the script to search in. + */ + scriptId: Runtime.ScriptId; + /** + * String to search for. + */ + query: string; + /** + * If true, search is case sensitive. 
+ */ + caseSensitive?: boolean | undefined; + /** + * If true, treats string parameter as regex. + */ + isRegex?: boolean | undefined; + } + interface SetScriptSourceParameterType { + /** + * Id of the script to edit. + */ + scriptId: Runtime.ScriptId; + /** + * New content of the script. + */ + scriptSource: string; + /** + * If true the change will not actually be applied. Dry run may be used to get result description without actually modifying the code. + */ + dryRun?: boolean | undefined; + } + interface RestartFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + } + interface GetScriptSourceParameterType { + /** + * Id of the script to get source for. + */ + scriptId: Runtime.ScriptId; + } + interface SetPauseOnExceptionsParameterType { + /** + * Pause on exceptions mode. + */ + state: string; + } + interface EvaluateOnCallFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + /** + * Expression to evaluate. + */ + expression: string; + /** + * String object group name to put result into (allows rapid releasing resulting object handles using releaseObjectGroup). + */ + objectGroup?: string | undefined; + /** + * Specifies whether command line API should be available to the evaluated expression, defaults to false. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether to throw an exception if side effect cannot be ruled out during evaluation. + */ + throwOnSideEffect?: boolean | undefined; + } + interface SetVariableValueParameterType { + /** + * 0-based number of scope as was listed in scope chain. Only 'local', 'closure' and 'catch' scope types are allowed. Other scopes could be manipulated manually. + */ + scopeNumber: number; + /** + * Variable name. + */ + variableName: string; + /** + * New variable value. + */ + newValue: Runtime.CallArgument; + /** + * Id of callframe that holds variable. + */ + callFrameId: CallFrameId; + } + interface SetReturnValueParameterType { + /** + * New return value. + */ + newValue: Runtime.CallArgument; + } + interface SetAsyncCallStackDepthParameterType { + /** + * Maximum depth of async call stacks. Setting to 0 will effectively disable collecting async call stacks (default). + */ + maxDepth: number; + } + interface SetBlackboxPatternsParameterType { + /** + * Array of regexps that will be used to check script url for blackbox state. + */ + patterns: string[]; + } + interface SetBlackboxedRangesParameterType { + /** + * Id of the script. + */ + scriptId: Runtime.ScriptId; + positions: ScriptPosition[]; + } + interface EnableReturnType { + /** + * Unique identifier of the debugger. + * @experimental + */ + debuggerId: Runtime.UniqueDebuggerId; + } + interface SetBreakpointByUrlReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * List of the locations this breakpoint resolved into upon addition. + */ + locations: Location[]; + } + interface SetBreakpointReturnType { + /** + * Id of the created breakpoint for further reference. 
+ */ + breakpointId: BreakpointId; + /** + * Location this breakpoint resolved into. + */ + actualLocation: Location; + } + interface GetPossibleBreakpointsReturnType { + /** + * List of the possible breakpoint locations. + */ + locations: BreakLocation[]; + } + interface GetStackTraceReturnType { + stackTrace: Runtime.StackTrace; + } + interface SearchInContentReturnType { + /** + * List of search matches. + */ + result: SearchMatch[]; + } + interface SetScriptSourceReturnType { + /** + * New stack trace in case editing has happened while VM was stopped. + */ + callFrames?: CallFrame[] | undefined; + /** + * Whether current call stack was modified after applying the changes. + */ + stackChanged?: boolean | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Exception details if any. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface RestartFrameReturnType { + /** + * New stack trace. + */ + callFrames: CallFrame[]; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + } + interface GetScriptSourceReturnType { + /** + * Script source. + */ + scriptSource: string; + } + interface EvaluateOnCallFrameReturnType { + /** + * Object wrapper for the evaluation result. + */ + result: Runtime.RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface ScriptParsedEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * True, if this script is generated as a result of the live edit operation. + * @experimental + */ + isLiveEdit?: boolean | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. + */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface ScriptFailedToParseEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. 
+ */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. + */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface BreakpointResolvedEventDataType { + /** + * Breakpoint unique identifier. + */ + breakpointId: BreakpointId; + /** + * Actual breakpoint location. + */ + location: Location; + } + interface PausedEventDataType { + /** + * Call stack the virtual machine stopped on. + */ + callFrames: CallFrame[]; + /** + * Pause reason. + */ + reason: string; + /** + * Object containing break-specific auxiliary properties. + */ + data?: {} | undefined; + /** + * Hit breakpoints IDs + */ + hitBreakpoints?: string[] | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Just scheduled async call will have this stack trace as parent stack during async execution. This field is available only after Debugger.stepInto call with breakOnAsynCall flag. + * @experimental + */ + asyncCallStackTraceId?: Runtime.StackTraceId | undefined; + } + } + namespace Console { + /** + * Console message. + */ + interface ConsoleMessage { + /** + * Message source. + */ + source: string; + /** + * Message severity. + */ + level: string; + /** + * Message text. + */ + text: string; + /** + * URL of the message origin. + */ + url?: string | undefined; + /** + * Line number in the resource that generated this message (1-based). + */ + line?: number | undefined; + /** + * Column number in the resource that generated this message (1-based). + */ + column?: number | undefined; + } + interface MessageAddedEventDataType { + /** + * Console message that has been added. + */ + message: ConsoleMessage; + } + } + namespace Profiler { + /** + * Profile node. Holds callsite information, execution statistics and child nodes. + */ + interface ProfileNode { + /** + * Unique id of the node. + */ + id: number; + /** + * Function location. + */ + callFrame: Runtime.CallFrame; + /** + * Number of samples where this node was on top of the call stack. + */ + hitCount?: number | undefined; + /** + * Child node ids. + */ + children?: number[] | undefined; + /** + * The reason of being not optimized. The function may be deoptimized or marked as don't optimize. + */ + deoptReason?: string | undefined; + /** + * An array of source position ticks. + */ + positionTicks?: PositionTickInfo[] | undefined; + } + /** + * Profile. + */ + interface Profile { + /** + * The list of profile nodes. First item is the root node. + */ + nodes: ProfileNode[]; + /** + * Profiling start timestamp in microseconds. 
+ */ + startTime: number; + /** + * Profiling end timestamp in microseconds. + */ + endTime: number; + /** + * Ids of samples top nodes. + */ + samples?: number[] | undefined; + /** + * Time intervals between adjacent samples in microseconds. The first delta is relative to the profile startTime. + */ + timeDeltas?: number[] | undefined; + } + /** + * Specifies a number of samples attributed to a certain source position. + */ + interface PositionTickInfo { + /** + * Source line number (1-based). + */ + line: number; + /** + * Number of samples attributed to the source line. + */ + ticks: number; + } + /** + * Coverage data for a source range. + */ + interface CoverageRange { + /** + * JavaScript script source offset for the range start. + */ + startOffset: number; + /** + * JavaScript script source offset for the range end. + */ + endOffset: number; + /** + * Collected execution count of the source range. + */ + count: number; + } + /** + * Coverage data for a JavaScript function. + */ + interface FunctionCoverage { + /** + * JavaScript function name. + */ + functionName: string; + /** + * Source ranges inside the function with coverage data. + */ + ranges: CoverageRange[]; + /** + * Whether coverage data for this function has block granularity. + */ + isBlockCoverage: boolean; + } + /** + * Coverage data for a JavaScript script. + */ + interface ScriptCoverage { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Functions contained in the script that has coverage data. + */ + functions: FunctionCoverage[]; + } + /** + * Describes a type collected during runtime. + * @experimental + */ + interface TypeObject { + /** + * Name of a type collected with type profiling. + */ + name: string; + } + /** + * Source offset and types for a parameter or return value. + * @experimental + */ + interface TypeProfileEntry { + /** + * Source offset of the parameter or end of function for return values. + */ + offset: number; + /** + * The types for this parameter or return value. + */ + types: TypeObject[]; + } + /** + * Type profile data collected during runtime for a JavaScript script. + * @experimental + */ + interface ScriptTypeProfile { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Type profile entries for parameters and return values of the functions in the script. + */ + entries: TypeProfileEntry[]; + } + interface SetSamplingIntervalParameterType { + /** + * New sampling interval in microseconds. + */ + interval: number; + } + interface StartPreciseCoverageParameterType { + /** + * Collect accurate call counts beyond simple 'covered' or 'not covered'. + */ + callCount?: boolean | undefined; + /** + * Collect block-based coverage. + */ + detailed?: boolean | undefined; + } + interface StopReturnType { + /** + * Recorded profile. + */ + profile: Profile; + } + interface TakePreciseCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface GetBestEffortCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface TakeTypeProfileReturnType { + /** + * Type profile for all scripts since startTypeProfile() was turned on. + */ + result: ScriptTypeProfile[]; + } + interface ConsoleProfileStartedEventDataType { + id: string; + /** + * Location of console.profile(). 
+ */ + location: Debugger.Location; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + interface ConsoleProfileFinishedEventDataType { + id: string; + /** + * Location of console.profileEnd(). + */ + location: Debugger.Location; + profile: Profile; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + } + namespace HeapProfiler { + /** + * Heap snapshot object id. + */ + type HeapSnapshotObjectId = string; + /** + * Sampling Heap Profile node. Holds callsite information, allocation statistics and child nodes. + */ + interface SamplingHeapProfileNode { + /** + * Function location. + */ + callFrame: Runtime.CallFrame; + /** + * Allocations size in bytes for the node excluding children. + */ + selfSize: number; + /** + * Child nodes. + */ + children: SamplingHeapProfileNode[]; + } + /** + * Profile. + */ + interface SamplingHeapProfile { + head: SamplingHeapProfileNode; + } + interface StartTrackingHeapObjectsParameterType { + trackAllocations?: boolean | undefined; + } + interface StopTrackingHeapObjectsParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped. + */ + reportProgress?: boolean | undefined; + } + interface TakeHeapSnapshotParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken. + */ + reportProgress?: boolean | undefined; + } + interface GetObjectByHeapObjectIdParameterType { + objectId: HeapSnapshotObjectId; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + } + interface AddInspectedHeapObjectParameterType { + /** + * Heap snapshot object id to be accessible by means of $x command line API. + */ + heapObjectId: HeapSnapshotObjectId; + } + interface GetHeapObjectIdParameterType { + /** + * Identifier of the object to get heap object id for. + */ + objectId: Runtime.RemoteObjectId; + } + interface StartSamplingParameterType { + /** + * Average sample interval in bytes. Poisson distribution is used for the intervals. The default value is 32768 bytes. + */ + samplingInterval?: number | undefined; + } + interface GetObjectByHeapObjectIdReturnType { + /** + * Evaluation result. + */ + result: Runtime.RemoteObject; + } + interface GetHeapObjectIdReturnType { + /** + * Id of the heap snapshot object corresponding to the passed remote object id. + */ + heapSnapshotObjectId: HeapSnapshotObjectId; + } + interface StopSamplingReturnType { + /** + * Recorded sampling heap profile. + */ + profile: SamplingHeapProfile; + } + interface GetSamplingProfileReturnType { + /** + * Return the sampling profile being collected. + */ + profile: SamplingHeapProfile; + } + interface AddHeapSnapshotChunkEventDataType { + chunk: string; + } + interface ReportHeapSnapshotProgressEventDataType { + done: number; + total: number; + finished?: boolean | undefined; + } + interface LastSeenObjectIdEventDataType { + lastSeenObjectId: number; + timestamp: number; + } + interface HeapStatsUpdateEventDataType { + /** + * An array of triplets. Each triplet describes a fragment. The first integer is the fragment index, the second integer is a total count of objects for the fragment, the third integer is a total size of the objects for the fragment. + */ + statsUpdate: number[]; + } + } + namespace NodeTracing { + interface TraceConfig { + /** + * Controls how the trace buffer stores data. 
+ */ + recordMode?: string | undefined; + /** + * Included category filters. + */ + includedCategories: string[]; + } + interface StartParameterType { + traceConfig: TraceConfig; + } + interface GetCategoriesReturnType { + /** + * A list of supported tracing categories. + */ + categories: string[]; + } + interface DataCollectedEventDataType { + value: Array<{}>; + } + } + namespace NodeWorker { + type WorkerID = string; + /** + * Unique identifier of attached debugging session. + */ + type SessionID = string; + interface WorkerInfo { + workerId: WorkerID; + type: string; + title: string; + url: string; + } + interface SendMessageToWorkerParameterType { + message: string; + /** + * Identifier of the session. + */ + sessionId: SessionID; + } + interface EnableParameterType { + /** + * Whether to new workers should be paused until the frontend sends `Runtime.runIfWaitingForDebugger` + * message to run them. + */ + waitForDebuggerOnStart: boolean; + } + interface DetachParameterType { + sessionId: SessionID; + } + interface AttachedToWorkerEventDataType { + /** + * Identifier assigned to the session used to send/receive messages. + */ + sessionId: SessionID; + workerInfo: WorkerInfo; + waitingForDebugger: boolean; + } + interface DetachedFromWorkerEventDataType { + /** + * Detached session identifier. + */ + sessionId: SessionID; + } + interface ReceivedMessageFromWorkerEventDataType { + /** + * Identifier of a session which sends a message. + */ + sessionId: SessionID; + message: string; + } + } + namespace NodeRuntime { + interface NotifyWhenWaitingForDisconnectParameterType { + enabled: boolean; + } + } + /** + * The `inspector.Session` is used for dispatching messages to the V8 inspector + * back-end and receiving message responses and notifications. + */ + class Session extends EventEmitter { + /** + * Create a new instance of the inspector.Session class. + * The inspector session needs to be connected through session.connect() before the messages can be dispatched to the inspector backend. + */ + constructor(); + /** + * Connects a session to the inspector back-end. + * @since v8.0.0 + */ + connect(): void; + /** + * Immediately close the session. All pending message callbacks will be called + * with an error. `session.connect()` will need to be called to be able to send + * messages again. Reconnected session will lose all inspector state, such as + * enabled agents or configured breakpoints. + * @since v8.0.0 + */ + disconnect(): void; + /** + * Posts a message to the inspector back-end. `callback` will be notified when + * a response is received. `callback` is a function that accepts two optional + * arguments: error and message-specific result. + * + * ```js + * session.post('Runtime.evaluate', { expression: '2 + 2' }, + * (error, { result }) => console.log(result)); + * // Output: { type: 'number', value: 4, description: '4' } + * ``` + * + * The latest version of the V8 inspector protocol is published on the [Chrome DevTools Protocol Viewer](https://chromedevtools.github.io/devtools-protocol/v8/). + * + * Node.js inspector supports all the Chrome DevTools Protocol domains declared + * by V8\. Chrome DevTools Protocol domain provides an interface for interacting + * with one of the runtime agents used to inspect the application state and listen + * to the run-time events. + * + * ## Example usage + * + * Apart from the debugger, various V8 Profilers are available through the DevTools + * protocol. 
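+ *
+ * For example, the CPU profiler can be driven entirely through `session.post()`.
+ * A minimal sketch (the output file name `./profile.cpuprofile` is an arbitrary
+ * choice):
+ *
+ * ```js
+ * const inspector = require('node:inspector');
+ * const fs = require('node:fs');
+ *
+ * const session = new inspector.Session();
+ * session.connect();
+ *
+ * session.post('Profiler.enable', () => {
+ *   session.post('Profiler.start', () => {
+ *     // Run the code to be profiled here, then stop the profiler.
+ *     session.post('Profiler.stop', (err, { profile }) => {
+ *       if (!err) {
+ *         // Persist the collected profile; it can be loaded into DevTools later.
+ *         fs.writeFileSync('./profile.cpuprofile', JSON.stringify(profile));
+ *       }
+ *     });
+ *   });
+ * });
+ * ```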
+ * @since v8.0.0 + */ + post(method: string, params?: {}, callback?: (err: Error | null, params?: {}) => void): void; + post(method: string, callback?: (err: Error | null, params?: {}) => void): void; + /** + * Returns supported domains. + */ + post(method: 'Schema.getDomains', callback?: (err: Error | null, params: Schema.GetDomainsReturnType) => void): void; + /** + * Evaluates expression on global object. + */ + post(method: 'Runtime.evaluate', params?: Runtime.EvaluateParameterType, callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + post(method: 'Runtime.evaluate', callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + /** + * Add handler to promise with given promise object id. + */ + post(method: 'Runtime.awaitPromise', params?: Runtime.AwaitPromiseParameterType, callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + post(method: 'Runtime.awaitPromise', callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + /** + * Calls function with given declaration on the given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.callFunctionOn', params?: Runtime.CallFunctionOnParameterType, callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + post(method: 'Runtime.callFunctionOn', callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + /** + * Returns properties of a given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.getProperties', params?: Runtime.GetPropertiesParameterType, callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + post(method: 'Runtime.getProperties', callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + /** + * Releases remote object with given id. + */ + post(method: 'Runtime.releaseObject', params?: Runtime.ReleaseObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObject', callback?: (err: Error | null) => void): void; + /** + * Releases all remote objects that belong to a given group. + */ + post(method: 'Runtime.releaseObjectGroup', params?: Runtime.ReleaseObjectGroupParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObjectGroup', callback?: (err: Error | null) => void): void; + /** + * Tells inspected instance to run if it was waiting for debugger to attach. + */ + post(method: 'Runtime.runIfWaitingForDebugger', callback?: (err: Error | null) => void): void; + /** + * Enables reporting of execution contexts creation by means of executionContextCreated event. When the reporting gets enabled the event will be sent immediately for each existing execution context. + */ + post(method: 'Runtime.enable', callback?: (err: Error | null) => void): void; + /** + * Disables reporting of execution contexts creation. + */ + post(method: 'Runtime.disable', callback?: (err: Error | null) => void): void; + /** + * Discards collected exceptions and console API calls. 
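+         *
+         * A minimal sketch, assuming a connected `session`: entries are collected once the
+         * Runtime domain is enabled, can be observed via `Runtime.consoleAPICalled`, and the
+         * buffered backlog can then be dropped:
+         *
+         * ```js
+         * const seen = [];
+         * session.post('Runtime.enable');
+         * session.on('Runtime.consoleAPICalled', ({ params }) => {
+         *   seen.push({ type: params.type, args: params.args });
+         * });
+         * // ...later, discard what the inspector has buffered so far:
+         * session.post('Runtime.discardConsoleEntries');
+         * ```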
+ */ + post(method: 'Runtime.discardConsoleEntries', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Runtime.setCustomObjectFormatterEnabled', params?: Runtime.SetCustomObjectFormatterEnabledParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.setCustomObjectFormatterEnabled', callback?: (err: Error | null) => void): void; + /** + * Compiles expression. + */ + post(method: 'Runtime.compileScript', params?: Runtime.CompileScriptParameterType, callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + post(method: 'Runtime.compileScript', callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + /** + * Runs script with given id in a given context. + */ + post(method: 'Runtime.runScript', params?: Runtime.RunScriptParameterType, callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.runScript', callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.queryObjects', params?: Runtime.QueryObjectsParameterType, callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + post(method: 'Runtime.queryObjects', callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + /** + * Returns all let, const and class variables from global scope. + */ + post( + method: 'Runtime.globalLexicalScopeNames', + params?: Runtime.GlobalLexicalScopeNamesParameterType, + callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void + ): void; + post(method: 'Runtime.globalLexicalScopeNames', callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void): void; + /** + * Enables debugger for the given page. Clients should not assume that the debugging has been enabled until the result for this command is received. + */ + post(method: 'Debugger.enable', callback?: (err: Error | null, params: Debugger.EnableReturnType) => void): void; + /** + * Disables debugger for given page. + */ + post(method: 'Debugger.disable', callback?: (err: Error | null) => void): void; + /** + * Activates / deactivates all breakpoints on the page. + */ + post(method: 'Debugger.setBreakpointsActive', params?: Debugger.SetBreakpointsActiveParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBreakpointsActive', callback?: (err: Error | null) => void): void; + /** + * Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc). + */ + post(method: 'Debugger.setSkipAllPauses', params?: Debugger.SetSkipAllPausesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setSkipAllPauses', callback?: (err: Error | null) => void): void; + /** + * Sets JavaScript breakpoint at given location specified either by URL or URL regex. Once this command is issued, all existing parsed scripts will have breakpoints resolved and returned in locations property. Further matching script parsing will result in subsequent breakpointResolved events issued. This logical breakpoint will survive page reloads. 
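+         *
+         * A minimal sketch, assuming a connected `session` and a hypothetical script URL
+         * (line and column numbers are zero-based in the protocol):
+         *
+         * ```js
+         * session.post('Debugger.enable', () => {
+         *   session.post('Debugger.setBreakpointByUrl', {
+         *     url: 'file:///srv/app/app.js', // hypothetical
+         *     lineNumber: 9,
+         *     condition: 'count > 5', // optional, hypothetical expression
+         *   }, (err, { breakpointId, locations }) => {
+         *     if (!err) console.log(breakpointId, locations);
+         *   });
+         * });
+         * ```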
+ */ + post(method: 'Debugger.setBreakpointByUrl', params?: Debugger.SetBreakpointByUrlParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + post(method: 'Debugger.setBreakpointByUrl', callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + /** + * Sets JavaScript breakpoint at a given location. + */ + post(method: 'Debugger.setBreakpoint', params?: Debugger.SetBreakpointParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + post(method: 'Debugger.setBreakpoint', callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + /** + * Removes JavaScript breakpoint. + */ + post(method: 'Debugger.removeBreakpoint', params?: Debugger.RemoveBreakpointParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.removeBreakpoint', callback?: (err: Error | null) => void): void; + /** + * Returns possible locations for breakpoint. scriptId in start and end range locations should be the same. + */ + post( + method: 'Debugger.getPossibleBreakpoints', + params?: Debugger.GetPossibleBreakpointsParameterType, + callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void + ): void; + post(method: 'Debugger.getPossibleBreakpoints', callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void): void; + /** + * Continues execution until specific location is reached. + */ + post(method: 'Debugger.continueToLocation', params?: Debugger.ContinueToLocationParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.continueToLocation', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Debugger.pauseOnAsyncCall', params?: Debugger.PauseOnAsyncCallParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.pauseOnAsyncCall', callback?: (err: Error | null) => void): void; + /** + * Steps over the statement. + */ + post(method: 'Debugger.stepOver', callback?: (err: Error | null) => void): void; + /** + * Steps into the function call. + */ + post(method: 'Debugger.stepInto', params?: Debugger.StepIntoParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.stepInto', callback?: (err: Error | null) => void): void; + /** + * Steps out of the function call. + */ + post(method: 'Debugger.stepOut', callback?: (err: Error | null) => void): void; + /** + * Stops on the next JavaScript statement. + */ + post(method: 'Debugger.pause', callback?: (err: Error | null) => void): void; + /** + * This method is deprecated - use Debugger.stepInto with breakOnAsyncCall and Debugger.pauseOnAsyncTask instead. Steps into next scheduled async task if any is scheduled before next pause. Returns success when async task is actually scheduled, returns error if no task were scheduled or another scheduleStepIntoAsync was called. + * @experimental + */ + post(method: 'Debugger.scheduleStepIntoAsync', callback?: (err: Error | null) => void): void; + /** + * Resumes JavaScript execution. + */ + post(method: 'Debugger.resume', callback?: (err: Error | null) => void): void; + /** + * Returns stack trace with given stackTraceId. 
+ * @experimental + */ + post(method: 'Debugger.getStackTrace', params?: Debugger.GetStackTraceParameterType, callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + post(method: 'Debugger.getStackTrace', callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + /** + * Searches for given string in script content. + */ + post(method: 'Debugger.searchInContent', params?: Debugger.SearchInContentParameterType, callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + post(method: 'Debugger.searchInContent', callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + /** + * Edits JavaScript source live. + */ + post(method: 'Debugger.setScriptSource', params?: Debugger.SetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + post(method: 'Debugger.setScriptSource', callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + /** + * Restarts particular call frame from the beginning. + */ + post(method: 'Debugger.restartFrame', params?: Debugger.RestartFrameParameterType, callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + post(method: 'Debugger.restartFrame', callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + /** + * Returns source for the script with given id. + */ + post(method: 'Debugger.getScriptSource', params?: Debugger.GetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + post(method: 'Debugger.getScriptSource', callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + /** + * Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or no exceptions. Initial pause on exceptions state is none. + */ + post(method: 'Debugger.setPauseOnExceptions', params?: Debugger.SetPauseOnExceptionsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setPauseOnExceptions', callback?: (err: Error | null) => void): void; + /** + * Evaluates expression on a given call frame. + */ + post(method: 'Debugger.evaluateOnCallFrame', params?: Debugger.EvaluateOnCallFrameParameterType, callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + post(method: 'Debugger.evaluateOnCallFrame', callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + /** + * Changes value of variable in a callframe. Object-based scopes are not supported and must be mutated manually. + */ + post(method: 'Debugger.setVariableValue', params?: Debugger.SetVariableValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setVariableValue', callback?: (err: Error | null) => void): void; + /** + * Changes return value in top frame. Available only at return break position. + * @experimental + */ + post(method: 'Debugger.setReturnValue', params?: Debugger.SetReturnValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setReturnValue', callback?: (err: Error | null) => void): void; + /** + * Enables or disables async call stacks tracking. 
+ */ + post(method: 'Debugger.setAsyncCallStackDepth', params?: Debugger.SetAsyncCallStackDepthParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setAsyncCallStackDepth', callback?: (err: Error | null) => void): void; + /** + * Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in scripts with url matching one of the patterns. VM will try to leave blackboxed script by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. + * @experimental + */ + post(method: 'Debugger.setBlackboxPatterns', params?: Debugger.SetBlackboxPatternsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBlackboxPatterns', callback?: (err: Error | null) => void): void; + /** + * Makes backend skip steps in the script in blackboxed ranges. VM will try leave blacklisted scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. Positions array contains positions where blackbox state is changed. First interval isn't blackboxed. Array should be sorted. + * @experimental + */ + post(method: 'Debugger.setBlackboxedRanges', params?: Debugger.SetBlackboxedRangesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBlackboxedRanges', callback?: (err: Error | null) => void): void; + /** + * Enables console domain, sends the messages collected so far to the client by means of the messageAdded notification. + */ + post(method: 'Console.enable', callback?: (err: Error | null) => void): void; + /** + * Disables console domain, prevents further console messages from being reported to the client. + */ + post(method: 'Console.disable', callback?: (err: Error | null) => void): void; + /** + * Does nothing. + */ + post(method: 'Console.clearMessages', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.disable', callback?: (err: Error | null) => void): void; + /** + * Changes CPU profiler sampling interval. Must be called before CPU profiles recording started. + */ + post(method: 'Profiler.setSamplingInterval', params?: Profiler.SetSamplingIntervalParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Profiler.setSamplingInterval', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.start', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.stop', callback?: (err: Error | null, params: Profiler.StopReturnType) => void): void; + /** + * Enable precise code coverage. Coverage data for JavaScript executed before enabling precise code coverage may be incomplete. Enabling prevents running optimized code and resets execution counters. + */ + post(method: 'Profiler.startPreciseCoverage', params?: Profiler.StartPreciseCoverageParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Profiler.startPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Disable precise code coverage. Disabling releases unnecessary execution count records and allows executing optimized code. + */ + post(method: 'Profiler.stopPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Collect coverage data for the current isolate, and resets execution counters. Precise code coverage needs to have started. 
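+         *
+         * A minimal sketch of one coverage round-trip over a connected `session`; the
+         * `callCount`/`detailed` options follow the DevTools protocol and may differ
+         * between Node.js versions:
+         *
+         * ```js
+         * session.post('Profiler.enable', () => {
+         *   session.post('Profiler.startPreciseCoverage', { callCount: true, detailed: true }, () => {
+         *     // ...run the code that should be covered...
+         *     session.post('Profiler.takePreciseCoverage', (err, { result }) => {
+         *       if (!err) console.log(JSON.stringify(result));
+         *       session.post('Profiler.stopPreciseCoverage');
+         *     });
+         *   });
+         * });
+         * ```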
+ */ + post(method: 'Profiler.takePreciseCoverage', callback?: (err: Error | null, params: Profiler.TakePreciseCoverageReturnType) => void): void; + /** + * Collect coverage data for the current isolate. The coverage data may be incomplete due to garbage collection. + */ + post(method: 'Profiler.getBestEffortCoverage', callback?: (err: Error | null, params: Profiler.GetBestEffortCoverageReturnType) => void): void; + /** + * Enable type profile. + * @experimental + */ + post(method: 'Profiler.startTypeProfile', callback?: (err: Error | null) => void): void; + /** + * Disable type profile. Disabling releases type profile data collected so far. + * @experimental + */ + post(method: 'Profiler.stopTypeProfile', callback?: (err: Error | null) => void): void; + /** + * Collect type profile. + * @experimental + */ + post(method: 'Profiler.takeTypeProfile', callback?: (err: Error | null, params: Profiler.TakeTypeProfileReturnType) => void): void; + post(method: 'HeapProfiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.disable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', params?: HeapProfiler.StartTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', params?: HeapProfiler.StopTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', params?: HeapProfiler.TakeHeapSnapshotParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.collectGarbage', callback?: (err: Error | null) => void): void; + post( + method: 'HeapProfiler.getObjectByHeapObjectId', + params?: HeapProfiler.GetObjectByHeapObjectIdParameterType, + callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void + ): void; + post(method: 'HeapProfiler.getObjectByHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void): void; + /** + * Enables console to refer to the node with given id via $x (see Command Line API for more details $x functions). 
+ */ + post(method: 'HeapProfiler.addInspectedHeapObject', params?: HeapProfiler.AddInspectedHeapObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.addInspectedHeapObject', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.getHeapObjectId', params?: HeapProfiler.GetHeapObjectIdParameterType, callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; + post(method: 'HeapProfiler.getHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; + post(method: 'HeapProfiler.startSampling', params?: HeapProfiler.StartSamplingParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startSampling', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopSampling', callback?: (err: Error | null, params: HeapProfiler.StopSamplingReturnType) => void): void; + post(method: 'HeapProfiler.getSamplingProfile', callback?: (err: Error | null, params: HeapProfiler.GetSamplingProfileReturnType) => void): void; + /** + * Gets supported tracing categories. + */ + post(method: 'NodeTracing.getCategories', callback?: (err: Error | null, params: NodeTracing.GetCategoriesReturnType) => void): void; + /** + * Start trace events collection. + */ + post(method: 'NodeTracing.start', params?: NodeTracing.StartParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeTracing.start', callback?: (err: Error | null) => void): void; + /** + * Stop trace events collection. Remaining collected events will be sent as a sequence of + * dataCollected events followed by tracingComplete event. + */ + post(method: 'NodeTracing.stop', callback?: (err: Error | null) => void): void; + /** + * Sends protocol message over session with given id. + */ + post(method: 'NodeWorker.sendMessageToWorker', params?: NodeWorker.SendMessageToWorkerParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.sendMessageToWorker', callback?: (err: Error | null) => void): void; + /** + * Instructs the inspector to attach to running workers. Will also attach to new workers + * as they start + */ + post(method: 'NodeWorker.enable', params?: NodeWorker.EnableParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.enable', callback?: (err: Error | null) => void): void; + /** + * Detaches from all running workers and disables attaching to new workers as they are started. + */ + post(method: 'NodeWorker.disable', callback?: (err: Error | null) => void): void; + /** + * Detached from the worker with given sessionId. + */ + post(method: 'NodeWorker.detach', params?: NodeWorker.DetachParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.detach', callback?: (err: Error | null) => void): void; + /** + * Enable the `NodeRuntime.waitingForDisconnect`. + */ + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', params?: NodeRuntime.NotifyWhenWaitingForDisconnectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', callback?: (err: Error | null) => void): void; + // Events + addListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. 
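+         *
+         * A sketch of consuming notifications: streaming a heap snapshot to a file (the
+         * file name is only an example) by listening for `HeapProfiler.addHeapSnapshotChunk`
+         * while `HeapProfiler.takeHeapSnapshot` runs:
+         *
+         * ```js
+         * const fs = require('node:fs');
+         * const fd = fs.openSync('profile.heapsnapshot', 'w');
+         * session.on('HeapProfiler.addHeapSnapshotChunk', (m) => {
+         *   fs.writeSync(fd, m.params.chunk);
+         * });
+         * session.post('HeapProfiler.takeHeapSnapshot', (err) => {
+         *   fs.closeSync(fd);
+         *   if (err) console.error(err);
+         * });
+         * ```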
+ */ + addListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + addListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + addListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + addListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + addListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + addListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + addListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + addListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + addListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + addListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + addListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + addListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + addListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + addListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + addListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + addListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. 
+ */ + addListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + addListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + addListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + addListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + addListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + addListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + addListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + addListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'inspectorNotification', message: InspectorNotification<{}>): boolean; + emit(event: 'Runtime.executionContextCreated', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextDestroyed', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextsCleared'): boolean; + emit(event: 'Runtime.exceptionThrown', message: InspectorNotification): boolean; + emit(event: 'Runtime.exceptionRevoked', message: InspectorNotification): boolean; + emit(event: 'Runtime.consoleAPICalled', message: InspectorNotification): boolean; + emit(event: 'Runtime.inspectRequested', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptParsed', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptFailedToParse', message: InspectorNotification): boolean; + emit(event: 'Debugger.breakpointResolved', message: InspectorNotification): boolean; + emit(event: 'Debugger.paused', message: InspectorNotification): boolean; + emit(event: 'Debugger.resumed'): boolean; + emit(event: 'Console.messageAdded', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileStarted', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileFinished', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.addHeapSnapshotChunk', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.resetProfiles'): boolean; + emit(event: 'HeapProfiler.reportHeapSnapshotProgress', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.lastSeenObjectId', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.heapStatsUpdate', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.dataCollected', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.tracingComplete'): boolean; + 
emit(event: 'NodeWorker.attachedToWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.detachedFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.receivedMessageFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeRuntime.waitingForDisconnect'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + on(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + on(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + on(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + on(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + on(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + on(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + on(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + on(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + on(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + on(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + on(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + on(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + on(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + on(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + on(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + on(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + on(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + on(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + on(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + on(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + on(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + on(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + on(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + on(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + on(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + once(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + once(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + once(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + once(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + once(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + once(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. 
+ */ + once(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + once(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + once(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + once(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + once(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + once(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + once(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + once(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + once(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + once(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + once(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + once(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + once(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + once(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + once(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + once(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + once(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). 
+ */ + once(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + once(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + prependListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + prependListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + prependListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + prependListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependOnceListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + prependOnceListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependOnceListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependOnceListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. 
+ */ + prependOnceListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependOnceListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependOnceListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependOnceListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependOnceListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependOnceListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependOnceListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependOnceListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependOnceListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependOnceListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + prependOnceListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependOnceListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependOnceListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependOnceListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependOnceListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. 
+ */ + prependOnceListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependOnceListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependOnceListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependOnceListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependOnceListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + } + /** + * Activate inspector on host and port. Equivalent to`node --inspect=[[host:]port]`, but can be done programmatically after node has + * started. + * + * If wait is `true`, will block until a client has connected to the inspect port + * and flow control has been passed to the debugger client. + * + * See the `security warning` regarding the `host`parameter usage. + * @param [port='what was specified on the CLI'] Port to listen on for inspector connections. Optional. + * @param [host='what was specified on the CLI'] Host to listen on for inspector connections. Optional. + * @param [wait=false] Block until a client has connected. Optional. + * @returns Disposable that calls `inspector.close()`. + */ + function open(port?: number, host?: string, wait?: boolean): Disposable; + /** + * Deactivate the inspector. Blocks until there are no active connections. + */ + function close(): void; + /** + * Return the URL of the active inspector, or `undefined` if there is none. + * + * ```console + * $ node --inspect -p 'inspector.url()' + * Debugger listening on ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34 + * For help, see: https://nodejs.org/en/docs/inspector + * ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34 + * + * $ node --inspect=localhost:3000 -p 'inspector.url()' + * Debugger listening on ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a + * For help, see: https://nodejs.org/en/docs/inspector + * ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a + * + * $ node -p 'inspector.url()' + * undefined + * ``` + */ + function url(): string | undefined; + /** + * Blocks until a client (existing or connected later) has sent`Runtime.runIfWaitingForDebugger` command. + * + * An exception will be thrown if there is no active inspector. + * @since v12.7.0 + */ + function waitForDebugger(): void; +} +/** + * The inspector module provides an API for interacting with the V8 inspector. 
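+ *
+ * A minimal sketch of activating the inspector programmatically (host and port are
+ * examples only):
+ *
+ * ```js
+ * const inspector = require('node:inspector');
+ *
+ * inspector.open(9230, '127.0.0.1', false); // like `node --inspect=127.0.0.1:9230`
+ * console.log(inspector.url());             // ws://127.0.0.1:9230/<uuid>
+ * inspector.waitForDebugger();              // blocks until a client sends Runtime.runIfWaitingForDebugger
+ * ```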
+ */ +declare module 'node:inspector' { + import inspector = require('inspector'); + export = inspector; +} diff --git a/node_modules/@types/node/module.d.ts b/node_modules/@types/node/module.d.ts new file mode 100644 index 0000000..c38a9b8 --- /dev/null +++ b/node_modules/@types/node/module.d.ts @@ -0,0 +1,315 @@ +/** + * @since v0.3.7 + * @experimental + */ +declare module "module" { + import { URL } from "node:url"; + import { MessagePort } from "node:worker_threads"; + namespace Module { + /** + * The `module.syncBuiltinESMExports()` method updates all the live bindings for + * builtin `ES Modules` to match the properties of the `CommonJS` exports. It + * does not add or remove exported names from the `ES Modules`. + * + * ```js + * const fs = require('node:fs'); + * const assert = require('node:assert'); + * const { syncBuiltinESMExports } = require('node:module'); + * + * fs.readFile = newAPI; + * + * delete fs.readFileSync; + * + * function newAPI() { + * // ... + * } + * + * fs.newAPI = newAPI; + * + * syncBuiltinESMExports(); + * + * import('node:fs').then((esmFS) => { + * // It syncs the existing readFile property with the new value + * assert.strictEqual(esmFS.readFile, newAPI); + * // readFileSync has been deleted from the required fs + * assert.strictEqual('readFileSync' in fs, false); + * // syncBuiltinESMExports() does not remove readFileSync from esmFS + * assert.strictEqual('readFileSync' in esmFS, true); + * // syncBuiltinESMExports() does not add names + * assert.strictEqual(esmFS.newAPI, undefined); + * }); + * ``` + * @since v12.12.0 + */ + function syncBuiltinESMExports(): void; + /** + * `path` is the resolved path for the file for which a corresponding source map + * should be fetched. + * @since v13.7.0, v12.17.0 + * @return Returns `module.SourceMap` if a source map is found, `undefined` otherwise. + */ + function findSourceMap(path: string, error?: Error): SourceMap; + interface SourceMapPayload { + file: string; + version: number; + sources: string[]; + sourcesContent: string[]; + names: string[]; + mappings: string; + sourceRoot: string; + } + interface SourceMapping { + generatedLine: number; + generatedColumn: number; + originalSource: string; + originalLine: number; + originalColumn: number; + } + interface SourceOrigin { + /** + * The name of the range in the source map, if one was provided + */ + name?: string; + /** + * The file name of the original source, as reported in the SourceMap + */ + fileName: string; + /** + * The 1-indexed lineNumber of the corresponding call site in the original source + */ + lineNumber: number; + /** + * The 1-indexed columnNumber of the corresponding call site in the original source + */ + columnNumber: number; + } + /** + * @since v13.7.0, v12.17.0 + */ + class SourceMap { + /** + * Getter for the payload used to construct the `SourceMap` instance. + */ + readonly payload: SourceMapPayload; + constructor(payload: SourceMapPayload); + /** + * Given a line offset and column offset in the generated source + * file, returns an object representing the SourceMap range in the + * original file if found, or an empty object if not. + * + * The object returned contains the following keys: + * + * The returned value represents the raw range as it appears in the + * SourceMap, based on zero-indexed offsets, _not_ 1-indexed line and + * column numbers as they appear in Error messages and CallSite + * objects. 
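+             *
+             * A minimal sketch, assuming the process was started with `--enable-source-maps`
+             * and that the generated file (hypothetical path) ships a source map:
+             *
+             * ```js
+             * const { findSourceMap } = require('node:module');
+             *
+             * const sm = findSourceMap('/srv/app/dist/bundle.js'); // hypothetical
+             * if (sm) {
+             *   const entry = sm.findEntry(13, 7); // zero-indexed generated line/column
+             *   console.log(entry.originalSource, entry.originalLine, entry.originalColumn);
+             * }
+             * ```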
+ * + * To get the corresponding 1-indexed line and column numbers from a + * lineNumber and columnNumber as they are reported by Error stacks + * and CallSite objects, use `sourceMap.findOrigin(lineNumber, columnNumber)` + * @param lineOffset The zero-indexed line number offset in the generated source + * @param columnOffset The zero-indexed column number offset in the generated source + */ + findEntry(lineOffset: number, columnOffset: number): SourceMapping; + /** + * Given a 1-indexed `lineNumber` and `columnNumber` from a call site in the generated source, + * find the corresponding call site location in the original source. + * + * If the `lineNumber` and `columnNumber` provided are not found in any source map, + * then an empty object is returned. + * @param lineNumber The 1-indexed line number of the call site in the generated source + * @param columnNumber The 1-indexed column number of the call site in the generated source + */ + findOrigin(lineNumber: number, columnNumber: number): SourceOrigin | {}; + } + /** @deprecated Use `ImportAttributes` instead */ + interface ImportAssertions extends ImportAttributes {} + interface ImportAttributes extends NodeJS.Dict { + type?: string | undefined; + } + type ModuleFormat = "builtin" | "commonjs" | "json" | "module" | "wasm"; + type ModuleSource = string | ArrayBuffer | NodeJS.TypedArray; + interface GlobalPreloadContext { + port: MessagePort; + } + /** + * @deprecated This hook will be removed in a future version. + * Use `initialize` instead. When a loader has an `initialize` export, `globalPreload` will be ignored. + * + * Sometimes it might be necessary to run some code inside of the same global scope that the application runs in. + * This hook allows the return of a string that is run as a sloppy-mode script on startup. + * + * @param context Information to assist the preload code + * @return Code to run before application startup + */ + type GlobalPreloadHook = (context: GlobalPreloadContext) => string; + /** + * The `initialize` hook provides a way to define a custom function that runs in the hooks thread + * when the hooks module is initialized. Initialization happens when the hooks module is registered via `register`. + * + * This hook can receive data from a `register` invocation, including ports and other transferrable objects. + * The return value of `initialize` can be a `Promise`, in which case it will be awaited before the main application thread execution resumes. + */ + type InitializeHook = (data: Data) => void | Promise; + interface ResolveHookContext { + /** + * Export conditions of the relevant `package.json` + */ + conditions: string[]; + /** + * @deprecated Use `importAttributes` instead + */ + importAssertions: ImportAttributes; + /** + * An object whose key-value pairs represent the assertions for the module to import + */ + importAttributes: ImportAttributes; + /** + * The module importing this one, or undefined if this is the Node.js entry point + */ + parentURL: string | undefined; + } + interface ResolveFnOutput { + /** + * A hint to the load hook (it might be ignored) + */ + format?: ModuleFormat | null | undefined; + /** + * @deprecated Use `importAttributes` instead + */ + importAssertions?: ImportAttributes | undefined; + /** + * The import attributes to use when caching the module (optional; if excluded the input will be used) + */ + importAttributes?: ImportAttributes | undefined; + /** + * A signal that this hook intends to terminate the chain of `resolve` hooks. 
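+             *
+             * A minimal sketch of a `resolve` hook that short-circuits a hypothetical bare
+             * specifier; it would live in a hooks module registered through `module.register()`:
+             *
+             * ```js
+             * // hooks.mjs (hypothetical); registered from the entry point with:
+             * //   import { register } from 'node:module';
+             * //   register('./hooks.mjs', import.meta.url);
+             * export async function resolve(specifier, context, nextResolve) {
+             *   if (specifier === 'app-config') { // hypothetical alias
+             *     return {
+             *       url: new URL('./config/app-config.mjs', context.parentURL).href,
+             *       shortCircuit: true, // end the resolve chain here
+             *     };
+             *   }
+             *   return nextResolve(specifier, context);
+             * }
+             * ```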
+ * @default false + */ + shortCircuit?: boolean | undefined; + /** + * The absolute URL to which this input resolves + */ + url: string; + } + /** + * The `resolve` hook chain is responsible for resolving file URL for a given module specifier and parent URL, and optionally its format (such as `'module'`) as a hint to the `load` hook. + * If a format is specified, the load hook is ultimately responsible for providing the final `format` value (and it is free to ignore the hint provided by `resolve`); + * if `resolve` provides a format, a custom `load` hook is required even if only to pass the value to the Node.js default `load` hook. + * + * @param specifier The specified URL path of the module to be resolved + * @param context + * @param nextResolve The subsequent `resolve` hook in the chain, or the Node.js default `resolve` hook after the last user-supplied resolve hook + */ + type ResolveHook = ( + specifier: string, + context: ResolveHookContext, + nextResolve: ( + specifier: string, + context?: ResolveHookContext, + ) => ResolveFnOutput | Promise, + ) => ResolveFnOutput | Promise; + interface LoadHookContext { + /** + * Export conditions of the relevant `package.json` + */ + conditions: string[]; + /** + * The format optionally supplied by the `resolve` hook chain + */ + format: ModuleFormat; + /** + * @deprecated Use `importAttributes` instead + */ + importAssertions: ImportAttributes; + /** + * An object whose key-value pairs represent the assertions for the module to import + */ + importAttributes: ImportAttributes; + } + interface LoadFnOutput { + format: ModuleFormat; + /** + * A signal that this hook intends to terminate the chain of `resolve` hooks. + * @default false + */ + shortCircuit?: boolean | undefined; + /** + * The source for Node.js to evaluate + */ + source?: ModuleSource; + } + /** + * The `load` hook provides a way to define a custom method of determining how a URL should be interpreted, retrieved, and parsed. + * It is also in charge of validating the import assertion. + * + * @param url The URL/path of the module to be loaded + * @param context Metadata about the module + * @param nextLoad The subsequent `load` hook in the chain, or the Node.js default `load` hook after the last user-supplied `load` hook + */ + type LoadHook = ( + url: string, + context: LoadHookContext, + nextLoad: (url: string, context?: LoadHookContext) => LoadFnOutput | Promise, + ) => LoadFnOutput | Promise; + } + interface RegisterOptions { + parentURL: string | URL; + data?: Data | undefined; + transferList?: any[] | undefined; + } + interface Module extends NodeModule {} + class Module { + static runMain(): void; + static wrap(code: string): string; + static createRequire(path: string | URL): NodeRequire; + static builtinModules: string[]; + static isBuiltin(moduleName: string): boolean; + static Module: typeof Module; + static register( + specifier: string | URL, + parentURL?: string | URL, + options?: RegisterOptions, + ): void; + static register(specifier: string | URL, options?: RegisterOptions): void; + constructor(id: string, parent?: Module); + } + global { + interface ImportMeta { + /** + * The directory name of the current module. This is the same as the `path.dirname()` of the `import.meta.filename`. + * **Caveat:** only present on `file:` modules. + */ + dirname: string; + /** + * The full absolute path and filename of the current module, with symlinks resolved. + * This is the same as the `url.fileURLToPath()` of the `import.meta.url`. 
+ * **Caveat:** only local modules support this property. Modules not using the `file:` protocol will not provide it. + */ + filename: string; + /** + * The absolute `file:` URL of the module. + */ + url: string; + /** + * Provides a module-relative resolution function scoped to each module, returning + * the URL string. + * + * Second `parent` parameter is only used when the `--experimental-import-meta-resolve` + * command flag enabled. + * + * @since v20.6.0 + * + * @param specifier The module specifier to resolve relative to `parent`. + * @param parent The absolute parent module URL to resolve from. + * @returns The absolute (`file:`) URL string for the resolved module. + */ + resolve(specifier: string, parent?: string | URL | undefined): string; + } + } + export = Module; +} +declare module "node:module" { + import module = require("module"); + export = module; +} diff --git a/node_modules/@types/node/net.d.ts b/node_modules/@types/node/net.d.ts new file mode 100644 index 0000000..c09b95d --- /dev/null +++ b/node_modules/@types/node/net.d.ts @@ -0,0 +1,954 @@ +/** + * > Stability: 2 - Stable + * + * The `node:net` module provides an asynchronous network API for creating stream-based + * TCP or `IPC` servers ({@link createServer}) and clients + * ({@link createConnection}). + * + * It can be accessed using: + * + * ```js + * const net = require('node:net'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/net.js) + */ +declare module "net" { + import * as stream from "node:stream"; + import { Abortable, EventEmitter } from "node:events"; + import * as dns from "node:dns"; + type LookupFunction = ( + hostname: string, + options: dns.LookupOptions, + callback: (err: NodeJS.ErrnoException | null, address: string | dns.LookupAddress[], family?: number) => void, + ) => void; + interface AddressInfo { + address: string; + family: string; + port: number; + } + interface SocketConstructorOpts { + fd?: number | undefined; + allowHalfOpen?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + signal?: AbortSignal; + } + interface OnReadOpts { + buffer: Uint8Array | (() => Uint8Array); + /** + * This function is called for every chunk of incoming data. + * Two arguments are passed to it: the number of bytes written to buffer and a reference to buffer. + * Return false from this function to implicitly pause() the socket. + */ + callback(bytesWritten: number, buf: Uint8Array): boolean; + } + interface ConnectOpts { + /** + * If specified, incoming data is stored in a single buffer and passed to the supplied callback when data arrives on the socket. + * Note: this will cause the streaming functionality to not provide any data, however events like 'error', 'end', and 'close' will + * still be emitted as normal and methods like pause() and resume() will also behave as expected. 
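+ *
+ * A minimal sketch, assuming a local server on the arbitrary port 8124:
+ *
+ * ```js
+ * const net = require('node:net');
+ * net.connect({
+ *   port: 8124,
+ *   onread: {
+ *     // Reuse a single 4 KiB buffer for every read.
+ *     buffer: Buffer.alloc(4 * 1024),
+ *     callback(nread, buf) {
+ *       console.log(buf.toString('utf8', 0, nread));
+ *       return true; // return false to pause the socket
+ *     },
+ *   },
+ * });
+ * ```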
+ */ + onread?: OnReadOpts | undefined; + } + interface TcpSocketConnectOpts extends ConnectOpts { + port: number; + host?: string | undefined; + localAddress?: string | undefined; + localPort?: number | undefined; + hints?: number | undefined; + family?: number | undefined; + lookup?: LookupFunction | undefined; + noDelay?: boolean | undefined; + keepAlive?: boolean | undefined; + keepAliveInitialDelay?: number | undefined; + /** + * @since v18.13.0 + */ + autoSelectFamily?: boolean | undefined; + /** + * @since v18.13.0 + */ + autoSelectFamilyAttemptTimeout?: number | undefined; + } + interface IpcSocketConnectOpts extends ConnectOpts { + path: string; + } + type SocketConnectOpts = TcpSocketConnectOpts | IpcSocketConnectOpts; + type SocketReadyState = "opening" | "open" | "readOnly" | "writeOnly" | "closed"; + /** + * This class is an abstraction of a TCP socket or a streaming `IPC` endpoint + * (uses named pipes on Windows, and Unix domain sockets otherwise). It is also + * an `EventEmitter`. + * + * A `net.Socket` can be created by the user and used directly to interact with + * a server. For example, it is returned by {@link createConnection}, + * so the user can use it to talk to the server. + * + * It can also be created by Node.js and passed to the user when a connection + * is received. For example, it is passed to the listeners of a `'connection'` event emitted on a {@link Server}, so the user can use + * it to interact with the client. + * @since v0.3.4 + */ + class Socket extends stream.Duplex { + constructor(options?: SocketConstructorOpts); + /** + * Destroys the socket after all data is written. If the `finish` event was already emitted the socket is destroyed immediately. + * If the socket is still writable it implicitly calls `socket.end()`. + * @since v0.3.4 + */ + destroySoon(): void; + /** + * Sends data on the socket. The second parameter specifies the encoding in the + * case of a string. It defaults to UTF8 encoding. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is again free. + * + * The optional `callback` parameter will be executed when the data is finally + * written out, which may not be immediately. + * + * See `Writable` stream `write()` method for more + * information. + * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + */ + write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean; + write(str: Uint8Array | string, encoding?: BufferEncoding, cb?: (err?: Error) => void): boolean; + /** + * Initiate a connection on a given socket. + * + * Possible signatures: + * + * * `socket.connect(options[, connectListener])` + * * `socket.connect(path[, connectListener])` for `IPC` connections. + * * `socket.connect(port[, host][, connectListener])` for TCP connections. + * * Returns: `net.Socket` The socket itself. + * + * This function is asynchronous. When the connection is established, the `'connect'` event will be emitted. If there is a problem connecting, + * instead of a `'connect'` event, an `'error'` event will be emitted with + * the error passed to the `'error'` listener. + * The last parameter `connectListener`, if supplied, will be added as a listener + * for the `'connect'` event **once**. + * + * This function should only be used for reconnecting a socket after`'close'` has been emitted or otherwise it may lead to undefined + * behavior. 
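+ *
+ * A minimal sketch, with a placeholder host and port:
+ *
+ * ```js
+ * const net = require('node:net');
+ * const socket = new net.Socket();
+ * socket.on('error', (err) => console.error('connection failed:', err.message));
+ * socket.connect({ port: 8124, host: '127.0.0.1' }, () => {
+ *   socket.end('hello\n');
+ * });
+ * ```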
+ */ + connect(options: SocketConnectOpts, connectionListener?: () => void): this; + connect(port: number, host: string, connectionListener?: () => void): this; + connect(port: number, connectionListener?: () => void): this; + connect(path: string, connectionListener?: () => void): this; + /** + * Set the encoding for the socket as a `Readable Stream`. See `readable.setEncoding()` for more information. + * @since v0.1.90 + * @return The socket itself. + */ + setEncoding(encoding?: BufferEncoding): this; + /** + * Pauses the reading of data. That is, `'data'` events will not be emitted. + * Useful to throttle back an upload. + * @return The socket itself. + */ + pause(): this; + /** + * Close the TCP connection by sending an RST packet and destroy the stream. + * If this TCP socket is in connecting status, it will send an RST packet and destroy this TCP socket once it is connected. + * Otherwise, it will call `socket.destroy` with an `ERR_SOCKET_CLOSED` Error. + * If this is not a TCP socket (for example, a pipe), calling this method will immediately throw an `ERR_INVALID_HANDLE_TYPE` Error. + * @since v18.3.0, v16.17.0 + */ + resetAndDestroy(): this; + /** + * Resumes reading after a call to `socket.pause()`. + * @return The socket itself. + */ + resume(): this; + /** + * Sets the socket to timeout after `timeout` milliseconds of inactivity on + * the socket. By default `net.Socket` do not have a timeout. + * + * When an idle timeout is triggered the socket will receive a `'timeout'` event but the connection will not be severed. The user must manually call `socket.end()` or `socket.destroy()` to + * end the connection. + * + * ```js + * socket.setTimeout(3000); + * socket.on('timeout', () => { + * console.log('socket timeout'); + * socket.end(); + * }); + * ``` + * + * If `timeout` is 0, then the existing idle timeout is disabled. + * + * The optional `callback` parameter will be added as a one-time listener for the `'timeout'` event. + * @since v0.1.90 + * @return The socket itself. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Enable/disable the use of Nagle's algorithm. + * + * When a TCP connection is created, it will have Nagle's algorithm enabled. + * + * Nagle's algorithm delays data before it is sent via the network. It attempts + * to optimize throughput at the expense of latency. + * + * Passing `true` for `noDelay` or not passing an argument will disable Nagle's + * algorithm for the socket. Passing `false` for `noDelay` will enable Nagle's + * algorithm. + * @since v0.1.90 + * @param [noDelay=true] + * @return The socket itself. + */ + setNoDelay(noDelay?: boolean): this; + /** + * Enable/disable keep-alive functionality, and optionally set the initial + * delay before the first keepalive probe is sent on an idle socket. + * + * Set `initialDelay` (in milliseconds) to set the delay between the last + * data packet received and the first keepalive probe. Setting `0` for`initialDelay` will leave the value unchanged from the default + * (or previous) setting. + * + * Enabling the keep-alive functionality will set the following socket options: + * + * * `SO_KEEPALIVE=1` + * * `TCP_KEEPIDLE=initialDelay` + * * `TCP_KEEPCNT=10` + * * `TCP_KEEPINTVL=1` + * @since v0.1.92 + * @param [enable=false] + * @param [initialDelay=0] + * @return The socket itself. 
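+ *
+ * For example, assuming `socket` is an already connected `net.Socket`:
+ *
+ * ```js
+ * // Send the first keep-alive probe after 60 seconds of inactivity.
+ * socket.setKeepAlive(true, 60_000);
+ * ```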
+ */ + setKeepAlive(enable?: boolean, initialDelay?: number): this; + /** + * Returns the bound `address`, the address `family` name and `port` of the + * socket as reported by the operating system:`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }` + * @since v0.1.90 + */ + address(): AddressInfo | {}; + /** + * Calling `unref()` on a socket will allow the program to exit if this is the only + * active socket in the event system. If the socket is already `unref`ed calling`unref()` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + unref(): this; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed socket will _not_ let the program exit if it's the only socket left (the default behavior). + * If the socket is `ref`ed calling `ref` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + ref(): this; + /** + * This property is only present if the family autoselection algorithm is enabled in `socket.connect(options)` + * and it is an array of the addresses that have been attempted. + * + * Each address is a string in the form of `$IP:$PORT`. + * If the connection was successful, then the last address is the one that the socket is currently connected to. + * @since v19.4.0 + */ + readonly autoSelectFamilyAttemptedAddresses: string[]; + /** + * This property shows the number of characters buffered for writing. The buffer + * may contain strings whose length after encoding is not yet known. So this number + * is only an approximation of the number of bytes in the buffer. + * + * `net.Socket` has the property that `socket.write()` always works. This is to + * help users get up and running quickly. The computer cannot always keep up + * with the amount of data that is written to a socket. The network connection + * simply might be too slow. Node.js will internally queue up the data written to a + * socket and send it out over the wire when it is possible. + * + * The consequence of this internal buffering is that memory may grow. + * Users who experience large or growing `bufferSize` should attempt to + * "throttle" the data flows in their program with `socket.pause()` and `socket.resume()`. + * @since v0.3.8 + * @deprecated Since v14.6.0 - Use `writableLength` instead. + */ + readonly bufferSize: number; + /** + * The amount of received bytes. + * @since v0.5.3 + */ + readonly bytesRead: number; + /** + * The amount of bytes sent. + * @since v0.5.3 + */ + readonly bytesWritten: number; + /** + * If `true`,`socket.connect(options[, connectListener])` was + * called and has not yet finished. It will stay `true` until the socket becomes + * connected, then it is set to `false` and the `'connect'` event is emitted. Note + * that the `socket.connect(options[, connectListener])` callback is a listener for the `'connect'` event. + * @since v6.1.0 + */ + readonly connecting: boolean; + /** + * This is `true` if the socket is not connected yet, either because `.connect()`has not yet been called or because it is still in the process of connecting + * (see `socket.connecting`). + * @since v11.2.0, v10.16.0 + */ + readonly pending: boolean; + /** + * See `writable.destroyed` for further details. + */ + readonly destroyed: boolean; + /** + * The string representation of the local IP address the remote client is + * connecting on. For example, in a server listening on `'0.0.0.0'`, if a client + * connects on `'192.168.1.1'`, the value of `socket.localAddress` would be`'192.168.1.1'`. 
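+ *
+ * For example, assuming `socket` is a `net.Socket` that is about to connect:
+ *
+ * ```js
+ * socket.once('connect', () => {
+ *   console.log(`${socket.localAddress}:${socket.localPort} -> ` +
+ *               `${socket.remoteAddress}:${socket.remotePort}`);
+ * });
+ * ```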
+ * @since v0.9.6 + */ + readonly localAddress?: string; + /** + * The numeric representation of the local port. For example, `80` or `21`. + * @since v0.9.6 + */ + readonly localPort?: number; + /** + * The string representation of the local IP family. `'IPv4'` or `'IPv6'`. + * @since v18.8.0, v16.18.0 + */ + readonly localFamily?: string; + /** + * This property represents the state of the connection as a string. + * + * * If the stream is connecting `socket.readyState` is `opening`. + * * If the stream is readable and writable, it is `open`. + * * If the stream is readable and not writable, it is `readOnly`. + * * If the stream is not readable and writable, it is `writeOnly`. + * @since v0.5.0 + */ + readonly readyState: SocketReadyState; + /** + * The string representation of the remote IP address. For example,`'74.125.127.100'` or `'2001:4860:a005::68'`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.5.10 + */ + readonly remoteAddress?: string | undefined; + /** + * The string representation of the remote IP family. `'IPv4'` or `'IPv6'`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.11.14 + */ + readonly remoteFamily?: string | undefined; + /** + * The numeric representation of the remote port. For example, `80` or `21`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.5.10 + */ + readonly remotePort?: number | undefined; + /** + * The socket timeout in milliseconds as set by `socket.setTimeout()`. + * It is `undefined` if a timeout has not been set. + * @since v10.7.0 + */ + readonly timeout?: number | undefined; + /** + * Half-closes the socket. i.e., it sends a FIN packet. It is possible the + * server will still send some data. + * + * See `writable.end()` for further details. + * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + * @param callback Optional callback for when the socket is finished. + * @return The socket itself. + */ + end(callback?: () => void): this; + end(buffer: Uint8Array | string, callback?: () => void): this; + end(str: Uint8Array | string, encoding?: BufferEncoding, callback?: () => void): this; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. data + * 4. drain + * 5. end + * 6. error + * 7. lookup + * 8. ready + * 9. 
timeout + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: (hadError: boolean) => void): this; + addListener(event: "connect", listener: () => void): this; + addListener(event: "data", listener: (data: Buffer) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "timeout", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close", hadError: boolean): boolean; + emit(event: "connect"): boolean; + emit(event: "data", data: Buffer): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "lookup", err: Error, address: string, family: string | number, host: string): boolean; + emit(event: "ready"): boolean; + emit(event: "timeout"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: (hadError: boolean) => void): this; + on(event: "connect", listener: () => void): this; + on(event: "data", listener: (data: Buffer) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + on(event: "ready", listener: () => void): this; + on(event: "timeout", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: (hadError: boolean) => void): this; + once(event: "connect", listener: () => void): this; + once(event: "data", listener: (data: Buffer) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + once(event: "ready", listener: () => void): this; + once(event: "timeout", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: (hadError: boolean) => void): this; + prependListener(event: "connect", listener: () => void): this; + prependListener(event: "data", listener: (data: Buffer) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: (hadError: boolean) => void): this; + prependOnceListener(event: "connect", listener: () => void): this; + prependOnceListener(event: "data", listener: (data: Buffer) => void): this; + 
prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + } + interface ListenOptions extends Abortable { + port?: number | undefined; + host?: string | undefined; + backlog?: number | undefined; + path?: string | undefined; + exclusive?: boolean | undefined; + readableAll?: boolean | undefined; + writableAll?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + } + interface ServerOpts { + /** + * Indicates whether half-opened TCP connections are allowed. + * @default false + */ + allowHalfOpen?: boolean | undefined; + /** + * Indicates whether the socket should be paused on incoming connections. + * @default false + */ + pauseOnConnect?: boolean | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default false + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. + * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. + * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + } + interface DropArgument { + localAddress?: string; + localPort?: number; + localFamily?: string; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + } + /** + * This class is used to create a TCP or `IPC` server. + * @since v0.1.90 + */ + class Server extends EventEmitter { + constructor(connectionListener?: (socket: Socket) => void); + constructor(options?: ServerOpts, connectionListener?: (socket: Socket) => void); + /** + * Start a server listening for connections. A `net.Server` can be a TCP or + * an `IPC` server depending on what it listens to. + * + * Possible signatures: + * + * * `server.listen(handle[, backlog][, callback])` + * * `server.listen(options[, callback])` + * * `server.listen(path[, backlog][, callback])` for `IPC` servers + * * `server.listen([port[, host[, backlog]]][, callback])` for TCP servers + * + * This function is asynchronous. When the server starts listening, the `'listening'` event will be emitted. The last parameter `callback`will be added as a listener for the `'listening'` + * event. + * + * All `listen()` methods can take a `backlog` parameter to specify the maximum + * length of the queue of pending connections. The actual length will be determined + * by the OS through sysctl settings such as `tcp_max_syn_backlog` and `somaxconn`on Linux. The default value of this parameter is 511 (not 512). + * + * All {@link Socket} are set to `SO_REUSEADDR` (see [`socket(7)`](https://man7.org/linux/man-pages/man7/socket.7.html) for + * details). + * + * The `server.listen()` method can be called again if and only if there was an + * error during the first `server.listen()` call or `server.close()` has been + * called. 
Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. + * + * One of the most common errors raised when listening is `EADDRINUSE`. + * This happens when another server is already listening on the requested`port`/`path`/`handle`. One way to handle this would be to retry + * after a certain amount of time: + * + * ```js + * server.on('error', (e) => { + * if (e.code === 'EADDRINUSE') { + * console.error('Address in use, retrying...'); + * setTimeout(() => { + * server.close(); + * server.listen(PORT, HOST); + * }, 1000); + * } + * }); + * ``` + */ + listen(port?: number, hostname?: string, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, hostname?: string, listeningListener?: () => void): this; + listen(port?: number, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, listeningListener?: () => void): this; + listen(path: string, backlog?: number, listeningListener?: () => void): this; + listen(path: string, listeningListener?: () => void): this; + listen(options: ListenOptions, listeningListener?: () => void): this; + listen(handle: any, backlog?: number, listeningListener?: () => void): this; + listen(handle: any, listeningListener?: () => void): this; + /** + * Stops the server from accepting new connections and keeps existing + * connections. This function is asynchronous, the server is finally closed + * when all connections are ended and the server emits a `'close'` event. + * The optional `callback` will be called once the `'close'` event occurs. Unlike + * that event, it will be called with an `Error` as its only argument if the server + * was not open when it was closed. + * @since v0.1.90 + * @param callback Called when the server is closed. + */ + close(callback?: (err?: Error) => void): this; + /** + * Returns the bound `address`, the address `family` name, and `port` of the server + * as reported by the operating system if listening on an IP socket + * (useful to find which port was assigned when getting an OS-assigned address):`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }`. + * + * For a server listening on a pipe or Unix domain socket, the name is returned + * as a string. + * + * ```js + * const server = net.createServer((socket) => { + * socket.end('goodbye\n'); + * }).on('error', (err) => { + * // Handle errors here. + * throw err; + * }); + * + * // Grab an arbitrary unused port. + * server.listen(() => { + * console.log('opened server on', server.address()); + * }); + * ``` + * + * `server.address()` returns `null` before the `'listening'` event has been + * emitted or after calling `server.close()`. + * @since v0.1.90 + */ + address(): AddressInfo | string | null; + /** + * Asynchronously get the number of concurrent connections on the server. Works + * when sockets were sent to forks. + * + * Callback should take two arguments `err` and `count`. + * @since v0.9.7 + */ + getConnections(cb: (error: Error | null, count: number) => void): void; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed server will _not_ let the program exit if it's the only server left (the default behavior). + * If the server is `ref`ed calling `ref()` again will have no effect. + * @since v0.9.1 + */ + ref(): this; + /** + * Calling `unref()` on a server will allow the program to exit if this is the only + * active server in the event system. If the server is already `unref`ed calling`unref()` again will have no effect. 
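+ *
+ * A minimal sketch, listening on an ephemeral port:
+ *
+ * ```js
+ * const net = require('node:net');
+ * const server = net.createServer().listen(0);
+ * // The process may now exit even though the server is still listening.
+ * server.unref();
+ * ```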
+ * @since v0.9.1 + */ + unref(): this; + /** + * Set this property to reject connections when the server's connection count gets + * high. + * + * It is not recommended to use this option once a socket has been sent to a child + * with `child_process.fork()`. + * @since v0.2.0 + */ + maxConnections: number; + connections: number; + /** + * Indicates whether or not the server is listening for connections. + * @since v5.7.0 + */ + readonly listening: boolean; + /** + * events.EventEmitter + * 1. close + * 2. connection + * 3. error + * 4. listening + * 5. drop + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Socket) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "drop", listener: (data?: DropArgument) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Socket): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit(event: "drop", data?: DropArgument): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Socket) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "drop", listener: (data?: DropArgument) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Socket) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "drop", listener: (data?: DropArgument) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Socket) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "drop", listener: (data?: DropArgument) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Socket) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "drop", listener: (data?: DropArgument) => void): this; + /** + * Calls {@link Server.close()} and returns a promise that fulfills when the server has closed. + * @since v20.5.0 + */ + [Symbol.asyncDispose](): Promise; + } + type IPVersion = "ipv4" | "ipv6"; + /** + * The `BlockList` object can be used with some network APIs to specify rules for + * disabling inbound or outbound access to specific IP addresses, IP ranges, or + * IP subnets. + * @since v15.0.0, v14.18.0 + */ + class BlockList { + /** + * Adds a rule to block the given IP address. + * @since v15.0.0, v14.18.0 + * @param address An IPv4 or IPv6 address. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. 
+ */ + addAddress(address: string, type?: IPVersion): void; + addAddress(address: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses from `start` (inclusive) to`end` (inclusive). + * @since v15.0.0, v14.18.0 + * @param start The starting IPv4 or IPv6 address in the range. + * @param end The ending IPv4 or IPv6 address in the range. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addRange(start: string, end: string, type?: IPVersion): void; + addRange(start: SocketAddress, end: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses specified as a subnet mask. + * @since v15.0.0, v14.18.0 + * @param net The network IPv4 or IPv6 address. + * @param prefix The number of CIDR prefix bits. For IPv4, this must be a value between `0` and `32`. For IPv6, this must be between `0` and `128`. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addSubnet(net: SocketAddress, prefix: number): void; + addSubnet(net: string, prefix: number, type?: IPVersion): void; + /** + * Returns `true` if the given IP address matches any of the rules added to the`BlockList`. + * + * ```js + * const blockList = new net.BlockList(); + * blockList.addAddress('123.123.123.123'); + * blockList.addRange('10.0.0.1', '10.0.0.10'); + * blockList.addSubnet('8592:757c:efae:4e45::', 64, 'ipv6'); + * + * console.log(blockList.check('123.123.123.123')); // Prints: true + * console.log(blockList.check('10.0.0.3')); // Prints: true + * console.log(blockList.check('222.111.111.222')); // Prints: false + * + * // IPv6 notation for IPv4 addresses works: + * console.log(blockList.check('::ffff:7b7b:7b7b', 'ipv6')); // Prints: true + * console.log(blockList.check('::ffff:123.123.123.123', 'ipv6')); // Prints: true + * ``` + * @since v15.0.0, v14.18.0 + * @param address The IP address to check + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + check(address: SocketAddress): boolean; + check(address: string, type?: IPVersion): boolean; + /** + * The list of rules added to the blocklist. + * @since v15.0.0, v14.18.0 + */ + rules: readonly string[]; + } + interface TcpNetConnectOpts extends TcpSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + interface IpcNetConnectOpts extends IpcSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + type NetConnectOpts = TcpNetConnectOpts | IpcNetConnectOpts; + /** + * Creates a new TCP or `IPC` server. + * + * If `allowHalfOpen` is set to `true`, when the other end of the socket + * signals the end of transmission, the server will only send back the end of + * transmission when `socket.end()` is explicitly called. For example, in the + * context of TCP, when a FIN packed is received, a FIN packed is sent + * back only when `socket.end()` is explicitly called. Until then the + * connection is half-closed (non-readable but still writable). See `'end'` event and [RFC 1122](https://tools.ietf.org/html/rfc1122) (section 4.2.2.13) for more information. + * + * If `pauseOnConnect` is set to `true`, then the socket associated with each + * incoming connection will be paused, and no data will be read from its handle. + * This allows connections to be passed between processes without any data being + * read by the original process. To begin reading data from a paused socket, call `socket.resume()`. + * + * The server can be a TCP server or an `IPC` server, depending on what it `listen()` to. 
+ * + * Here is an example of a TCP echo server which listens for connections + * on port 8124: + * + * ```js + * const net = require('node:net'); + * const server = net.createServer((c) => { + * // 'connection' listener. + * console.log('client connected'); + * c.on('end', () => { + * console.log('client disconnected'); + * }); + * c.write('hello\r\n'); + * c.pipe(c); + * }); + * server.on('error', (err) => { + * throw err; + * }); + * server.listen(8124, () => { + * console.log('server bound'); + * }); + * ``` + * + * Test this by using `telnet`: + * + * ```bash + * telnet localhost 8124 + * ``` + * + * To listen on the socket `/tmp/echo.sock`: + * + * ```js + * server.listen('/tmp/echo.sock', () => { + * console.log('server bound'); + * }); + * ``` + * + * Use `nc` to connect to a Unix domain socket server: + * + * ```bash + * nc -U /tmp/echo.sock + * ``` + * @since v0.5.0 + * @param connectionListener Automatically set as a listener for the {@link 'connection'} event. + */ + function createServer(connectionListener?: (socket: Socket) => void): Server; + function createServer(options?: ServerOpts, connectionListener?: (socket: Socket) => void): Server; + /** + * Aliases to {@link createConnection}. + * + * Possible signatures: + * + * * {@link connect} + * * {@link connect} for `IPC` connections. + * * {@link connect} for TCP connections. + */ + function connect(options: NetConnectOpts, connectionListener?: () => void): Socket; + function connect(port: number, host?: string, connectionListener?: () => void): Socket; + function connect(path: string, connectionListener?: () => void): Socket; + /** + * A factory function, which creates a new {@link Socket}, + * immediately initiates connection with `socket.connect()`, + * then returns the `net.Socket` that starts the connection. + * + * When the connection is established, a `'connect'` event will be emitted + * on the returned socket. The last parameter `connectListener`, if supplied, + * will be added as a listener for the `'connect'` event **once**. + * + * Possible signatures: + * + * * {@link createConnection} + * * {@link createConnection} for `IPC` connections. + * * {@link createConnection} for TCP connections. + * + * The {@link connect} function is an alias to this function. + */ + function createConnection(options: NetConnectOpts, connectionListener?: () => void): Socket; + function createConnection(port: number, host?: string, connectionListener?: () => void): Socket; + function createConnection(path: string, connectionListener?: () => void): Socket; + /** + * Gets the current default value of the `autoSelectFamily` option of `socket.connect(options)`. + * The initial default value is `true`, unless the command line option`--no-network-family-autoselection` is provided. + * @since v19.4.0 + */ + function getDefaultAutoSelectFamily(): boolean; + /** + * Sets the default value of the `autoSelectFamily` option of `socket.connect(options)`. + * @since v19.4.0 + */ + function setDefaultAutoSelectFamily(value: boolean): void; + /** + * Gets the current default value of the `autoSelectFamilyAttemptTimeout` option of `socket.connect(options)`. + * The initial default value is `250`. + * @since v19.8.0 + */ + function getDefaultAutoSelectFamilyAttemptTimeout(): number; + /** + * Sets the default value of the `autoSelectFamilyAttemptTimeout` option of `socket.connect(options)`. + * @since v19.8.0 + */ + function setDefaultAutoSelectFamilyAttemptTimeout(value: number): void; + /** + * Returns `6` if `input` is an IPv6 address. 
Returns `4` if `input` is an IPv4 + * address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no leading zeroes. Otherwise, returns`0`. + * + * ```js + * net.isIP('::1'); // returns 6 + * net.isIP('127.0.0.1'); // returns 4 + * net.isIP('127.000.000.001'); // returns 0 + * net.isIP('127.0.0.1/24'); // returns 0 + * net.isIP('fhqwhgads'); // returns 0 + * ``` + * @since v0.3.0 + */ + function isIP(input: string): number; + /** + * Returns `true` if `input` is an IPv4 address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no + * leading zeroes. Otherwise, returns `false`. + * + * ```js + * net.isIPv4('127.0.0.1'); // returns true + * net.isIPv4('127.000.000.001'); // returns false + * net.isIPv4('127.0.0.1/24'); // returns false + * net.isIPv4('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv4(input: string): boolean; + /** + * Returns `true` if `input` is an IPv6 address. Otherwise, returns `false`. + * + * ```js + * net.isIPv6('::1'); // returns true + * net.isIPv6('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv6(input: string): boolean; + interface SocketAddressInitOptions { + /** + * The network address as either an IPv4 or IPv6 string. + * @default 127.0.0.1 + */ + address?: string | undefined; + /** + * @default `'ipv4'` + */ + family?: IPVersion | undefined; + /** + * An IPv6 flow-label used only if `family` is `'ipv6'`. + * @default 0 + */ + flowlabel?: number | undefined; + /** + * An IP port. + * @default 0 + */ + port?: number | undefined; + } + /** + * @since v15.14.0, v14.18.0 + */ + class SocketAddress { + constructor(options: SocketAddressInitOptions); + /** + * Either \`'ipv4'\` or \`'ipv6'\`. + * @since v15.14.0, v14.18.0 + */ + readonly address: string; + /** + * Either \`'ipv4'\` or \`'ipv6'\`. + * @since v15.14.0, v14.18.0 + */ + readonly family: IPVersion; + /** + * @since v15.14.0, v14.18.0 + */ + readonly port: number; + /** + * @since v15.14.0, v14.18.0 + */ + readonly flowlabel: number; + } +} +declare module "node:net" { + export * from "net"; +} diff --git a/node_modules/@types/node/os.d.ts b/node_modules/@types/node/os.d.ts new file mode 100644 index 0000000..39a33f7 --- /dev/null +++ b/node_modules/@types/node/os.d.ts @@ -0,0 +1,478 @@ +/** + * The `node:os` module provides operating system-related utility methods and + * properties. It can be accessed using: + * + * ```js + * const os = require('node:os'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/os.js) + */ +declare module "os" { + interface CpuInfo { + model: string; + speed: number; + times: { + user: number; + nice: number; + sys: number; + idle: number; + irq: number; + }; + } + interface NetworkInterfaceBase { + address: string; + netmask: string; + mac: string; + internal: boolean; + cidr: string | null; + } + interface NetworkInterfaceInfoIPv4 extends NetworkInterfaceBase { + family: "IPv4"; + scopeid?: undefined; + } + interface NetworkInterfaceInfoIPv6 extends NetworkInterfaceBase { + family: "IPv6"; + scopeid: number; + } + interface UserInfo { + username: T; + uid: number; + gid: number; + shell: T | null; + homedir: T; + } + type NetworkInterfaceInfo = NetworkInterfaceInfoIPv4 | NetworkInterfaceInfoIPv6; + /** + * Returns the host name of the operating system as a string. + * @since v0.3.3 + */ + function hostname(): string; + /** + * Returns an array containing the 1, 5, and 15 minute load averages. 
+ * + * The load average is a measure of system activity calculated by the operating + * system and expressed as a fractional number. + * + * The load average is a Unix-specific concept. On Windows, the return value is + * always `[0, 0, 0]`. + * @since v0.3.3 + */ + function loadavg(): number[]; + /** + * Returns the system uptime in number of seconds. + * @since v0.3.3 + */ + function uptime(): number; + /** + * Returns the amount of free system memory in bytes as an integer. + * @since v0.3.3 + */ + function freemem(): number; + /** + * Returns the total amount of system memory in bytes as an integer. + * @since v0.3.3 + */ + function totalmem(): number; + /** + * Returns an array of objects containing information about each logical CPU core. + * The array will be empty if no CPU information is available, such as if the`/proc` file system is unavailable. + * + * The properties included on each object include: + * + * ```js + * [ + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 252020, + * nice: 0, + * sys: 30340, + * idle: 1070356870, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 306960, + * nice: 0, + * sys: 26980, + * idle: 1071569080, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 248450, + * nice: 0, + * sys: 21750, + * idle: 1070919370, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 256880, + * nice: 0, + * sys: 19430, + * idle: 1070905480, + * irq: 20, + * }, + * }, + * ] + * ``` + * + * `nice` values are POSIX-only. On Windows, the `nice` values of all processors + * are always 0. + * + * `os.cpus().length` should not be used to calculate the amount of parallelism + * available to an application. Use {@link availableParallelism} for this purpose. + * @since v0.3.3 + */ + function cpus(): CpuInfo[]; + /** + * Returns an estimate of the default amount of parallelism a program should use. + * Always returns a value greater than zero. + * + * This function is a small wrapper about libuv's [`uv_available_parallelism()`](https://docs.libuv.org/en/v1.x/misc.html#c.uv_available_parallelism). + * @since v19.4.0, v18.14.0 + */ + function availableParallelism(): number; + /** + * Returns the operating system name as returned by [`uname(3)`](https://linux.die.net/man/3/uname). For example, it + * returns `'Linux'` on Linux, `'Darwin'` on macOS, and `'Windows_NT'` on Windows. + * + * See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for additional information + * about the output of running [`uname(3)`](https://linux.die.net/man/3/uname) on various operating systems. + * @since v0.3.3 + */ + function type(): string; + /** + * Returns the operating system as a string. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `GetVersionExW()` is used. See + * [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v0.3.3 + */ + function release(): string; + /** + * Returns an object containing network interfaces that have been assigned a + * network address. + * + * Each key on the returned object identifies a network interface. 
The associated + * value is an array of objects that each describe an assigned network address. + * + * The properties available on the assigned network address object include: + * + * ```js + * { + * lo: [ + * { + * address: '127.0.0.1', + * netmask: '255.0.0.0', + * family: 'IPv4', + * mac: '00:00:00:00:00:00', + * internal: true, + * cidr: '127.0.0.1/8' + * }, + * { + * address: '::1', + * netmask: 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', + * family: 'IPv6', + * mac: '00:00:00:00:00:00', + * scopeid: 0, + * internal: true, + * cidr: '::1/128' + * } + * ], + * eth0: [ + * { + * address: '192.168.1.108', + * netmask: '255.255.255.0', + * family: 'IPv4', + * mac: '01:02:03:0a:0b:0c', + * internal: false, + * cidr: '192.168.1.108/24' + * }, + * { + * address: 'fe80::a00:27ff:fe4e:66a1', + * netmask: 'ffff:ffff:ffff:ffff::', + * family: 'IPv6', + * mac: '01:02:03:0a:0b:0c', + * scopeid: 1, + * internal: false, + * cidr: 'fe80::a00:27ff:fe4e:66a1/64' + * } + * ] + * } + * ``` + * @since v0.6.0 + */ + function networkInterfaces(): NodeJS.Dict; + /** + * Returns the string path of the current user's home directory. + * + * On POSIX, it uses the `$HOME` environment variable if defined. Otherwise it + * uses the [effective UID](https://en.wikipedia.org/wiki/User_identifier#Effective_user_ID) to look up the user's home directory. + * + * On Windows, it uses the `USERPROFILE` environment variable if defined. + * Otherwise it uses the path to the profile directory of the current user. + * @since v2.3.0 + */ + function homedir(): string; + /** + * Returns information about the currently effective user. On POSIX platforms, + * this is typically a subset of the password file. The returned object includes + * the `username`, `uid`, `gid`, `shell`, and `homedir`. On Windows, the `uid` and`gid` fields are `-1`, and `shell` is `null`. + * + * The value of `homedir` returned by `os.userInfo()` is provided by the operating + * system. This differs from the result of `os.homedir()`, which queries + * environment variables for the home directory before falling back to the + * operating system response. + * + * Throws a `SystemError` if a user has no `username` or `homedir`. 
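+ *
+ * A small usage sketch (on Windows, `shell` is `null`):
+ *
+ * ```js
+ * const os = require('node:os');
+ * const { username, homedir, shell } = os.userInfo();
+ * console.log(`${username} ${homedir} ${shell}`);
+ * ```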
+ * @since v6.0.0 + */ + function userInfo(options: { encoding: "buffer" }): UserInfo; + function userInfo(options?: { encoding: BufferEncoding }): UserInfo; + type SignalConstants = { + [key in NodeJS.Signals]: number; + }; + namespace constants { + const UV_UDP_REUSEADDR: number; + namespace signals {} + const signals: SignalConstants; + namespace errno { + const E2BIG: number; + const EACCES: number; + const EADDRINUSE: number; + const EADDRNOTAVAIL: number; + const EAFNOSUPPORT: number; + const EAGAIN: number; + const EALREADY: number; + const EBADF: number; + const EBADMSG: number; + const EBUSY: number; + const ECANCELED: number; + const ECHILD: number; + const ECONNABORTED: number; + const ECONNREFUSED: number; + const ECONNRESET: number; + const EDEADLK: number; + const EDESTADDRREQ: number; + const EDOM: number; + const EDQUOT: number; + const EEXIST: number; + const EFAULT: number; + const EFBIG: number; + const EHOSTUNREACH: number; + const EIDRM: number; + const EILSEQ: number; + const EINPROGRESS: number; + const EINTR: number; + const EINVAL: number; + const EIO: number; + const EISCONN: number; + const EISDIR: number; + const ELOOP: number; + const EMFILE: number; + const EMLINK: number; + const EMSGSIZE: number; + const EMULTIHOP: number; + const ENAMETOOLONG: number; + const ENETDOWN: number; + const ENETRESET: number; + const ENETUNREACH: number; + const ENFILE: number; + const ENOBUFS: number; + const ENODATA: number; + const ENODEV: number; + const ENOENT: number; + const ENOEXEC: number; + const ENOLCK: number; + const ENOLINK: number; + const ENOMEM: number; + const ENOMSG: number; + const ENOPROTOOPT: number; + const ENOSPC: number; + const ENOSR: number; + const ENOSTR: number; + const ENOSYS: number; + const ENOTCONN: number; + const ENOTDIR: number; + const ENOTEMPTY: number; + const ENOTSOCK: number; + const ENOTSUP: number; + const ENOTTY: number; + const ENXIO: number; + const EOPNOTSUPP: number; + const EOVERFLOW: number; + const EPERM: number; + const EPIPE: number; + const EPROTO: number; + const EPROTONOSUPPORT: number; + const EPROTOTYPE: number; + const ERANGE: number; + const EROFS: number; + const ESPIPE: number; + const ESRCH: number; + const ESTALE: number; + const ETIME: number; + const ETIMEDOUT: number; + const ETXTBSY: number; + const EWOULDBLOCK: number; + const EXDEV: number; + const WSAEINTR: number; + const WSAEBADF: number; + const WSAEACCES: number; + const WSAEFAULT: number; + const WSAEINVAL: number; + const WSAEMFILE: number; + const WSAEWOULDBLOCK: number; + const WSAEINPROGRESS: number; + const WSAEALREADY: number; + const WSAENOTSOCK: number; + const WSAEDESTADDRREQ: number; + const WSAEMSGSIZE: number; + const WSAEPROTOTYPE: number; + const WSAENOPROTOOPT: number; + const WSAEPROTONOSUPPORT: number; + const WSAESOCKTNOSUPPORT: number; + const WSAEOPNOTSUPP: number; + const WSAEPFNOSUPPORT: number; + const WSAEAFNOSUPPORT: number; + const WSAEADDRINUSE: number; + const WSAEADDRNOTAVAIL: number; + const WSAENETDOWN: number; + const WSAENETUNREACH: number; + const WSAENETRESET: number; + const WSAECONNABORTED: number; + const WSAECONNRESET: number; + const WSAENOBUFS: number; + const WSAEISCONN: number; + const WSAENOTCONN: number; + const WSAESHUTDOWN: number; + const WSAETOOMANYREFS: number; + const WSAETIMEDOUT: number; + const WSAECONNREFUSED: number; + const WSAELOOP: number; + const WSAENAMETOOLONG: number; + const WSAEHOSTDOWN: number; + const WSAEHOSTUNREACH: number; + const WSAENOTEMPTY: number; + const WSAEPROCLIM: number; + 
const WSAEUSERS: number; + const WSAEDQUOT: number; + const WSAESTALE: number; + const WSAEREMOTE: number; + const WSASYSNOTREADY: number; + const WSAVERNOTSUPPORTED: number; + const WSANOTINITIALISED: number; + const WSAEDISCON: number; + const WSAENOMORE: number; + const WSAECANCELLED: number; + const WSAEINVALIDPROCTABLE: number; + const WSAEINVALIDPROVIDER: number; + const WSAEPROVIDERFAILEDINIT: number; + const WSASYSCALLFAILURE: number; + const WSASERVICE_NOT_FOUND: number; + const WSATYPE_NOT_FOUND: number; + const WSA_E_NO_MORE: number; + const WSA_E_CANCELLED: number; + const WSAEREFUSED: number; + } + namespace priority { + const PRIORITY_LOW: number; + const PRIORITY_BELOW_NORMAL: number; + const PRIORITY_NORMAL: number; + const PRIORITY_ABOVE_NORMAL: number; + const PRIORITY_HIGH: number; + const PRIORITY_HIGHEST: number; + } + } + const devNull: string; + const EOL: string; + /** + * Returns the operating system CPU architecture for which the Node.js binary was + * compiled. Possible values are `'arm'`, `'arm64'`, `'ia32'`, `'loong64'`,`'mips'`, `'mipsel'`, `'ppc'`, `'ppc64'`, `'riscv64'`, `'s390'`, `'s390x'`, + * and `'x64'`. + * + * The return value is equivalent to `process.arch`. + * @since v0.5.0 + */ + function arch(): string; + /** + * Returns a string identifying the kernel version. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `RtlGetVersion()` is used, and if it is not + * available, `GetVersionExW()` will be used. See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v13.11.0, v12.17.0 + */ + function version(): string; + /** + * Returns a string identifying the operating system platform for which + * the Node.js binary was compiled. The value is set at compile time. + * Possible values are `'aix'`, `'darwin'`, `'freebsd'`,`'linux'`,`'openbsd'`, `'sunos'`, and `'win32'`. + * + * The return value is equivalent to `process.platform`. + * + * The value `'android'` may also be returned if Node.js is built on the Android + * operating system. [Android support is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.5.0 + */ + function platform(): NodeJS.Platform; + /** + * Returns the machine type as a string, such as `arm`, `arm64`, `aarch64`,`mips`, `mips64`, `ppc64`, `ppc64le`, `s390`, `s390x`, `i386`, `i686`, `x86_64`. + * + * On POSIX systems, the machine type is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `RtlGetVersion()` is used, and if it is not + * available, `GetVersionExW()` will be used. See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v18.9.0, v16.18.0 + */ + function machine(): string; + /** + * Returns the operating system's default directory for temporary files as a + * string. + * @since v0.9.9 + */ + function tmpdir(): string; + /** + * Returns a string identifying the endianness of the CPU for which the Node.js + * binary was compiled. + * + * Possible values are `'BE'` for big endian and `'LE'` for little endian. + * @since v0.9.4 + */ + function endianness(): "BE" | "LE"; + /** + * Returns the scheduling priority for the process specified by `pid`. If `pid` is + * not provided or is `0`, the priority of the current process is returned. 
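+ *
+ * A small usage sketch that lowers the priority of the current process:
+ *
+ * ```js
+ * const os = require('node:os');
+ * console.log(os.getPriority()); // current process
+ * os.setPriority(os.constants.priority.PRIORITY_BELOW_NORMAL);
+ * console.log(os.getPriority());
+ * ```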
+ * @since v10.10.0 + * @param [pid=0] The process ID to retrieve scheduling priority for. + */ + function getPriority(pid?: number): number; + /** + * Attempts to set the scheduling priority for the process specified by `pid`. If`pid` is not provided or is `0`, the process ID of the current process is used. + * + * The `priority` input must be an integer between `-20` (high priority) and `19`(low priority). Due to differences between Unix priority levels and Windows + * priority classes, `priority` is mapped to one of six priority constants in`os.constants.priority`. When retrieving a process priority level, this range + * mapping may cause the return value to be slightly different on Windows. To avoid + * confusion, set `priority` to one of the priority constants. + * + * On Windows, setting priority to `PRIORITY_HIGHEST` requires elevated user + * privileges. Otherwise the set priority will be silently reduced to`PRIORITY_HIGH`. + * @since v10.10.0 + * @param [pid=0] The process ID to set scheduling priority for. + * @param priority The scheduling priority to assign to the process. + */ + function setPriority(priority: number): void; + function setPriority(pid: number, priority: number): void; +} +declare module "node:os" { + export * from "os"; +} diff --git a/node_modules/@types/node/package.json b/node_modules/@types/node/package.json new file mode 100644 index 0000000..8999f88 --- /dev/null +++ b/node_modules/@types/node/package.json @@ -0,0 +1,217 @@ +{ + "name": "@types/node", + "version": "20.12.2", + "description": "TypeScript definitions for node", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node", + "license": "MIT", + "contributors": [ + { + "name": "Microsoft TypeScript", + "githubUsername": "Microsoft", + "url": "https://github.com/Microsoft" + }, + { + "name": "Alberto Schiabel", + "githubUsername": "jkomyno", + "url": "https://github.com/jkomyno" + }, + { + "name": "Alvis HT Tang", + "githubUsername": "alvis", + "url": "https://github.com/alvis" + }, + { + "name": "Andrew Makarov", + "githubUsername": "r3nya", + "url": "https://github.com/r3nya" + }, + { + "name": "Benjamin Toueg", + "githubUsername": "btoueg", + "url": "https://github.com/btoueg" + }, + { + "name": "Chigozirim C.", + "githubUsername": "smac89", + "url": "https://github.com/smac89" + }, + { + "name": "David Junger", + "githubUsername": "touffy", + "url": "https://github.com/touffy" + }, + { + "name": "Deividas Bakanas", + "githubUsername": "DeividasBakanas", + "url": "https://github.com/DeividasBakanas" + }, + { + "name": "Eugene Y. Q. 
Shen", + "githubUsername": "eyqs", + "url": "https://github.com/eyqs" + }, + { + "name": "Hannes Magnusson", + "githubUsername": "Hannes-Magnusson-CK", + "url": "https://github.com/Hannes-Magnusson-CK" + }, + { + "name": "Huw", + "githubUsername": "hoo29", + "url": "https://github.com/hoo29" + }, + { + "name": "Kelvin Jin", + "githubUsername": "kjin", + "url": "https://github.com/kjin" + }, + { + "name": "Klaus Meinhardt", + "githubUsername": "ajafff", + "url": "https://github.com/ajafff" + }, + { + "name": "Lishude", + "githubUsername": "islishude", + "url": "https://github.com/islishude" + }, + { + "name": "Mariusz Wiktorczyk", + "githubUsername": "mwiktorczyk", + "url": "https://github.com/mwiktorczyk" + }, + { + "name": "Mohsen Azimi", + "githubUsername": "mohsen1", + "url": "https://github.com/mohsen1" + }, + { + "name": "Nikita Galkin", + "githubUsername": "galkin", + "url": "https://github.com/galkin" + }, + { + "name": "Parambir Singh", + "githubUsername": "parambirs", + "url": "https://github.com/parambirs" + }, + { + "name": "Sebastian Silbermann", + "githubUsername": "eps1lon", + "url": "https://github.com/eps1lon" + }, + { + "name": "Thomas den Hollander", + "githubUsername": "ThomasdenH", + "url": "https://github.com/ThomasdenH" + }, + { + "name": "Wilco Bakker", + "githubUsername": "WilcoBakker", + "url": "https://github.com/WilcoBakker" + }, + { + "name": "wwwy3y3", + "githubUsername": "wwwy3y3", + "url": "https://github.com/wwwy3y3" + }, + { + "name": "Samuel Ainsworth", + "githubUsername": "samuela", + "url": "https://github.com/samuela" + }, + { + "name": "Kyle Uehlein", + "githubUsername": "kuehlein", + "url": "https://github.com/kuehlein" + }, + { + "name": "Thanik Bhongbhibhat", + "githubUsername": "bhongy", + "url": "https://github.com/bhongy" + }, + { + "name": "Marcin Kopacz", + "githubUsername": "chyzwar", + "url": "https://github.com/chyzwar" + }, + { + "name": "Trivikram Kamat", + "githubUsername": "trivikr", + "url": "https://github.com/trivikr" + }, + { + "name": "Junxiao Shi", + "githubUsername": "yoursunny", + "url": "https://github.com/yoursunny" + }, + { + "name": "Ilia Baryshnikov", + "githubUsername": "qwelias", + "url": "https://github.com/qwelias" + }, + { + "name": "ExE Boss", + "githubUsername": "ExE-Boss", + "url": "https://github.com/ExE-Boss" + }, + { + "name": "Piotr Błażejewicz", + "githubUsername": "peterblazejewicz", + "url": "https://github.com/peterblazejewicz" + }, + { + "name": "Anna Henningsen", + "githubUsername": "addaleax", + "url": "https://github.com/addaleax" + }, + { + "name": "Victor Perin", + "githubUsername": "victorperin", + "url": "https://github.com/victorperin" + }, + { + "name": "Yongsheng Zhang", + "githubUsername": "ZYSzys", + "url": "https://github.com/ZYSzys" + }, + { + "name": "NodeJS Contributors", + "githubUsername": "NodeJS", + "url": "https://github.com/NodeJS" + }, + { + "name": "Linus Unnebäck", + "githubUsername": "LinusU", + "url": "https://github.com/LinusU" + }, + { + "name": "wafuwafu13", + "githubUsername": "wafuwafu13", + "url": "https://github.com/wafuwafu13" + }, + { + "name": "Matteo Collina", + "githubUsername": "mcollina", + "url": "https://github.com/mcollina" + }, + { + "name": "Dmitry Semigradsky", + "githubUsername": "Semigradsky", + "url": "https://github.com/Semigradsky" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/node" + }, + "scripts": {}, + "dependencies": { + 
"undici-types": "~5.26.4" + }, + "typesPublisherContentHash": "87dfc7f77cdee3468250d81c9d0a1affd373ef091004a961696e12cd422beb71", + "typeScriptVersion": "4.7" +} \ No newline at end of file diff --git a/node_modules/@types/node/path.d.ts b/node_modules/@types/node/path.d.ts new file mode 100644 index 0000000..6f07681 --- /dev/null +++ b/node_modules/@types/node/path.d.ts @@ -0,0 +1,191 @@ +declare module "path/posix" { + import path = require("path"); + export = path; +} +declare module "path/win32" { + import path = require("path"); + export = path; +} +/** + * The `node:path` module provides utilities for working with file and directory + * paths. It can be accessed using: + * + * ```js + * const path = require('node:path'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/path.js) + */ +declare module "path" { + namespace path { + /** + * A parsed path object generated by path.parse() or consumed by path.format(). + */ + interface ParsedPath { + /** + * The root of the path such as '/' or 'c:\' + */ + root: string; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir: string; + /** + * The file name including extension (if any) such as 'index.html' + */ + base: string; + /** + * The file extension (if any) such as '.html' + */ + ext: string; + /** + * The file name without extension (if any) such as 'index' + */ + name: string; + } + interface FormatInputPathObject { + /** + * The root of the path such as '/' or 'c:\' + */ + root?: string | undefined; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir?: string | undefined; + /** + * The file name including extension (if any) such as 'index.html' + */ + base?: string | undefined; + /** + * The file extension (if any) such as '.html' + */ + ext?: string | undefined; + /** + * The file name without extension (if any) such as 'index' + */ + name?: string | undefined; + } + interface PlatformPath { + /** + * Normalize a string path, reducing '..' and '.' parts. + * When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used. + * + * @param path string path to normalize. + * @throws {TypeError} if `path` is not a string. + */ + normalize(path: string): string; + /** + * Join all arguments together and normalize the resulting path. + * + * @param paths paths to join. + * @throws {TypeError} if any of the path segments is not a string. + */ + join(...paths: string[]): string; + /** + * The right-most parameter is considered {to}. Other parameters are considered an array of {from}. + * + * Starting from leftmost {from} parameter, resolves {to} to an absolute path. + * + * If {to} isn't already absolute, {from} arguments are prepended in right to left order, + * until an absolute path is found. If after using all {from} paths still no absolute path is found, + * the current working directory is used as well. The resulting path is normalized, + * and trailing slashes are removed unless the path gets resolved to the root directory. + * + * @param paths A sequence of paths or path segments. + * @throws {TypeError} if any of the arguments is not a string. + */ + resolve(...paths: string[]): string; + /** + * Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory. + * + * If the given {path} is a zero-length string, `false` will be returned. + * + * @param path path to test. 
+ * @throws {TypeError} if `path` is not a string. + */ + isAbsolute(path: string): boolean; + /** + * Solve the relative path from {from} to {to} based on the current working directory. + * At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve. + * + * @throws {TypeError} if either `from` or `to` is not a string. + */ + relative(from: string, to: string): string; + /** + * Return the directory name of a path. Similar to the Unix dirname command. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + dirname(path: string): string; + /** + * Return the last portion of a path. Similar to the Unix basename command. + * Often used to extract the file name from a fully qualified path. + * + * @param path the path to evaluate. + * @param suffix optionally, an extension to remove from the result. + * @throws {TypeError} if `path` is not a string or if `ext` is given and is not a string. + */ + basename(path: string, suffix?: string): string; + /** + * Return the extension of the path, from the last '.' to end of string in the last portion of the path. + * If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + extname(path: string): string; + /** + * The platform-specific file separator. '\\' or '/'. + */ + readonly sep: "\\" | "/"; + /** + * The platform-specific file delimiter. ';' or ':'. + */ + readonly delimiter: ";" | ":"; + /** + * Returns an object from a path string - the opposite of format(). + * + * @param path path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + parse(path: string): ParsedPath; + /** + * Returns a path string from an object - the opposite of parse(). + * + * @param pathObject path to evaluate. + */ + format(pathObject: FormatInputPathObject): string; + /** + * On Windows systems only, returns an equivalent namespace-prefixed path for the given path. + * If path is not a string, path will be returned without modifications. + * This method is meaningful only on Windows system. + * On POSIX systems, the method is non-operational and always returns path without modifications. + */ + toNamespacedPath(path: string): string; + /** + * Posix specific pathing. + * Same as parent object on posix. + */ + readonly posix: PlatformPath; + /** + * Windows specific pathing. + * Same as parent object on windows + */ + readonly win32: PlatformPath; + } + } + const path: path.PlatformPath; + export = path; +} +declare module "node:path" { + import path = require("path"); + export = path; +} +declare module "node:path/posix" { + import path = require("path/posix"); + export = path; +} +declare module "node:path/win32" { + import path = require("path/win32"); + export = path; +} diff --git a/node_modules/@types/node/perf_hooks.d.ts b/node_modules/@types/node/perf_hooks.d.ts new file mode 100644 index 0000000..c0ce852 --- /dev/null +++ b/node_modules/@types/node/perf_hooks.d.ts @@ -0,0 +1,645 @@ +/** + * This module provides an implementation of a subset of the W3C [Web Performance APIs](https://w3c.github.io/perf-timing-primer/) as well as additional APIs for + * Node.js-specific performance measurements. 
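Since `path.d.ts` only declares the `PlatformPath` surface, a short sketch of the most common calls may help; the results shown assume a POSIX separator (use `path.win32` for the Windows behaviour):

```js
const path = require('node:path');

console.log(path.join('docs', 'api', '..', 'guide.md')); // 'docs/guide.md'
console.log(path.normalize('/foo//bar/../baz'));         // '/foo/baz'
console.log(path.resolve('guide.md'));                   // absolute path under process.cwd()
console.log(path.isAbsolute('/etc/hosts'));              // true
console.log(path.basename('/tmp/report.pdf', '.pdf'));   // 'report'
console.log(path.extname('index.html'));                 // '.html'

// parse() and format() are inverses over the ParsedPath shape declared above.
const parsed = path.parse('/home/user/index.html');
// { root: '/', dir: '/home/user', base: 'index.html', ext: '.html', name: 'index' }
console.log(path.format(parsed) === '/home/user/index.html'); // true
```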
+ * + * Node.js supports the following [Web Performance APIs](https://w3c.github.io/perf-timing-primer/): + * + * * [High Resolution Time](https://www.w3.org/TR/hr-time-2) + * * [Performance Timeline](https://w3c.github.io/performance-timeline/) + * * [User Timing](https://www.w3.org/TR/user-timing/) + * * [Resource Timing](https://www.w3.org/TR/resource-timing-2/) + * + * ```js + * const { PerformanceObserver, performance } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((items) => { + * console.log(items.getEntries()[0].duration); + * performance.clearMarks(); + * }); + * obs.observe({ type: 'measure' }); + * performance.measure('Start to Now'); + * + * performance.mark('A'); + * doSomeLongRunningProcess(() => { + * performance.measure('A to Now', 'A'); + * + * performance.mark('B'); + * performance.measure('A to B', 'A', 'B'); + * }); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/perf_hooks.js) + */ +declare module "perf_hooks" { + import { AsyncResource } from "node:async_hooks"; + type EntryType = "node" | "mark" | "measure" | "gc" | "function" | "http2" | "http" | "dns" | "net"; + interface NodeGCPerformanceDetail { + /** + * When `performanceEntry.entryType` is equal to 'gc', `the performance.kind` property identifies + * the type of garbage collection operation that occurred. + * See perf_hooks.constants for valid values. + */ + readonly kind?: number | undefined; + /** + * When `performanceEntry.entryType` is equal to 'gc', the `performance.flags` + * property contains additional information about garbage collection operation. + * See perf_hooks.constants for valid values. + */ + readonly flags?: number | undefined; + } + /** + * The constructor of this class is not exposed to users directly. + * @since v8.5.0 + */ + class PerformanceEntry { + protected constructor(); + /** + * The total number of milliseconds elapsed for this entry. This value will not + * be meaningful for all Performance Entry types. + * @since v8.5.0 + */ + readonly duration: number; + /** + * The name of the performance entry. + * @since v8.5.0 + */ + readonly name: string; + /** + * The high resolution millisecond timestamp marking the starting time of the + * Performance Entry. + * @since v8.5.0 + */ + readonly startTime: number; + /** + * The type of the performance entry. It may be one of: + * + * * `'node'` (Node.js only) + * * `'mark'` (available on the Web) + * * `'measure'` (available on the Web) + * * `'gc'` (Node.js only) + * * `'function'` (Node.js only) + * * `'http2'` (Node.js only) + * * `'http'` (Node.js only) + * @since v8.5.0 + */ + readonly entryType: EntryType; + /** + * Additional detail specific to the `entryType`. + * @since v16.0.0 + */ + readonly detail?: NodeGCPerformanceDetail | unknown | undefined; // TODO: Narrow this based on entry type. + toJSON(): any; + } + /** + * Exposes marks created via the `Performance.mark()` method. + * @since v18.2.0, v16.17.0 + */ + class PerformanceMark extends PerformanceEntry { + readonly duration: 0; + readonly entryType: "mark"; + } + /** + * Exposes measures created via the `Performance.measure()` method. + * + * The constructor of this class is not exposed to users directly. + * @since v18.2.0, v16.17.0 + */ + class PerformanceMeasure extends PerformanceEntry { + readonly entryType: "measure"; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Provides timing details for Node.js itself. 
The constructor of this class + * is not exposed to users. + * @since v8.5.0 + */ + class PerformanceNodeTiming extends PerformanceEntry { + /** + * The high resolution millisecond timestamp at which the Node.js process + * completed bootstrapping. If bootstrapping has not yet finished, the property + * has the value of -1. + * @since v8.5.0 + */ + readonly bootstrapComplete: number; + /** + * The high resolution millisecond timestamp at which the Node.js environment was + * initialized. + * @since v8.5.0 + */ + readonly environment: number; + /** + * The high resolution millisecond timestamp of the amount of time the event loop + * has been idle within the event loop's event provider (e.g. `epoll_wait`). This + * does not take CPU usage into consideration. If the event loop has not yet + * started (e.g., in the first tick of the main script), the property has the + * value of 0. + * @since v14.10.0, v12.19.0 + */ + readonly idleTime: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * exited. If the event loop has not yet exited, the property has the value of -1\. + * It can only have a value of not -1 in a handler of the `'exit'` event. + * @since v8.5.0 + */ + readonly loopExit: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * started. If the event loop has not yet started (e.g., in the first tick of the + * main script), the property has the value of -1. + * @since v8.5.0 + */ + readonly loopStart: number; + /** + * The high resolution millisecond timestamp at which the V8 platform was + * initialized. + * @since v8.5.0 + */ + readonly v8Start: number; + } + interface EventLoopUtilization { + idle: number; + active: number; + utilization: number; + } + /** + * @param util1 The result of a previous call to eventLoopUtilization() + * @param util2 The result of a previous call to eventLoopUtilization() prior to util1 + */ + type EventLoopUtilityFunction = ( + util1?: EventLoopUtilization, + util2?: EventLoopUtilization, + ) => EventLoopUtilization; + interface MarkOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * An optional timestamp to be used as the mark time. + * @default `performance.now()`. + */ + startTime?: number | undefined; + } + interface MeasureOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * Duration between start and end times. + */ + duration?: number | undefined; + /** + * Timestamp to be used as the end time, or a string identifying a previously recorded mark. + */ + end?: number | string | undefined; + /** + * Timestamp to be used as the start time, or a string identifying a previously recorded mark. + */ + start?: number | string | undefined; + } + interface TimerifyOptions { + /** + * A histogram object created using + * `perf_hooks.createHistogram()` that will record runtime durations in + * nanoseconds. + */ + histogram?: RecordableHistogram | undefined; + } + interface Performance { + /** + * If name is not provided, removes all PerformanceMark objects from the Performance Timeline. + * If name is provided, removes only the named mark. + * @param name + */ + clearMarks(name?: string): void; + /** + * If name is not provided, removes all PerformanceMeasure objects from the Performance Timeline. + * If name is provided, removes only the named measure. 
+ * @param name + * @since v16.7.0 + */ + clearMeasures(name?: string): void; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime`. + * If you are only interested in performance entries of certain types or that have certain names, see + * `performance.getEntriesByType()` and `performance.getEntriesByName()`. + * @since v16.7.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.name` is equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to `type`. + * @param name + * @param type + * @since v16.7.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.entryType` is equal to `type`. + * @param type + * @since v16.7.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + /** + * Creates a new PerformanceMark entry in the Performance Timeline. + * A PerformanceMark is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'mark', + * and whose performanceEntry.duration is always 0. + * Performance marks are used to mark specific significant moments in the Performance Timeline. + * @param name + * @return The PerformanceMark entry that was created + */ + mark(name?: string, options?: MarkOptions): PerformanceMark; + /** + * Creates a new PerformanceMeasure entry in the Performance Timeline. + * A PerformanceMeasure is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'measure', + * and whose performanceEntry.duration measures the number of milliseconds elapsed since startMark and endMark. + * + * The startMark argument may identify any existing PerformanceMark in the the Performance Timeline, or may identify + * any of the timestamp properties provided by the PerformanceNodeTiming class. If the named startMark does not exist, + * then startMark is set to timeOrigin by default. + * + * The endMark argument must identify any existing PerformanceMark in the the Performance Timeline or any of the timestamp + * properties provided by the PerformanceNodeTiming class. If the named endMark does not exist, an error will be thrown. + * @param name + * @param startMark + * @param endMark + * @return The PerformanceMeasure entry that was created + */ + measure(name: string, startMark?: string, endMark?: string): PerformanceMeasure; + measure(name: string, options: MeasureOptions): PerformanceMeasure; + /** + * An instance of the PerformanceNodeTiming class that provides performance metrics for specific Node.js operational milestones. + */ + readonly nodeTiming: PerformanceNodeTiming; + /** + * @return the current high resolution millisecond timestamp + */ + now(): number; + /** + * The timeOrigin specifies the high resolution millisecond timestamp from which all performance metric durations are measured. + */ + readonly timeOrigin: number; + /** + * Wraps a function within a new function that measures the running time of the wrapped function. + * A PerformanceObserver must be subscribed to the 'function' event type in order for the timing details to be accessed. 
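`measure()` accepts either explicit `startMark`/`endMark` names or a `MeasureOptions` object; a small sketch of the mark-based form, paired with `getEntriesByName()` as declared above (the busy loop is only a stand-in for real work):

```js
const { performance } = require('node:perf_hooks');

performance.mark('A');
for (let i = 0; i < 1e6; i++);      // stand-in for the work being measured
performance.mark('B');

// Both names refer to existing PerformanceMark entries, so the measure
// spans exactly the interval between them.
performance.measure('A to B', 'A', 'B');

const [entry] = performance.getEntriesByName('A to B', 'measure');
console.log(entry.duration);        // elapsed milliseconds between the marks

performance.clearMarks();
performance.clearMeasures();
```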
+ * @param fn + */ + timerify any>(fn: T, options?: TimerifyOptions): T; + /** + * eventLoopUtilization is similar to CPU utilization except that it is calculated using high precision wall-clock time. + * It represents the percentage of time the event loop has spent outside the event loop's event provider (e.g. epoll_wait). + * No other CPU idle time is taken into consideration. + */ + eventLoopUtilization: EventLoopUtilityFunction; + } + interface PerformanceObserverEntryList { + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime`. + * + * ```js + * const { + * performance, + * PerformanceObserver, + * } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntries()); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 81.465639, + * * duration: 0, + * * detail: null + * * }, + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 81.860064, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ type: 'mark' }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime` whose `performanceEntry.name` is + * equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to`type`. + * + * ```js + * const { + * performance, + * PerformanceObserver, + * } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntriesByName('meow')); + * + * * [ + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 98.545991, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * console.log(perfObserverList.getEntriesByName('nope')); // [] + * + * console.log(perfObserverList.getEntriesByName('test', 'mark')); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 63.518931, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * console.log(perfObserverList.getEntriesByName('test', 'measure')); // [] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ entryTypes: ['mark', 'measure'] }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime` whose `performanceEntry.entryType`is equal to `type`. 
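`timerify()` only reports through an observer subscribed to the `'function'` entry type, and `eventLoopUtilization()` is a plain synchronous call; a minimal sketch of both (the wrapped function is an arbitrary placeholder):

```js
const { performance, PerformanceObserver } = require('node:perf_hooks');

function someWork() {
  for (let i = 0; i < 1e6; i++);    // placeholder workload
}

// Timing entries produced by the wrapped function surface here.
const obs = new PerformanceObserver((list) => {
  console.log(`someWork: ${list.getEntries()[0].duration} ms`);
  obs.disconnect();
});
obs.observe({ entryTypes: ['function'] });

const timedWork = performance.timerify(someWork);
timedWork();

// Event-loop utilization accumulated since the process started.
const elu = performance.eventLoopUtilization();
console.log(elu.idle, elu.active, elu.utilization);
```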
+ * + * ```js + * const { + * performance, + * PerformanceObserver, + * } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntriesByType('mark')); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 55.897834, + * * duration: 0, + * * detail: null + * * }, + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 56.350146, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ type: 'mark' }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + } + type PerformanceObserverCallback = (list: PerformanceObserverEntryList, observer: PerformanceObserver) => void; + /** + * @since v8.5.0 + */ + class PerformanceObserver extends AsyncResource { + constructor(callback: PerformanceObserverCallback); + /** + * Disconnects the `PerformanceObserver` instance from all notifications. + * @since v8.5.0 + */ + disconnect(): void; + /** + * Subscribes the `PerformanceObserver` instance to notifications of new `PerformanceEntry` instances identified either by `options.entryTypes`or `options.type`: + * + * ```js + * const { + * performance, + * PerformanceObserver, + * } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((list, observer) => { + * // Called once asynchronously. `list` contains three items. + * }); + * obs.observe({ type: 'mark' }); + * + * for (let n = 0; n < 3; n++) + * performance.mark(`test${n}`); + * ``` + * @since v8.5.0 + */ + observe( + options: + | { + entryTypes: readonly EntryType[]; + buffered?: boolean | undefined; + } + | { + type: EntryType; + buffered?: boolean | undefined; + }, + ): void; + } + namespace constants { + const NODE_PERFORMANCE_GC_MAJOR: number; + const NODE_PERFORMANCE_GC_MINOR: number; + const NODE_PERFORMANCE_GC_INCREMENTAL: number; + const NODE_PERFORMANCE_GC_WEAKCB: number; + const NODE_PERFORMANCE_GC_FLAGS_NO: number; + const NODE_PERFORMANCE_GC_FLAGS_CONSTRUCT_RETAINED: number; + const NODE_PERFORMANCE_GC_FLAGS_FORCED: number; + const NODE_PERFORMANCE_GC_FLAGS_SYNCHRONOUS_PHANTOM_PROCESSING: number; + const NODE_PERFORMANCE_GC_FLAGS_ALL_AVAILABLE_GARBAGE: number; + const NODE_PERFORMANCE_GC_FLAGS_ALL_EXTERNAL_MEMORY: number; + const NODE_PERFORMANCE_GC_FLAGS_SCHEDULE_IDLE: number; + } + const performance: Performance; + interface EventLoopMonitorOptions { + /** + * The sampling rate in milliseconds. + * Must be greater than zero. + * @default 10 + */ + resolution?: number | undefined; + } + interface Histogram { + /** + * Returns a `Map` object detailing the accumulated percentile distribution. + * @since v11.10.0 + */ + readonly percentiles: Map; + /** + * The number of times the event loop delay exceeded the maximum 1 hour event + * loop delay threshold. + * @since v11.10.0 + */ + readonly exceeds: number; + /** + * The minimum recorded event loop delay. + * @since v11.10.0 + */ + readonly min: number; + /** + * The maximum recorded event loop delay. + * @since v11.10.0 + */ + readonly max: number; + /** + * The mean of the recorded event loop delays. + * @since v11.10.0 + */ + readonly mean: number; + /** + * The standard deviation of the recorded event loop delays. 
+ * @since v11.10.0 + */ + readonly stddev: number; + /** + * Resets the collected histogram data. + * @since v11.10.0 + */ + reset(): void; + /** + * Returns the value at the given percentile. + * @since v11.10.0 + * @param percentile A percentile value in the range (0, 100]. + */ + percentile(percentile: number): number; + } + interface IntervalHistogram extends Histogram { + /** + * Enables the update interval timer. Returns `true` if the timer was + * started, `false` if it was already started. + * @since v11.10.0 + */ + enable(): boolean; + /** + * Disables the update interval timer. Returns `true` if the timer was + * stopped, `false` if it was already stopped. + * @since v11.10.0 + */ + disable(): boolean; + } + interface RecordableHistogram extends Histogram { + /** + * @since v15.9.0, v14.18.0 + * @param val The amount to record in the histogram. + */ + record(val: number | bigint): void; + /** + * Calculates the amount of time (in nanoseconds) that has passed since the + * previous call to `recordDelta()` and records that amount in the histogram. + * + * ## Examples + * @since v15.9.0, v14.18.0 + */ + recordDelta(): void; + /** + * Adds the values from `other` to this histogram. + * @since v17.4.0, v16.14.0 + */ + add(other: RecordableHistogram): void; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Creates an `IntervalHistogram` object that samples and reports the event loop + * delay over time. The delays will be reported in nanoseconds. + * + * Using a timer to detect approximate event loop delay works because the + * execution of timers is tied specifically to the lifecycle of the libuv + * event loop. That is, a delay in the loop will cause a delay in the execution + * of the timer, and those delays are specifically what this API is intended to + * detect. + * + * ```js + * const { monitorEventLoopDelay } = require('node:perf_hooks'); + * const h = monitorEventLoopDelay({ resolution: 20 }); + * h.enable(); + * // Do something. + * h.disable(); + * console.log(h.min); + * console.log(h.max); + * console.log(h.mean); + * console.log(h.stddev); + * console.log(h.percentiles); + * console.log(h.percentile(50)); + * console.log(h.percentile(99)); + * ``` + * @since v11.10.0 + */ + function monitorEventLoopDelay(options?: EventLoopMonitorOptions): IntervalHistogram; + interface CreateHistogramOptions { + /** + * The minimum recordable value. Must be an integer value greater than 0. + * @default 1 + */ + min?: number | bigint | undefined; + /** + * The maximum recordable value. Must be an integer value greater than min. + * @default Number.MAX_SAFE_INTEGER + */ + max?: number | bigint | undefined; + /** + * The number of accuracy digits. Must be a number between 1 and 5. + * @default 3 + */ + figures?: number | undefined; + } + /** + * Returns a `RecordableHistogram`. + * @since v15.9.0, v14.18.0 + */ + function createHistogram(options?: CreateHistogramOptions): RecordableHistogram; + import { performance as _performance } from "perf_hooks"; + global { + /** + * `performance` is a global reference for `require('perf_hooks').performance` + * https://nodejs.org/api/globals.html#performance + * @since v16.0.0 + */ + var performance: typeof globalThis extends { + onmessage: any; + performance: infer T; + } ? 
T + : typeof _performance; + } +} +declare module "node:perf_hooks" { + export * from "perf_hooks"; +} diff --git a/node_modules/@types/node/process.d.ts b/node_modules/@types/node/process.d.ts new file mode 100644 index 0000000..6697c9d --- /dev/null +++ b/node_modules/@types/node/process.d.ts @@ -0,0 +1,1576 @@ +declare module "process" { + import * as tty from "node:tty"; + import { Worker } from "node:worker_threads"; + global { + var process: NodeJS.Process; + namespace NodeJS { + // this namespace merge is here because these are specifically used + // as the type for process.stdin, process.stdout, and process.stderr. + // they can't live in tty.d.ts because we need to disambiguate the imported name. + interface ReadStream extends tty.ReadStream {} + interface WriteStream extends tty.WriteStream {} + interface MemoryUsageFn { + /** + * The `process.memoryUsage()` method iterate over each page to gather informations about memory + * usage which can be slow depending on the program memory allocations. + */ + (): MemoryUsage; + /** + * method returns an integer representing the Resident Set Size (RSS) in bytes. + */ + rss(): number; + } + interface MemoryUsage { + rss: number; + heapTotal: number; + heapUsed: number; + external: number; + arrayBuffers: number; + } + interface CpuUsage { + user: number; + system: number; + } + interface ProcessRelease { + name: string; + sourceUrl?: string | undefined; + headersUrl?: string | undefined; + libUrl?: string | undefined; + lts?: string | undefined; + } + interface ProcessVersions extends Dict { + http_parser: string; + node: string; + v8: string; + ares: string; + uv: string; + zlib: string; + modules: string; + openssl: string; + } + type Platform = + | "aix" + | "android" + | "darwin" + | "freebsd" + | "haiku" + | "linux" + | "openbsd" + | "sunos" + | "win32" + | "cygwin" + | "netbsd"; + type Architecture = + | "arm" + | "arm64" + | "ia32" + | "mips" + | "mipsel" + | "ppc" + | "ppc64" + | "riscv64" + | "s390" + | "s390x" + | "x64"; + type Signals = + | "SIGABRT" + | "SIGALRM" + | "SIGBUS" + | "SIGCHLD" + | "SIGCONT" + | "SIGFPE" + | "SIGHUP" + | "SIGILL" + | "SIGINT" + | "SIGIO" + | "SIGIOT" + | "SIGKILL" + | "SIGPIPE" + | "SIGPOLL" + | "SIGPROF" + | "SIGPWR" + | "SIGQUIT" + | "SIGSEGV" + | "SIGSTKFLT" + | "SIGSTOP" + | "SIGSYS" + | "SIGTERM" + | "SIGTRAP" + | "SIGTSTP" + | "SIGTTIN" + | "SIGTTOU" + | "SIGUNUSED" + | "SIGURG" + | "SIGUSR1" + | "SIGUSR2" + | "SIGVTALRM" + | "SIGWINCH" + | "SIGXCPU" + | "SIGXFSZ" + | "SIGBREAK" + | "SIGLOST" + | "SIGINFO"; + type UncaughtExceptionOrigin = "uncaughtException" | "unhandledRejection"; + type MultipleResolveType = "resolve" | "reject"; + type BeforeExitListener = (code: number) => void; + type DisconnectListener = () => void; + type ExitListener = (code: number) => void; + type RejectionHandledListener = (promise: Promise) => void; + type UncaughtExceptionListener = (error: Error, origin: UncaughtExceptionOrigin) => void; + /** + * Most of the time the unhandledRejection will be an Error, but this should not be relied upon + * as *anything* can be thrown/rejected, it is therefore unsafe to assume that the value is an Error. 
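The `MemoryUsageFn` shape declared at the top of this file maps onto `process.memoryUsage`; a brief sketch of both call forms (the numbers are simply whatever the current process happens to use):

```js
const { memoryUsage } = require('node:process');

// Full snapshot — iterates over pages, so it is the more expensive call.
console.log(memoryUsage());
// → { rss, heapTotal, heapUsed, external, arrayBuffers } (all in bytes)

// Cheaper variant when only the resident set size is needed.
console.log(memoryUsage.rss());
```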
+ */ + type UnhandledRejectionListener = (reason: unknown, promise: Promise) => void; + type WarningListener = (warning: Error) => void; + type MessageListener = (message: unknown, sendHandle: unknown) => void; + type SignalsListener = (signal: Signals) => void; + type MultipleResolveListener = ( + type: MultipleResolveType, + promise: Promise, + value: unknown, + ) => void; + type WorkerListener = (worker: Worker) => void; + interface Socket extends ReadWriteStream { + isTTY?: true | undefined; + } + // Alias for compatibility + interface ProcessEnv extends Dict { + /** + * Can be used to change the default timezone at runtime + */ + TZ?: string; + } + interface HRTime { + (time?: [number, number]): [number, number]; + bigint(): bigint; + } + interface ProcessReport { + /** + * Directory where the report is written. + * working directory of the Node.js process. + * @default '' indicating that reports are written to the current + */ + directory: string; + /** + * Filename where the report is written. + * The default value is the empty string. + * @default '' the output filename will be comprised of a timestamp, + * PID, and sequence number. + */ + filename: string; + /** + * Returns a JSON-formatted diagnostic report for the running process. + * The report's JavaScript stack trace is taken from err, if present. + */ + getReport(err?: Error): string; + /** + * If true, a diagnostic report is generated on fatal errors, + * such as out of memory errors or failed C++ assertions. + * @default false + */ + reportOnFatalError: boolean; + /** + * If true, a diagnostic report is generated when the process + * receives the signal specified by process.report.signal. + * @default false + */ + reportOnSignal: boolean; + /** + * If true, a diagnostic report is generated on uncaught exception. + * @default false + */ + reportOnUncaughtException: boolean; + /** + * The signal used to trigger the creation of a diagnostic report. + * @default 'SIGUSR2' + */ + signal: Signals; + /** + * Writes a diagnostic report to a file. If filename is not provided, the default filename + * includes the date, time, PID, and a sequence number. + * The report's JavaScript stack trace is taken from err, if present. + * + * @param fileName Name of the file where the report is written. + * This should be a relative path, that will be appended to the directory specified in + * `process.report.directory`, or the current working directory of the Node.js process, + * if unspecified. + * @param error A custom error used for reporting the JavaScript stack. + * @return Filename of the generated report. + */ + writeReport(fileName?: string): string; + writeReport(error?: Error): string; + writeReport(fileName?: string, err?: Error): string; + } + interface ResourceUsage { + fsRead: number; + fsWrite: number; + involuntaryContextSwitches: number; + ipcReceived: number; + ipcSent: number; + majorPageFault: number; + maxRSS: number; + minorPageFault: number; + sharedMemorySize: number; + signalsCount: number; + swappedOut: number; + systemCPUTime: number; + unsharedDataSize: number; + unsharedStackSize: number; + userCPUTime: number; + voluntaryContextSwitches: number; + } + interface EmitWarningOptions { + /** + * When `warning` is a `string`, `type` is the name to use for the _type_ of warning being emitted. + * + * @default 'Warning' + */ + type?: string | undefined; + /** + * A unique identifier for the warning instance being emitted. 
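The `ProcessReport` members above are all reachable through `process.report`; a small sketch of triggering a report from code and of the automatic triggers (report filenames are generated by Node, so none are assumed here):

```js
const process = require('node:process');

// Write a diagnostic report now; the return value is the generated filename.
const file = process.report.writeReport();
console.log(`diagnostic report written to ${file}`);

// Reports can also be produced automatically:
process.report.reportOnFatalError = true;        // e.g. OOM or failed C++ assertion
process.report.reportOnUncaughtException = true;
process.report.reportOnSignal = true;            // listens for process.report.signal
console.log(process.report.signal);              // 'SIGUSR2' by default
```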
+ */ + code?: string | undefined; + /** + * When `warning` is a `string`, `ctor` is an optional function used to limit the generated stack trace. + * + * @default process.emitWarning + */ + ctor?: Function | undefined; + /** + * Additional text to include with the error. + */ + detail?: string | undefined; + } + interface ProcessConfig { + readonly target_defaults: { + readonly cflags: any[]; + readonly default_configuration: string; + readonly defines: string[]; + readonly include_dirs: string[]; + readonly libraries: string[]; + }; + readonly variables: { + readonly clang: number; + readonly host_arch: string; + readonly node_install_npm: boolean; + readonly node_install_waf: boolean; + readonly node_prefix: string; + readonly node_shared_openssl: boolean; + readonly node_shared_v8: boolean; + readonly node_shared_zlib: boolean; + readonly node_use_dtrace: boolean; + readonly node_use_etw: boolean; + readonly node_use_openssl: boolean; + readonly target_arch: string; + readonly v8_no_strict_aliasing: number; + readonly v8_use_snapshot: boolean; + readonly visibility: string; + }; + } + interface Process extends EventEmitter { + /** + * The `process.stdout` property returns a stream connected to`stdout` (fd `1`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `1` refers to a file, in which case it is + * a `Writable` stream. + * + * For example, to copy `process.stdin` to `process.stdout`: + * + * ```js + * import { stdin, stdout } from 'node:process'; + * + * stdin.pipe(stdout); + * ``` + * + * `process.stdout` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stdout: WriteStream & { + fd: 1; + }; + /** + * The `process.stderr` property returns a stream connected to`stderr` (fd `2`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `2` refers to a file, in which case it is + * a `Writable` stream. + * + * `process.stderr` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stderr: WriteStream & { + fd: 2; + }; + /** + * The `process.stdin` property returns a stream connected to`stdin` (fd `0`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `0` refers to a file, in which case it is + * a `Readable` stream. + * + * For details of how to read from `stdin` see `readable.read()`. + * + * As a `Duplex` stream, `process.stdin` can also be used in "old" mode that + * is compatible with scripts written for Node.js prior to v0.10\. + * For more information see `Stream compatibility`. + * + * In "old" streams mode the `stdin` stream is paused by default, so one + * must call `process.stdin.resume()` to read from it. Note also that calling`process.stdin.resume()` itself would switch stream to "old" mode. + */ + stdin: ReadStream & { + fd: 0; + }; + openStdin(): Socket; + /** + * The `process.argv` property returns an array containing the command-line + * arguments passed when the Node.js process was launched. The first element will + * be {@link execPath}. See `process.argv0` if access to the original value + * of `argv[0]` is needed. The second element will be the path to the JavaScript + * file being executed. The remaining elements will be any additional command-line + * arguments. 
+ * + * For example, assuming the following script for `process-args.js`: + * + * ```js + * import { argv } from 'node:process'; + * + * // print process.argv + * argv.forEach((val, index) => { + * console.log(`${index}: ${val}`); + * }); + * ``` + * + * Launching the Node.js process as: + * + * ```bash + * node process-args.js one two=three four + * ``` + * + * Would generate the output: + * + * ```text + * 0: /usr/local/bin/node + * 1: /Users/mjr/work/node/process-args.js + * 2: one + * 3: two=three + * 4: four + * ``` + * @since v0.1.27 + */ + argv: string[]; + /** + * The `process.argv0` property stores a read-only copy of the original value of`argv[0]` passed when Node.js starts. + * + * ```console + * $ bash -c 'exec -a customArgv0 ./node' + * > process.argv[0] + * '/Volumes/code/external/node/out/Release/node' + * > process.argv0 + * 'customArgv0' + * ``` + * @since v6.4.0 + */ + argv0: string; + /** + * The `process.execArgv` property returns the set of Node.js-specific command-line + * options passed when the Node.js process was launched. These options do not + * appear in the array returned by the {@link argv} property, and do not + * include the Node.js executable, the name of the script, or any options following + * the script name. These options are useful in order to spawn child processes with + * the same execution environment as the parent. + * + * ```bash + * node --harmony script.js --version + * ``` + * + * Results in `process.execArgv`: + * + * ```js + * ['--harmony'] + * ``` + * + * And `process.argv`: + * + * ```js + * ['/usr/local/bin/node', 'script.js', '--version'] + * ``` + * + * Refer to `Worker constructor` for the detailed behavior of worker + * threads with this property. + * @since v0.7.7 + */ + execArgv: string[]; + /** + * The `process.execPath` property returns the absolute pathname of the executable + * that started the Node.js process. Symbolic links, if any, are resolved. + * + * ```js + * '/usr/local/bin/node' + * ``` + * @since v0.1.100 + */ + execPath: string; + /** + * The `process.abort()` method causes the Node.js process to exit immediately and + * generate a core file. + * + * This feature is not available in `Worker` threads. + * @since v0.7.0 + */ + abort(): never; + /** + * The `process.chdir()` method changes the current working directory of the + * Node.js process or throws an exception if doing so fails (for instance, if + * the specified `directory` does not exist). + * + * ```js + * import { chdir, cwd } from 'node:process'; + * + * console.log(`Starting directory: ${cwd()}`); + * try { + * chdir('/tmp'); + * console.log(`New directory: ${cwd()}`); + * } catch (err) { + * console.error(`chdir: ${err}`); + * } + * ``` + * + * This feature is not available in `Worker` threads. + * @since v0.1.17 + */ + chdir(directory: string): void; + /** + * The `process.cwd()` method returns the current working directory of the Node.js + * process. + * + * ```js + * import { cwd } from 'node:process'; + * + * console.log(`Current directory: ${cwd()}`); + * ``` + * @since v0.1.8 + */ + cwd(): string; + /** + * The port used by the Node.js debugger when enabled. + * + * ```js + * import process from 'node:process'; + * + * process.debugPort = 5858; + * ``` + * @since v0.7.2 + */ + debugPort: number; + /** + * The `process.emitWarning()` method can be used to emit custom or application + * specific process warnings. These can be listened for by adding a handler to the `'warning'` event. 
+ * + * ```js + * import { emitWarning } from 'node:process'; + * + * // Emit a warning with a code and additional detail. + * emitWarning('Something happened!', { + * code: 'MY_WARNING', + * detail: 'This is some additional information', + * }); + * // Emits: + * // (node:56338) [MY_WARNING] Warning: Something happened! + * // This is some additional information + * ``` + * + * In this example, an `Error` object is generated internally by`process.emitWarning()` and passed through to the `'warning'` handler. + * + * ```js + * import process from 'node:process'; + * + * process.on('warning', (warning) => { + * console.warn(warning.name); // 'Warning' + * console.warn(warning.message); // 'Something happened!' + * console.warn(warning.code); // 'MY_WARNING' + * console.warn(warning.stack); // Stack trace + * console.warn(warning.detail); // 'This is some additional information' + * }); + * ``` + * + * If `warning` is passed as an `Error` object, the `options` argument is ignored. + * @since v8.0.0 + * @param warning The warning to emit. + */ + emitWarning(warning: string | Error, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, code?: string, ctor?: Function): void; + emitWarning(warning: string | Error, options?: EmitWarningOptions): void; + /** + * The `process.env` property returns an object containing the user environment. + * See [`environ(7)`](http://man7.org/linux/man-pages/man7/environ.7.html). + * + * An example of this object looks like: + * + * ```js + * { + * TERM: 'xterm-256color', + * SHELL: '/usr/local/bin/bash', + * USER: 'maciej', + * PATH: '~/.bin/:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin', + * PWD: '/Users/maciej', + * EDITOR: 'vim', + * SHLVL: '1', + * HOME: '/Users/maciej', + * LOGNAME: 'maciej', + * _: '/usr/local/bin/node' + * } + * ``` + * + * It is possible to modify this object, but such modifications will not be + * reflected outside the Node.js process, or (unless explicitly requested) + * to other `Worker` threads. + * In other words, the following example would not work: + * + * ```bash + * node -e 'process.env.foo = "bar"' && echo $foo + * ``` + * + * While the following will: + * + * ```js + * import { env } from 'node:process'; + * + * env.foo = 'bar'; + * console.log(env.foo); + * ``` + * + * Assigning a property on `process.env` will implicitly convert the value + * to a string. **This behavior is deprecated.** Future versions of Node.js may + * throw an error when the value is not a string, number, or boolean. + * + * ```js + * import { env } from 'node:process'; + * + * env.test = null; + * console.log(env.test); + * // => 'null' + * env.test = undefined; + * console.log(env.test); + * // => 'undefined' + * ``` + * + * Use `delete` to delete a property from `process.env`. + * + * ```js + * import { env } from 'node:process'; + * + * env.TEST = 1; + * delete env.TEST; + * console.log(env.TEST); + * // => undefined + * ``` + * + * On Windows operating systems, environment variables are case-insensitive. + * + * ```js + * import { env } from 'node:process'; + * + * env.TEST = 1; + * console.log(env.test); + * // => 1 + * ``` + * + * Unless explicitly specified when creating a `Worker` instance, + * each `Worker` thread has its own copy of `process.env`, based on its + * parent thread's `process.env`, or whatever was specified as the `env` option + * to the `Worker` constructor. 
Changes to `process.env` will not be visible + * across `Worker` threads, and only the main thread can make changes that + * are visible to the operating system or to native add-ons. On Windows, a copy of`process.env` on a `Worker` instance operates in a case-sensitive manner + * unlike the main thread. + * @since v0.1.27 + */ + env: ProcessEnv; + /** + * The `process.exit()` method instructs Node.js to terminate the process + * synchronously with an exit status of `code`. If `code` is omitted, exit uses + * either the 'success' code `0` or the value of `process.exitCode` if it has been + * set. Node.js will not terminate until all the `'exit'` event listeners are + * called. + * + * To exit with a 'failure' code: + * + * ```js + * import { exit } from 'node:process'; + * + * exit(1); + * ``` + * + * The shell that executed Node.js should see the exit code as `1`. + * + * Calling `process.exit()` will force the process to exit as quickly as possible + * even if there are still asynchronous operations pending that have not yet + * completed fully, including I/O operations to `process.stdout` and`process.stderr`. + * + * In most situations, it is not actually necessary to call `process.exit()`explicitly. The Node.js process will exit on its own _if there is no additional_ + * _work pending_ in the event loop. The `process.exitCode` property can be set to + * tell the process which exit code to use when the process exits gracefully. + * + * For instance, the following example illustrates a _misuse_ of the`process.exit()` method that could lead to data printed to stdout being + * truncated and lost: + * + * ```js + * import { exit } from 'node:process'; + * + * // This is an example of what *not* to do: + * if (someConditionNotMet()) { + * printUsageToStdout(); + * exit(1); + * } + * ``` + * + * The reason this is problematic is because writes to `process.stdout` in Node.js + * are sometimes _asynchronous_ and may occur over multiple ticks of the Node.js + * event loop. Calling `process.exit()`, however, forces the process to exit _before_ those additional writes to `stdout` can be performed. + * + * Rather than calling `process.exit()` directly, the code _should_ set the`process.exitCode` and allow the process to exit naturally by avoiding + * scheduling any additional work for the event loop: + * + * ```js + * import process from 'node:process'; + * + * // How to properly set the exit code while letting + * // the process exit gracefully. + * if (someConditionNotMet()) { + * printUsageToStdout(); + * process.exitCode = 1; + * } + * ``` + * + * If it is necessary to terminate the Node.js process due to an error condition, + * throwing an _uncaught_ error and allowing the process to terminate accordingly + * is safer than calling `process.exit()`. + * + * In `Worker` threads, this function stops the current thread rather + * than the current process. + * @since v0.1.13 + * @param [code=0] The exit code. For string type, only integer strings (e.g.,'1') are allowed. + */ + exit(code?: number): never; + /** + * A number which will be the process exit code, when the process either + * exits gracefully, or is exited via {@link exit} without specifying + * a code. + * + * Specifying a code to {@link exit} will override any + * previous setting of `process.exitCode`. + * @since v0.11.8 + */ + exitCode?: number | undefined; + /** + * The `process.getgid()` method returns the numerical group identity of the + * process. (See [`getgid(2)`](http://man7.org/linux/man-pages/man2/getgid.2.html).) 
+ * + * ```js + * import process from 'process'; + * + * if (process.getgid) { + * console.log(`Current gid: ${process.getgid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.31 + */ + getgid?: () => number; + /** + * The `process.setgid()` method sets the group identity of the process. (See [`setgid(2)`](http://man7.org/linux/man-pages/man2/setgid.2.html).) The `id` can be passed as either a + * numeric ID or a group name + * string. If a group name is specified, this method blocks while resolving the + * associated numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getgid && process.setgid) { + * console.log(`Current gid: ${process.getgid()}`); + * try { + * process.setgid(501); + * console.log(`New gid: ${process.getgid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.31 + * @param id The group name or ID + */ + setgid?: (id: number | string) => void; + /** + * The `process.getuid()` method returns the numeric user identity of the process. + * (See [`getuid(2)`](http://man7.org/linux/man-pages/man2/getuid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.getuid) { + * console.log(`Current uid: ${process.getuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.28 + */ + getuid?: () => number; + /** + * The `process.setuid(id)` method sets the user identity of the process. (See [`setuid(2)`](http://man7.org/linux/man-pages/man2/setuid.2.html).) The `id` can be passed as either a + * numeric ID or a username string. + * If a username is specified, the method blocks while resolving the associated + * numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getuid && process.setuid) { + * console.log(`Current uid: ${process.getuid()}`); + * try { + * process.setuid(501); + * console.log(`New uid: ${process.getuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.28 + */ + setuid?: (id: number | string) => void; + /** + * The `process.geteuid()` method returns the numerical effective user identity of + * the process. (See [`geteuid(2)`](http://man7.org/linux/man-pages/man2/geteuid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.geteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + geteuid?: () => number; + /** + * The `process.seteuid()` method sets the effective user identity of the process. + * (See [`seteuid(2)`](http://man7.org/linux/man-pages/man2/seteuid.2.html).) The `id` can be passed as either a numeric ID or a username + * string. If a username is specified, the method blocks while resolving the + * associated numeric ID. 
+ * + * ```js + * import process from 'process'; + * + * if (process.geteuid && process.seteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * try { + * process.seteuid(501); + * console.log(`New uid: ${process.geteuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v2.0.0 + * @param id A user name or ID + */ + seteuid?: (id: number | string) => void; + /** + * The `process.getegid()` method returns the numerical effective group identity + * of the Node.js process. (See [`getegid(2)`](http://man7.org/linux/man-pages/man2/getegid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.getegid) { + * console.log(`Current gid: ${process.getegid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + getegid?: () => number; + /** + * The `process.setegid()` method sets the effective group identity of the process. + * (See [`setegid(2)`](http://man7.org/linux/man-pages/man2/setegid.2.html).) The `id` can be passed as either a numeric ID or a group + * name string. If a group name is specified, this method blocks while resolving + * the associated a numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getegid && process.setegid) { + * console.log(`Current gid: ${process.getegid()}`); + * try { + * process.setegid(501); + * console.log(`New gid: ${process.getegid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v2.0.0 + * @param id A group name or ID + */ + setegid?: (id: number | string) => void; + /** + * The `process.getgroups()` method returns an array with the supplementary group + * IDs. POSIX leaves it unspecified if the effective group ID is included but + * Node.js ensures it always is. + * + * ```js + * import process from 'process'; + * + * if (process.getgroups) { + * console.log(process.getgroups()); // [ 16, 21, 297 ] + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.9.4 + */ + getgroups?: () => number[]; + /** + * The `process.setgroups()` method sets the supplementary group IDs for the + * Node.js process. This is a privileged operation that requires the Node.js + * process to have `root` or the `CAP_SETGID` capability. + * + * The `groups` array can contain numeric group IDs, group names, or both. + * + * ```js + * import process from 'process'; + * + * if (process.getgroups && process.setgroups) { + * try { + * process.setgroups([501]); + * console.log(process.getgroups()); // new groups + * } catch (err) { + * console.log(`Failed to set groups: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.9.4 + */ + setgroups?: (groups: ReadonlyArray) => void; + /** + * The `process.setUncaughtExceptionCaptureCallback()` function sets a function + * that will be invoked when an uncaught exception occurs, which will receive the + * exception value itself as its first argument. 
+ * + * If such a function is set, the `'uncaughtException'` event will + * not be emitted. If `--abort-on-uncaught-exception` was passed from the + * command line or set through `v8.setFlagsFromString()`, the process will + * not abort. Actions configured to take place on exceptions such as report + * generations will be affected too + * + * To unset the capture function,`process.setUncaughtExceptionCaptureCallback(null)` may be used. Calling this + * method with a non-`null` argument while another capture function is set will + * throw an error. + * + * Using this function is mutually exclusive with using the deprecated `domain` built-in module. + * @since v9.3.0 + */ + setUncaughtExceptionCaptureCallback(cb: ((err: Error) => void) | null): void; + /** + * Indicates whether a callback has been set using {@link setUncaughtExceptionCaptureCallback}. + * @since v9.3.0 + */ + hasUncaughtExceptionCaptureCallback(): boolean; + /** + * The `process.sourceMapsEnabled` property returns whether the [Source Map v3](https://sourcemaps.info/spec.html) support for stack traces is enabled. + * @since v20.7.0 + * @experimental + */ + readonly sourceMapsEnabled: boolean; + /** + * This function enables or disables the [Source Map v3](https://sourcemaps.info/spec.html) support for + * stack traces. + * + * It provides same features as launching Node.js process with commandline options`--enable-source-maps`. + * + * Only source maps in JavaScript files that are loaded after source maps has been + * enabled will be parsed and loaded. + * @since v16.6.0, v14.18.0 + * @experimental + */ + setSourceMapsEnabled(value: boolean): void; + /** + * The `process.version` property contains the Node.js version string. + * + * ```js + * import { version } from 'node:process'; + * + * console.log(`Version: ${version}`); + * // Version: v14.8.0 + * ``` + * + * To get the version string without the prepended _v_, use`process.versions.node`. + * @since v0.1.3 + */ + readonly version: string; + /** + * The `process.versions` property returns an object listing the version strings of + * Node.js and its dependencies. `process.versions.modules` indicates the current + * ABI version, which is increased whenever a C++ API changes. Node.js will refuse + * to load modules that were compiled against a different module ABI version. + * + * ```js + * import { versions } from 'node:process'; + * + * console.log(versions); + * ``` + * + * Will generate an object similar to: + * + * ```console + * { node: '20.2.0', + * acorn: '8.8.2', + * ada: '2.4.0', + * ares: '1.19.0', + * base64: '0.5.0', + * brotli: '1.0.9', + * cjs_module_lexer: '1.2.2', + * cldr: '43.0', + * icu: '73.1', + * llhttp: '8.1.0', + * modules: '115', + * napi: '8', + * nghttp2: '1.52.0', + * nghttp3: '0.7.0', + * ngtcp2: '0.8.1', + * openssl: '3.0.8+quic', + * simdutf: '3.2.9', + * tz: '2023c', + * undici: '5.22.0', + * unicode: '15.0', + * uv: '1.44.2', + * uvwasi: '0.0.16', + * v8: '11.3.244.8-node.9', + * zlib: '1.2.13' } + * ``` + * @since v0.2.0 + */ + readonly versions: ProcessVersions; + /** + * The `process.config` property returns a frozen `Object` containing the + * JavaScript representation of the configure options used to compile the current + * Node.js executable. This is the same as the `config.gypi` file that was produced + * when running the `./configure` script. 
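As a quick, illustrative peek at `process.config` (the `host_arch` field name is taken from the sample output shown next):

```js
const { config } = require('node:process');

console.log(config.variables.host_arch); // e.g. 'x64'
console.log(Object.isFrozen(config));    // true; the object cannot be modified
```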
+ * + * An example of the possible output looks like: + * + * ```js + * { + * target_defaults: + * { cflags: [], + * default_configuration: 'Release', + * defines: [], + * include_dirs: [], + * libraries: [] }, + * variables: + * { + * host_arch: 'x64', + * napi_build_version: 5, + * node_install_npm: 'true', + * node_prefix: '', + * node_shared_cares: 'false', + * node_shared_http_parser: 'false', + * node_shared_libuv: 'false', + * node_shared_zlib: 'false', + * node_use_openssl: 'true', + * node_shared_openssl: 'false', + * strict_aliasing: 'true', + * target_arch: 'x64', + * v8_use_snapshot: 1 + * } + * } + * ``` + * @since v0.7.7 + */ + readonly config: ProcessConfig; + /** + * The `process.kill()` method sends the `signal` to the process identified by`pid`. + * + * Signal names are strings such as `'SIGINT'` or `'SIGHUP'`. See `Signal Events` and [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for more information. + * + * This method will throw an error if the target `pid` does not exist. As a special + * case, a signal of `0` can be used to test for the existence of a process. + * Windows platforms will throw an error if the `pid` is used to kill a process + * group. + * + * Even though the name of this function is `process.kill()`, it is really just a + * signal sender, like the `kill` system call. The signal sent may do something + * other than kill the target process. + * + * ```js + * import process, { kill } from 'node:process'; + * + * process.on('SIGHUP', () => { + * console.log('Got SIGHUP signal.'); + * }); + * + * setTimeout(() => { + * console.log('Exiting.'); + * process.exit(0); + * }, 100); + * + * kill(process.pid, 'SIGHUP'); + * ``` + * + * When `SIGUSR1` is received by a Node.js process, Node.js will start the + * debugger. See `Signal Events`. + * @since v0.0.6 + * @param pid A process ID + * @param [signal='SIGTERM'] The signal to send, either as a string or number. + */ + kill(pid: number, signal?: string | number): true; + /** + * Loads the environment configuration from a `.env` file into `process.env`. If + * the file is not found, error will be thrown. + * + * To load a specific .env file by specifying its path, use the following code: + * + * ```js + * import { loadEnvFile } from 'node:process'; + * + * loadEnvFile('./development.env') + * ``` + * @since v20.12.0 + * @param path The path to the .env file + */ + loadEnvFile(path?: string | URL | Buffer): void; + /** + * The `process.pid` property returns the PID of the process. + * + * ```js + * import { pid } from 'node:process'; + * + * console.log(`This process is pid ${pid}`); + * ``` + * @since v0.1.15 + */ + readonly pid: number; + /** + * The `process.ppid` property returns the PID of the parent of the + * current process. + * + * ```js + * import { ppid } from 'node:process'; + * + * console.log(`The parent process is pid ${ppid}`); + * ``` + * @since v9.2.0, v8.10.0, v6.13.0 + */ + readonly ppid: number; + /** + * The `process.title` property returns the current process title (i.e. returns + * the current value of `ps`). Assigning a new value to `process.title` modifies + * the current value of `ps`. + * + * When a new value is assigned, different platforms will impose different maximum + * length restrictions on the title. Usually such restrictions are quite limited. 
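A small aside tying `process.pid`, `process.ppid`, and `process.title` together (the worker naming scheme is only an example):

```js
const { pid, ppid } = require('node:process');

process.title = `my-app-worker-${pid}`; // easier to spot in `ps` / Task Manager output
console.log(`pid ${pid}, parent pid ${ppid}, title "${process.title}"`);
```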
+ * For instance, on Linux and macOS, `process.title` is limited to the size of the + * binary name plus the length of the command-line arguments because setting the`process.title` overwrites the `argv` memory of the process. Node.js v0.8 + * allowed for longer process title strings by also overwriting the `environ`memory but that was potentially insecure and confusing in some (rather obscure) + * cases. + * + * Assigning a value to `process.title` might not result in an accurate label + * within process manager applications such as macOS Activity Monitor or Windows + * Services Manager. + * @since v0.1.104 + */ + title: string; + /** + * The operating system CPU architecture for which the Node.js binary was compiled. + * Possible values are: `'arm'`, `'arm64'`, `'ia32'`, `'loong64'`, `'mips'`,`'mipsel'`, `'ppc'`, `'ppc64'`, `'riscv64'`, `'s390'`, `'s390x'`, and `'x64'`. + * + * ```js + * import { arch } from 'node:process'; + * + * console.log(`This processor architecture is ${arch}`); + * ``` + * @since v0.5.0 + */ + readonly arch: Architecture; + /** + * The `process.platform` property returns a string identifying the operating + * system platform for which the Node.js binary was compiled. + * + * Currently possible values are: + * + * * `'aix'` + * * `'darwin'` + * * `'freebsd'` + * * `'linux'` + * * `'openbsd'` + * * `'sunos'` + * * `'win32'` + * + * ```js + * import { platform } from 'node:process'; + * + * console.log(`This platform is ${platform}`); + * ``` + * + * The value `'android'` may also be returned if the Node.js is built on the + * Android operating system. However, Android support in Node.js [is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.1.16 + */ + readonly platform: Platform; + /** + * The `process.mainModule` property provides an alternative way of retrieving `require.main`. The difference is that if the main module changes at + * runtime, `require.main` may still refer to the original main module in + * modules that were required before the change occurred. Generally, it's + * safe to assume that the two refer to the same module. + * + * As with `require.main`, `process.mainModule` will be `undefined` if there + * is no entry script. + * @since v0.1.17 + * @deprecated Since v14.0.0 - Use `main` instead. + */ + mainModule?: Module | undefined; + memoryUsage: MemoryUsageFn; + /** + * Gets the amount of memory available to the process (in bytes) based on + * limits imposed by the OS. If there is no such constraint, or the constraint + * is unknown, `undefined` is returned. + * + * See [`uv_get_constrained_memory`](https://docs.libuv.org/en/v1.x/misc.html#c.uv_get_constrained_memory) for more + * information. + * @since v19.6.0, v18.15.0 + * @experimental + */ + constrainedMemory(): number | undefined; + /** + * The `process.cpuUsage()` method returns the user and system CPU time usage of + * the current process, in an object with properties `user` and `system`, whose + * values are microsecond values (millionth of a second). These values measure time + * spent in user and system code respectively, and may end up being greater than + * actual elapsed time if multiple CPU cores are performing work for this process. + * + * The result of a previous call to `process.cpuUsage()` can be passed as the + * argument to the function, to get a diff reading. 
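Alongside `process.cpuUsage()`, the memory accessors documented above can be sampled the same way; a small sketch (values are in bytes, and `constrainedMemory()` may legitimately report no limit):

```js
const used = process.memoryUsage();
const limit = process.constrainedMemory(); // undefined when no OS-imposed limit is known

console.log(`rss:      ${(used.rss / 1024 / 1024).toFixed(1)} MiB`);
console.log(`heapUsed: ${(used.heapUsed / 1024 / 1024).toFixed(1)} MiB`);
console.log(limit === undefined ? 'no constrained-memory limit detected' : `limit: ${limit} bytes`);
```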
+ * + * ```js + * import { cpuUsage } from 'node:process'; + * + * const startUsage = cpuUsage(); + * // { user: 38579, system: 6986 } + * + * // spin the CPU for 500 milliseconds + * const now = Date.now(); + * while (Date.now() - now < 500); + * + * console.log(cpuUsage(startUsage)); + * // { user: 514883, system: 11226 } + * ``` + * @since v6.1.0 + * @param previousValue A previous return value from calling `process.cpuUsage()` + */ + cpuUsage(previousValue?: CpuUsage): CpuUsage; + /** + * `process.nextTick()` adds `callback` to the "next tick queue". This queue is + * fully drained after the current operation on the JavaScript stack runs to + * completion and before the event loop is allowed to continue. It's possible to + * create an infinite loop if one were to recursively call `process.nextTick()`. + * See the [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#process-nexttick) guide for more background. + * + * ```js + * import { nextTick } from 'node:process'; + * + * console.log('start'); + * nextTick(() => { + * console.log('nextTick callback'); + * }); + * console.log('scheduled'); + * // Output: + * // start + * // scheduled + * // nextTick callback + * ``` + * + * This is important when developing APIs in order to give users the opportunity + * to assign event handlers _after_ an object has been constructed but before any + * I/O has occurred: + * + * ```js + * import { nextTick } from 'node:process'; + * + * function MyThing(options) { + * this.setupOptions(options); + * + * nextTick(() => { + * this.startDoingStuff(); + * }); + * } + * + * const thing = new MyThing(); + * thing.getReadyForStuff(); + * + * // thing.startDoingStuff() gets called now, not before. + * ``` + * + * It is very important for APIs to be either 100% synchronous or 100% + * asynchronous. Consider this example: + * + * ```js + * // WARNING! DO NOT USE! BAD UNSAFE HAZARD! + * function maybeSync(arg, cb) { + * if (arg) { + * cb(); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * + * This API is hazardous because in the following case: + * + * ```js + * const maybeTrue = Math.random() > 0.5; + * + * maybeSync(maybeTrue, () => { + * foo(); + * }); + * + * bar(); + * ``` + * + * It is not clear whether `foo()` or `bar()` will be called first. + * + * The following approach is much better: + * + * ```js + * import { nextTick } from 'node:process'; + * + * function definitelyAsync(arg, cb) { + * if (arg) { + * nextTick(cb); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * @since v0.1.26 + * @param args Additional arguments to pass when invoking the `callback` + */ + nextTick(callback: Function, ...args: any[]): void; + /** + * The `process.release` property returns an `Object` containing metadata related + * to the current release, including URLs for the source tarball and headers-only + * tarball. + * + * `process.release` contains the following properties: + * + * ```js + * { + * name: 'node', + * lts: 'Hydrogen', + * sourceUrl: 'https://nodejs.org/download/release/v18.12.0/node-v18.12.0.tar.gz', + * headersUrl: 'https://nodejs.org/download/release/v18.12.0/node-v18.12.0-headers.tar.gz', + * libUrl: 'https://nodejs.org/download/release/v18.12.0/win-x64/node.lib' + * } + * ``` + * + * In custom builds from non-release versions of the source tree, only the`name` property may be present. The additional properties should not be + * relied upon to exist. 
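A short illustration of reading `process.release` defensively, since only `name` is guaranteed to be present in custom builds:

```js
const { release } = require('node:process');

console.log(release.name);                     // always present, e.g. 'node'
console.log(release.lts ?? 'not an LTS line'); // e.g. 'Hydrogen'
console.log(release.sourceUrl ?? 'n/a');       // may be absent in custom builds
```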
+ * @since v3.0.0 + */ + readonly release: ProcessRelease; + features: { + inspector: boolean; + debug: boolean; + uv: boolean; + ipv6: boolean; + tls_alpn: boolean; + tls_sni: boolean; + tls_ocsp: boolean; + tls: boolean; + }; + /** + * `process.umask()` returns the Node.js process's file mode creation mask. Child + * processes inherit the mask from the parent process. + * @since v0.1.19 + * @deprecated Calling `process.umask()` with no argument causes the process-wide umask to be written twice. This introduces a race condition between threads, and is a potential * + * security vulnerability. There is no safe, cross-platform alternative API. + */ + umask(): number; + /** + * Can only be set if not in worker thread. + */ + umask(mask: string | number): number; + /** + * The `process.uptime()` method returns the number of seconds the current Node.js + * process has been running. + * + * The return value includes fractions of a second. Use `Math.floor()` to get whole + * seconds. + * @since v0.5.0 + */ + uptime(): number; + hrtime: HRTime; + /** + * If the Node.js process was spawned with an IPC channel, the process.channel property is a reference to the IPC channel. + * If no IPC channel exists, this property is undefined. + * @since v7.1.0 + */ + channel?: { + /** + * This method makes the IPC channel keep the event loop of the process running if .unref() has been called before. + * @since v7.1.0 + */ + ref(): void; + /** + * This method makes the IPC channel not keep the event loop of the process running, and lets it finish even while the channel is open. + * @since v7.1.0 + */ + unref(): void; + }; + /** + * If Node.js is spawned with an IPC channel, the `process.send()` method can be + * used to send messages to the parent process. Messages will be received as a `'message'` event on the parent's `ChildProcess` object. + * + * If Node.js was not spawned with an IPC channel, `process.send` will be `undefined`. + * + * The message goes through serialization and parsing. The resulting message might + * not be the same as what is originally sent. + * @since v0.5.9 + * @param options used to parameterize the sending of certain types of handles.`options` supports the following properties: + */ + send?( + message: any, + sendHandle?: any, + options?: { + swallowErrors?: boolean | undefined; + }, + callback?: (error: Error | null) => void, + ): boolean; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.disconnect()` method will close the + * IPC channel to the parent process, allowing the child process to exit gracefully + * once there are no other connections keeping it alive. + * + * The effect of calling `process.disconnect()` is the same as calling `ChildProcess.disconnect()` from the parent process. + * + * If the Node.js process was not spawned with an IPC channel,`process.disconnect()` will be `undefined`. + * @since v0.7.2 + */ + disconnect(): void; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.connected` property will return`true` so long as the IPC + * channel is connected and will return `false` after`process.disconnect()` is called. + * + * Once `process.connected` is `false`, it is no longer possible to send messages + * over the IPC channel using `process.send()`. 
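The IPC members above (`process.send()`, `process.disconnect()`, `process.channel`, `process.connected`) only exist when the process was spawned with an IPC channel. An illustrative parent/child pair, assuming a sibling `child.js` script:

```js
// parent.js
const { fork } = require('node:child_process');

const child = fork('./child.js'); // fork() always sets up an IPC channel
child.on('message', (msg) => {
  console.log('reply from child:', msg);
  child.disconnect(); // let the child exit once nothing else keeps it alive
});
child.send({ hello: 'child' });
```

```js
// child.js
process.on('message', (msg) => {
  if (process.send) process.send({ got: msg });
  if (process.connected) process.disconnect();
});
```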
+ * @since v0.7.2 + */ + connected: boolean; + /** + * The `process.allowedNodeEnvironmentFlags` property is a special, + * read-only `Set` of flags allowable within the `NODE_OPTIONS` environment variable. + * + * `process.allowedNodeEnvironmentFlags` extends `Set`, but overrides`Set.prototype.has` to recognize several different possible flag + * representations. `process.allowedNodeEnvironmentFlags.has()` will + * return `true` in the following cases: + * + * * Flags may omit leading single (`-`) or double (`--`) dashes; e.g.,`inspect-brk` for `--inspect-brk`, or `r` for `-r`. + * * Flags passed through to V8 (as listed in `--v8-options`) may replace + * one or more _non-leading_ dashes for an underscore, or vice-versa; + * e.g., `--perf_basic_prof`, `--perf-basic-prof`, `--perf_basic-prof`, + * etc. + * * Flags may contain one or more equals (`=`) characters; all + * characters after and including the first equals will be ignored; + * e.g., `--stack-trace-limit=100`. + * * Flags _must_ be allowable within `NODE_OPTIONS`. + * + * When iterating over `process.allowedNodeEnvironmentFlags`, flags will + * appear only _once_; each will begin with one or more dashes. Flags + * passed through to V8 will contain underscores instead of non-leading + * dashes: + * + * ```js + * import { allowedNodeEnvironmentFlags } from 'node:process'; + * + * allowedNodeEnvironmentFlags.forEach((flag) => { + * // -r + * // --inspect-brk + * // --abort_on_uncaught_exception + * // ... + * }); + * ``` + * + * The methods `add()`, `clear()`, and `delete()` of`process.allowedNodeEnvironmentFlags` do nothing, and will fail + * silently. + * + * If Node.js was compiled _without_ `NODE_OPTIONS` support (shown in {@link config}), `process.allowedNodeEnvironmentFlags` will + * contain what _would have_ been allowable. + * @since v10.10.0 + */ + allowedNodeEnvironmentFlags: ReadonlySet; + /** + * `process.report` is an object whose methods are used to generate diagnostic + * reports for the current process. Additional documentation is available in the `report documentation`. + * @since v11.8.0 + */ + report?: ProcessReport | undefined; + /** + * ```js + * import { resourceUsage } from 'node:process'; + * + * console.log(resourceUsage()); + * /* + * Will output: + * { + * userCPUTime: 82872, + * systemCPUTime: 4143, + * maxRSS: 33164, + * sharedMemorySize: 0, + * unsharedDataSize: 0, + * unsharedStackSize: 0, + * minorPageFault: 2469, + * majorPageFault: 0, + * swappedOut: 0, + * fsRead: 0, + * fsWrite: 8, + * ipcSent: 0, + * ipcReceived: 0, + * signalsCount: 0, + * voluntaryContextSwitches: 79, + * involuntaryContextSwitches: 1 + * } + * + * ``` + * @since v12.6.0 + * @return the resource usage for the current process. All of these values come from the `uv_getrusage` call which returns a [`uv_rusage_t` struct][uv_rusage_t]. + */ + resourceUsage(): ResourceUsage; + /** + * The `process.traceDeprecation` property indicates whether the`--trace-deprecation` flag is set on the current Node.js process. See the + * documentation for the `'warning' event` and the `emitWarning() method` for more information about this + * flag's behavior. 
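A tiny sketch of how `process.traceDeprecation` pairs with emitted deprecation warnings; run it with `node --trace-deprecation` to see the difference (the warning text is illustrative):

```js
if (process.traceDeprecation) {
  console.log('Deprecation warnings will be printed with stack traces.');
}

// Emits a DeprecationWarning; with --trace-deprecation it includes a stack trace.
process.emitWarning('legacyHelper() is deprecated', 'DeprecationWarning');
```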
+ * @since v0.8.0 + */ + traceDeprecation: boolean; + /* EventEmitter */ + addListener(event: "beforeExit", listener: BeforeExitListener): this; + addListener(event: "disconnect", listener: DisconnectListener): this; + addListener(event: "exit", listener: ExitListener): this; + addListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + addListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + addListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + addListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + addListener(event: "warning", listener: WarningListener): this; + addListener(event: "message", listener: MessageListener): this; + addListener(event: Signals, listener: SignalsListener): this; + addListener(event: "multipleResolves", listener: MultipleResolveListener): this; + addListener(event: "worker", listener: WorkerListener): this; + emit(event: "beforeExit", code: number): boolean; + emit(event: "disconnect"): boolean; + emit(event: "exit", code: number): boolean; + emit(event: "rejectionHandled", promise: Promise): boolean; + emit(event: "uncaughtException", error: Error): boolean; + emit(event: "uncaughtExceptionMonitor", error: Error): boolean; + emit(event: "unhandledRejection", reason: unknown, promise: Promise): boolean; + emit(event: "warning", warning: Error): boolean; + emit(event: "message", message: unknown, sendHandle: unknown): this; + emit(event: Signals, signal?: Signals): boolean; + emit( + event: "multipleResolves", + type: MultipleResolveType, + promise: Promise, + value: unknown, + ): this; + emit(event: "worker", listener: WorkerListener): this; + on(event: "beforeExit", listener: BeforeExitListener): this; + on(event: "disconnect", listener: DisconnectListener): this; + on(event: "exit", listener: ExitListener): this; + on(event: "rejectionHandled", listener: RejectionHandledListener): this; + on(event: "uncaughtException", listener: UncaughtExceptionListener): this; + on(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + on(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + on(event: "warning", listener: WarningListener): this; + on(event: "message", listener: MessageListener): this; + on(event: Signals, listener: SignalsListener): this; + on(event: "multipleResolves", listener: MultipleResolveListener): this; + on(event: "worker", listener: WorkerListener): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "beforeExit", listener: BeforeExitListener): this; + once(event: "disconnect", listener: DisconnectListener): this; + once(event: "exit", listener: ExitListener): this; + once(event: "rejectionHandled", listener: RejectionHandledListener): this; + once(event: "uncaughtException", listener: UncaughtExceptionListener): this; + once(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + once(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + once(event: "warning", listener: WarningListener): this; + once(event: "message", listener: MessageListener): this; + once(event: Signals, listener: SignalsListener): this; + once(event: "multipleResolves", listener: MultipleResolveListener): this; + once(event: "worker", listener: WorkerListener): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "beforeExit", listener: BeforeExitListener): this; + 
prependListener(event: "disconnect", listener: DisconnectListener): this; + prependListener(event: "exit", listener: ExitListener): this; + prependListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + prependListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + prependListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + prependListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + prependListener(event: "warning", listener: WarningListener): this; + prependListener(event: "message", listener: MessageListener): this; + prependListener(event: Signals, listener: SignalsListener): this; + prependListener(event: "multipleResolves", listener: MultipleResolveListener): this; + prependListener(event: "worker", listener: WorkerListener): this; + prependOnceListener(event: "beforeExit", listener: BeforeExitListener): this; + prependOnceListener(event: "disconnect", listener: DisconnectListener): this; + prependOnceListener(event: "exit", listener: ExitListener): this; + prependOnceListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + prependOnceListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + prependOnceListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + prependOnceListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + prependOnceListener(event: "warning", listener: WarningListener): this; + prependOnceListener(event: "message", listener: MessageListener): this; + prependOnceListener(event: Signals, listener: SignalsListener): this; + prependOnceListener(event: "multipleResolves", listener: MultipleResolveListener): this; + prependOnceListener(event: "worker", listener: WorkerListener): this; + listeners(event: "beforeExit"): BeforeExitListener[]; + listeners(event: "disconnect"): DisconnectListener[]; + listeners(event: "exit"): ExitListener[]; + listeners(event: "rejectionHandled"): RejectionHandledListener[]; + listeners(event: "uncaughtException"): UncaughtExceptionListener[]; + listeners(event: "uncaughtExceptionMonitor"): UncaughtExceptionListener[]; + listeners(event: "unhandledRejection"): UnhandledRejectionListener[]; + listeners(event: "warning"): WarningListener[]; + listeners(event: "message"): MessageListener[]; + listeners(event: Signals): SignalsListener[]; + listeners(event: "multipleResolves"): MultipleResolveListener[]; + listeners(event: "worker"): WorkerListener[]; + } + } + } + export = process; +} +declare module "node:process" { + import process = require("process"); + export = process; +} diff --git a/node_modules/@types/node/punycode.d.ts b/node_modules/@types/node/punycode.d.ts new file mode 100644 index 0000000..d2fc9f9 --- /dev/null +++ b/node_modules/@types/node/punycode.d.ts @@ -0,0 +1,117 @@ +/** + * **The version of the punycode module bundled in Node.js is being deprecated.**In a future major version of Node.js this module will be removed. Users + * currently depending on the `punycode` module should switch to using the + * userland-provided [Punycode.js](https://github.com/bestiejs/punycode.js) module instead. For punycode-based URL + * encoding, see `url.domainToASCII` or, more generally, the `WHATWG URL API`. + * + * The `punycode` module is a bundled version of the [Punycode.js](https://github.com/bestiejs/punycode.js) module. 
It + * can be accessed using: + * + * ```js + * const punycode = require('punycode'); + * ``` + * + * [Punycode](https://tools.ietf.org/html/rfc3492) is a character encoding scheme defined by RFC 3492 that is + * primarily intended for use in Internationalized Domain Names. Because host + * names in URLs are limited to ASCII characters only, Domain Names that contain + * non-ASCII characters must be converted into ASCII using the Punycode scheme. + * For instance, the Japanese character that translates into the English word,`'example'` is `'例'`. The Internationalized Domain Name, `'例.com'` (equivalent + * to `'example.com'`) is represented by Punycode as the ASCII string`'xn--fsq.com'`. + * + * The `punycode` module provides a simple implementation of the Punycode standard. + * + * The `punycode` module is a third-party dependency used by Node.js and + * made available to developers as a convenience. Fixes or other modifications to + * the module must be directed to the [Punycode.js](https://github.com/bestiejs/punycode.js) project. + * @deprecated Since v7.0.0 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/punycode.js) + */ +declare module "punycode" { + /** + * The `punycode.decode()` method converts a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only + * characters to the equivalent string of Unicode codepoints. + * + * ```js + * punycode.decode('maana-pta'); // 'mañana' + * punycode.decode('--dqo34k'); // '☃-⌘' + * ``` + * @since v0.5.1 + */ + function decode(string: string): string; + /** + * The `punycode.encode()` method converts a string of Unicode codepoints to a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only characters. + * + * ```js + * punycode.encode('mañana'); // 'maana-pta' + * punycode.encode('☃-⌘'); // '--dqo34k' + * ``` + * @since v0.5.1 + */ + function encode(string: string): string; + /** + * The `punycode.toUnicode()` method converts a string representing a domain name + * containing [Punycode](https://tools.ietf.org/html/rfc3492) encoded characters into Unicode. Only the [Punycode](https://tools.ietf.org/html/rfc3492) encoded parts of the domain name are be + * converted. + * + * ```js + * // decode domain names + * punycode.toUnicode('xn--maana-pta.com'); // 'mañana.com' + * punycode.toUnicode('xn----dqo34k.com'); // '☃-⌘.com' + * punycode.toUnicode('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toUnicode(domain: string): string; + /** + * The `punycode.toASCII()` method converts a Unicode string representing an + * Internationalized Domain Name to [Punycode](https://tools.ietf.org/html/rfc3492). Only the non-ASCII parts of the + * domain name will be converted. Calling `punycode.toASCII()` on a string that + * already only contains ASCII characters will have no effect. + * + * ```js + * // encode domain names + * punycode.toASCII('mañana.com'); // 'xn--maana-pta.com' + * punycode.toASCII('☃-⌘.com'); // 'xn----dqo34k.com' + * punycode.toASCII('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toASCII(domain: string): string; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. 
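A quick round trip using the conversions documented above; the expected outputs are the ones from those examples (per the deprecation notice, new code should prefer the userland Punycode.js package, but the bundled module still works):

```js
const punycode = require('punycode');

console.log(punycode.toASCII('mañana.com'));          // 'xn--maana-pta.com'
console.log(punycode.toUnicode('xn--maana-pta.com')); // 'mañana.com'
```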
+ */ + const ucs2: ucs2; + interface ucs2 { + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + decode(string: string): number[]; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + encode(codePoints: readonly number[]): string; + } + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + const version: string; +} +declare module "node:punycode" { + export * from "punycode"; +} diff --git a/node_modules/@types/node/querystring.d.ts b/node_modules/@types/node/querystring.d.ts new file mode 100644 index 0000000..b36ea2c --- /dev/null +++ b/node_modules/@types/node/querystring.d.ts @@ -0,0 +1,141 @@ +/** + * The `node:querystring` module provides utilities for parsing and formatting URL + * query strings. It can be accessed using: + * + * ```js + * const querystring = require('node:querystring'); + * ``` + * + * `querystring` is more performant than `URLSearchParams` but is not a + * standardized API. Use `URLSearchParams` when performance is not critical or + * when compatibility with browser code is desirable. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/querystring.js) + */ +declare module "querystring" { + interface StringifyOptions { + encodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParseOptions { + maxKeys?: number | undefined; + decodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParsedUrlQuery extends NodeJS.Dict {} + interface ParsedUrlQueryInput extends + NodeJS.Dict< + | string + | number + | boolean + | readonly string[] + | readonly number[] + | readonly boolean[] + | null + > + {} + /** + * The `querystring.stringify()` method produces a URL query string from a + * given `obj` by iterating through the object's "own properties". + * + * It serializes the following types of values passed in `obj`:[string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) | + * [string\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) The numeric values must be finite. Any other input values will be coerced to + * empty strings. 
+ * + * ```js + * querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' }); + * // Returns 'foo=bar&baz=qux&baz=quux&corge=' + * + * querystring.stringify({ foo: 'bar', baz: 'qux' }, ';', ':'); + * // Returns 'foo:bar;baz:qux' + * ``` + * + * By default, characters requiring percent-encoding within the query string will + * be encoded as UTF-8\. If an alternative encoding is required, then an alternative`encodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkEncodeURIComponent function already exists, + * + * querystring.stringify({ w: '中文', foo: 'bar' }, null, null, + * { encodeURIComponent: gbkEncodeURIComponent }); + * ``` + * @since v0.1.25 + * @param obj The object to serialize into a URL query string + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function stringify(obj?: ParsedUrlQueryInput, sep?: string, eq?: string, options?: StringifyOptions): string; + /** + * The `querystring.parse()` method parses a URL query string (`str`) into a + * collection of key and value pairs. + * + * For example, the query string `'foo=bar&abc=xyz&abc=123'` is parsed into: + * + * ```json + * { + * "foo": "bar", + * "abc": ["xyz", "123"] + * } + * ``` + * + * The object returned by the `querystring.parse()` method _does not_prototypically inherit from the JavaScript `Object`. This means that typical`Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * By default, percent-encoded characters within the query string will be assumed + * to use UTF-8 encoding. If an alternative character encoding is used, then an + * alternative `decodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkDecodeURIComponent function already exists... + * + * querystring.parse('w=%D6%D0%CE%C4&foo=bar', null, null, + * { decodeURIComponent: gbkDecodeURIComponent }); + * ``` + * @since v0.1.25 + * @param str The URL query string to parse + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function parse(str: string, sep?: string, eq?: string, options?: ParseOptions): ParsedUrlQuery; + /** + * The querystring.encode() function is an alias for querystring.stringify(). + */ + const encode: typeof stringify; + /** + * The querystring.decode() function is an alias for querystring.parse(). + */ + const decode: typeof parse; + /** + * The `querystring.escape()` method performs URL percent-encoding on the given`str` in a manner that is optimized for the specific requirements of URL + * query strings. + * + * The `querystring.escape()` method is used by `querystring.stringify()` and is + * generally not expected to be used directly. It is exported primarily to allow + * application code to provide a replacement percent-encoding implementation if + * necessary by assigning `querystring.escape` to an alternative function. + * @since v0.1.25 + */ + function escape(str: string): string; + /** + * The `querystring.unescape()` method performs decoding of URL percent-encoded + * characters on the given `str`. + * + * The `querystring.unescape()` method is used by `querystring.parse()` and is + * generally not expected to be used directly. 
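Pulling the pieces above together, a small round trip through `parse()` and `stringify()`, including the null-prototype caveat:

```js
const querystring = require('node:querystring');

const parsed = querystring.parse('foo=bar&abc=xyz&abc=123');
console.log(parsed.abc); // [ 'xyz', '123' ]

// The parsed object does not inherit from Object.prototype, so use
// Object.hasOwn() rather than parsed.hasOwnProperty().
console.log(Object.hasOwn(parsed, 'foo')); // true

console.log(querystring.stringify(parsed)); // 'foo=bar&abc=xyz&abc=123'
```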
It is exported primarily to allow + * application code to provide a replacement decoding implementation if + * necessary by assigning `querystring.unescape` to an alternative function. + * + * By default, the `querystring.unescape()` method will attempt to use the + * JavaScript built-in `decodeURIComponent()` method to decode. If that fails, + * a safer equivalent that does not throw on malformed URLs will be used. + * @since v0.1.25 + */ + function unescape(str: string): string; +} +declare module "node:querystring" { + export * from "querystring"; +} diff --git a/node_modules/@types/node/readline.d.ts b/node_modules/@types/node/readline.d.ts new file mode 100644 index 0000000..b06d58b --- /dev/null +++ b/node_modules/@types/node/readline.d.ts @@ -0,0 +1,539 @@ +/** + * The `node:readline` module provides an interface for reading data from a `Readable` stream (such as `process.stdin`) one line at a time. + * + * To use the promise-based APIs: + * + * ```js + * import * as readline from 'node:readline/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as readline from 'node:readline'; + * ``` + * + * The following simple example illustrates the basic use of the `node:readline`module. + * + * ```js + * import * as readline from 'node:readline/promises'; + * import { stdin as input, stdout as output } from 'node:process'; + * + * const rl = readline.createInterface({ input, output }); + * + * const answer = await rl.question('What do you think of Node.js? '); + * + * console.log(`Thank you for your valuable feedback: ${answer}`); + * + * rl.close(); + * ``` + * + * Once this code is invoked, the Node.js application will not terminate until the`readline.Interface` is closed because the interface waits for data to be + * received on the `input` stream. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/readline.js) + */ +declare module "readline" { + import { Abortable, EventEmitter } from "node:events"; + import * as promises from "node:readline/promises"; + export { promises }; + export interface Key { + sequence?: string | undefined; + name?: string | undefined; + ctrl?: boolean | undefined; + meta?: boolean | undefined; + shift?: boolean | undefined; + } + /** + * Instances of the `readline.Interface` class are constructed using the`readline.createInterface()` method. Every instance is associated with a + * single `input` `Readable` stream and a single `output` `Writable` stream. + * The `output` stream is used to print prompts for user input that arrives on, + * and is read from, the `input` stream. + * @since v0.1.104 + */ + export class Interface extends EventEmitter { + readonly terminal: boolean; + /** + * The current input data being processed by node. + * + * This can be used when collecting input from a TTY stream to retrieve the + * current value that has been processed thus far, prior to the `line` event + * being emitted. Once the `line` event has been emitted, this property will + * be an empty string. + * + * Be aware that modifying the value during the instance runtime may have + * unintended consequences if `rl.cursor` is not also controlled. 
+ * + * **If not using a TTY stream for input, use the `'line'` event.** + * + * One possible use case would be as follows: + * + * ```js + * const values = ['lorem ipsum', 'dolor sit amet']; + * const rl = readline.createInterface(process.stdin); + * const showResults = debounce(() => { + * console.log( + * '\n', + * values.filter((val) => val.startsWith(rl.line)).join(' '), + * ); + * }, 300); + * process.stdin.on('keypress', (c, k) => { + * showResults(); + * }); + * ``` + * @since v0.1.98 + */ + readonly line: string; + /** + * The cursor position relative to `rl.line`. + * + * This will track where the current cursor lands in the input string, when + * reading input from a TTY stream. The position of cursor determines the + * portion of the input string that will be modified as input is processed, + * as well as the column where the terminal caret will be rendered. + * @since v0.1.98 + */ + readonly cursor: number; + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/readline.html#class-interfaceconstructor + */ + protected constructor( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ); + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/readline.html#class-interfaceconstructor + */ + protected constructor(options: ReadLineOptions); + /** + * The `rl.getPrompt()` method returns the current prompt used by `rl.prompt()`. + * @since v15.3.0, v14.17.0 + * @return the current prompt string + */ + getPrompt(): string; + /** + * The `rl.setPrompt()` method sets the prompt that will be written to `output`whenever `rl.prompt()` is called. + * @since v0.1.98 + */ + setPrompt(prompt: string): void; + /** + * The `rl.prompt()` method writes the `Interface` instances configured`prompt` to a new line in `output` in order to provide a user with a new + * location at which to provide input. + * + * When called, `rl.prompt()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the prompt is not written. + * @since v0.1.98 + * @param preserveCursor If `true`, prevents the cursor placement from being reset to `0`. + */ + prompt(preserveCursor?: boolean): void; + /** + * The `rl.question()` method displays the `query` by writing it to the `output`, + * waits for user input to be provided on `input`, then invokes the `callback`function passing the provided input as the first argument. + * + * When called, `rl.question()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the `query` is not written. + * + * The `callback` function passed to `rl.question()` does not follow the typical + * pattern of accepting an `Error` object or `null` as the first argument. + * The `callback` is called with the provided answer as the only argument. + * + * An error will be thrown if calling `rl.question()` after `rl.close()`. + * + * Example usage: + * + * ```js + * rl.question('What is your favorite food? 
', (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * ``` + * + * Using an `AbortController` to cancel a question. + * + * ```js + * const ac = new AbortController(); + * const signal = ac.signal; + * + * rl.question('What is your favorite food? ', { signal }, (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * setTimeout(() => ac.abort(), 10000); + * ``` + * @since v0.3.3 + * @param query A statement or query to write to `output`, prepended to the prompt. + * @param callback A callback function that is invoked with the user's input in response to the `query`. + */ + question(query: string, callback: (answer: string) => void): void; + question(query: string, options: Abortable, callback: (answer: string) => void): void; + /** + * The `rl.pause()` method pauses the `input` stream, allowing it to be resumed + * later if necessary. + * + * Calling `rl.pause()` does not immediately pause other events (including`'line'`) from being emitted by the `Interface` instance. + * @since v0.3.4 + */ + pause(): this; + /** + * The `rl.resume()` method resumes the `input` stream if it has been paused. + * @since v0.3.4 + */ + resume(): this; + /** + * The `rl.close()` method closes the `Interface` instance and + * relinquishes control over the `input` and `output` streams. When called, + * the `'close'` event will be emitted. + * + * Calling `rl.close()` does not immediately stop other events (including `'line'`) + * from being emitted by the `Interface` instance. + * @since v0.1.98 + */ + close(): void; + /** + * The `rl.write()` method will write either `data` or a key sequence identified + * by `key` to the `output`. The `key` argument is supported only if `output` is + * a `TTY` text terminal. See `TTY keybindings` for a list of key + * combinations. + * + * If `key` is specified, `data` is ignored. + * + * When called, `rl.write()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the `data` and `key` are not written. + * + * ```js + * rl.write('Delete this!'); + * // Simulate Ctrl+U to delete the line written previously + * rl.write(null, { ctrl: true, name: 'u' }); + * ``` + * + * The `rl.write()` method will write the data to the `readline` `Interface`'s`input`_as if it were provided by the user_. + * @since v0.1.98 + */ + write(data: string | Buffer, key?: Key): void; + write(data: undefined | null | string | Buffer, key: Key): void; + /** + * Returns the real position of the cursor in relation to the input + * prompt + string. Long input (wrapping) strings, as well as multiple + * line prompts are included in the calculations. + * @since v13.5.0, v12.16.0 + */ + getCursorPos(): CursorPos; + /** + * events.EventEmitter + * 1. close + * 2. line + * 3. pause + * 4. resume + * 5. SIGCONT + * 6. SIGINT + * 7. SIGTSTP + * 8. 
history + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "line", listener: (input: string) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "SIGCONT", listener: () => void): this; + addListener(event: "SIGINT", listener: () => void): this; + addListener(event: "SIGTSTP", listener: () => void): this; + addListener(event: "history", listener: (history: string[]) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "line", input: string): boolean; + emit(event: "pause"): boolean; + emit(event: "resume"): boolean; + emit(event: "SIGCONT"): boolean; + emit(event: "SIGINT"): boolean; + emit(event: "SIGTSTP"): boolean; + emit(event: "history", history: string[]): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "line", listener: (input: string) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "SIGCONT", listener: () => void): this; + on(event: "SIGINT", listener: () => void): this; + on(event: "SIGTSTP", listener: () => void): this; + on(event: "history", listener: (history: string[]) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "line", listener: (input: string) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "SIGCONT", listener: () => void): this; + once(event: "SIGINT", listener: () => void): this; + once(event: "SIGTSTP", listener: () => void): this; + once(event: "history", listener: (history: string[]) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "line", listener: (input: string) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "SIGCONT", listener: () => void): this; + prependListener(event: "SIGINT", listener: () => void): this; + prependListener(event: "SIGTSTP", listener: () => void): this; + prependListener(event: "history", listener: (history: string[]) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "line", listener: (input: string) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "SIGCONT", listener: () => void): this; + prependOnceListener(event: "SIGINT", listener: () => void): this; + prependOnceListener(event: "SIGTSTP", listener: () => void): this; + prependOnceListener(event: "history", listener: (history: string[]) => void): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + export type ReadLine = Interface; // type forwarded for backwards compatibility + export type Completer = (line: string) => CompleterResult; + export type AsyncCompleter = ( + line: string, + callback: (err?: null | Error, result?: CompleterResult) => 
void, + ) => void; + export type CompleterResult = [string[], string]; + export interface ReadLineOptions { + input: NodeJS.ReadableStream; + output?: NodeJS.WritableStream | undefined; + completer?: Completer | AsyncCompleter | undefined; + terminal?: boolean | undefined; + /** + * Initial list of history lines. This option makes sense + * only if `terminal` is set to `true` by the user or by an internal `output` + * check, otherwise the history caching mechanism is not initialized at all. + * @default [] + */ + history?: string[] | undefined; + historySize?: number | undefined; + prompt?: string | undefined; + crlfDelay?: number | undefined; + /** + * If `true`, when a new input line added + * to the history list duplicates an older one, this removes the older line + * from the list. + * @default false + */ + removeHistoryDuplicates?: boolean | undefined; + escapeCodeTimeout?: number | undefined; + tabSize?: number | undefined; + } + /** + * The `readline.createInterface()` method creates a new `readline.Interface`instance. + * + * ```js + * const readline = require('node:readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * }); + * ``` + * + * Once the `readline.Interface` instance is created, the most common case is to + * listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get + * the best compatibility if it defines an `output.columns` property and emits + * a `'resize'` event on the `output` if or when the columns ever change + * (`process.stdout` does this automatically when it is a TTY). + * + * When creating a `readline.Interface` using `stdin` as input, the program + * will not terminate until it receives an [EOF character](https://en.wikipedia.org/wiki/End-of-file#EOF_character). To exit without + * waiting for user input, call `process.stdin.unref()`. + * @since v0.1.98 + */ + export function createInterface( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ): Interface; + export function createInterface(options: ReadLineOptions): Interface; + /** + * The `readline.emitKeypressEvents()` method causes the given `Readable` stream to begin emitting `'keypress'` events corresponding to received input. + * + * Optionally, `interface` specifies a `readline.Interface` instance for which + * autocompletion is disabled when copy-pasted input is detected. + * + * If the `stream` is a `TTY`, then it must be in raw mode. + * + * This is automatically called by any readline instance on its `input` if the`input` is a terminal. Closing the `readline` instance does not stop + * the `input` from emitting `'keypress'` events. + * + * ```js + * readline.emitKeypressEvents(process.stdin); + * if (process.stdin.isTTY) + * process.stdin.setRawMode(true); + * ``` + * + * ## Example: Tiny CLI + * + * The following example illustrates the use of `readline.Interface` class to + * implement a small command-line interface: + * + * ```js + * const readline = require('node:readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * prompt: 'OHAI> ', + * }); + * + * rl.prompt(); + * + * rl.on('line', (line) => { + * switch (line.trim()) { + * case 'hello': + * console.log('world!'); + * break; + * default: + * console.log(`Say what? 
I might have heard '${line.trim()}'`); + * break; + * } + * rl.prompt(); + * }).on('close', () => { + * console.log('Have a great day!'); + * process.exit(0); + * }); + * ``` + * + * ## Example: Read file stream line-by-Line + * + * A common use case for `readline` is to consume an input file one line at a + * time. The easiest way to do so is leveraging the `fs.ReadStream` API as + * well as a `for await...of` loop: + * + * ```js + * const fs = require('node:fs'); + * const readline = require('node:readline'); + * + * async function processLineByLine() { + * const fileStream = fs.createReadStream('input.txt'); + * + * const rl = readline.createInterface({ + * input: fileStream, + * crlfDelay: Infinity, + * }); + * // Note: we use the crlfDelay option to recognize all instances of CR LF + * // ('\r\n') in input.txt as a single line break. + * + * for await (const line of rl) { + * // Each line in input.txt will be successively available here as `line`. + * console.log(`Line from file: ${line}`); + * } + * } + * + * processLineByLine(); + * ``` + * + * Alternatively, one could use the `'line'` event: + * + * ```js + * const fs = require('node:fs'); + * const readline = require('node:readline'); + * + * const rl = readline.createInterface({ + * input: fs.createReadStream('sample.txt'), + * crlfDelay: Infinity, + * }); + * + * rl.on('line', (line) => { + * console.log(`Line from file: ${line}`); + * }); + * ``` + * + * Currently, `for await...of` loop can be a bit slower. If `async` / `await`flow and speed are both essential, a mixed approach can be applied: + * + * ```js + * const { once } = require('node:events'); + * const { createReadStream } = require('node:fs'); + * const { createInterface } = require('node:readline'); + * + * (async function processLineByLine() { + * try { + * const rl = createInterface({ + * input: createReadStream('big-file.txt'), + * crlfDelay: Infinity, + * }); + * + * rl.on('line', (line) => { + * // Process the line. + * }); + * + * await once(rl, 'close'); + * + * console.log('File processed.'); + * } catch (err) { + * console.error(err); + * } + * })(); + * ``` + * @since v0.7.7 + */ + export function emitKeypressEvents(stream: NodeJS.ReadableStream, readlineInterface?: Interface): void; + export type Direction = -1 | 0 | 1; + export interface CursorPos { + rows: number; + cols: number; + } + /** + * The `readline.clearLine()` method clears current line of given `TTY` stream + * in a specified direction identified by `dir`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearLine(stream: NodeJS.WritableStream, dir: Direction, callback?: () => void): boolean; + /** + * The `readline.clearScreenDown()` method clears the given `TTY` stream from + * the current position of the cursor down. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearScreenDown(stream: NodeJS.WritableStream, callback?: () => void): boolean; + /** + * The `readline.cursorTo()` method moves cursor to the specified position in a + * given `TTY` `stream`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. 
+ * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function cursorTo(stream: NodeJS.WritableStream, x: number, y?: number, callback?: () => void): boolean; + /** + * The `readline.moveCursor()` method moves the cursor _relative_ to its current + * position in a given `TTY` `stream`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function moveCursor(stream: NodeJS.WritableStream, dx: number, dy: number, callback?: () => void): boolean; +} +declare module "node:readline" { + export * from "readline"; +} diff --git a/node_modules/@types/node/readline/promises.d.ts b/node_modules/@types/node/readline/promises.d.ts new file mode 100644 index 0000000..73fb111 --- /dev/null +++ b/node_modules/@types/node/readline/promises.d.ts @@ -0,0 +1,150 @@ +/** + * @since v17.0.0 + * @experimental + */ +declare module "readline/promises" { + import { AsyncCompleter, Completer, Direction, Interface as _Interface, ReadLineOptions } from "node:readline"; + import { Abortable } from "node:events"; + /** + * Instances of the `readlinePromises.Interface` class are constructed using the`readlinePromises.createInterface()` method. Every instance is associated with a + * single `input` `Readable` stream and a single `output` `Writable` stream. + * The `output` stream is used to print prompts for user input that arrives on, + * and is read from, the `input` stream. + * @since v17.0.0 + */ + class Interface extends _Interface { + /** + * The `rl.question()` method displays the `query` by writing it to the `output`, + * waits for user input to be provided on `input`, then invokes the `callback`function passing the provided input as the first argument. + * + * When called, `rl.question()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the `query` is not written. + * + * If the question is called after `rl.close()`, it returns a rejected promise. + * + * Example usage: + * + * ```js + * const answer = await rl.question('What is your favorite food? '); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * + * Using an `AbortSignal` to cancel a question. + * + * ```js + * const signal = AbortSignal.timeout(10_000); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * const answer = await rl.question('What is your favorite food? ', { signal }); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * @since v17.0.0 + * @param query A statement or query to write to `output`, prepended to the prompt. + * @return A promise that is fulfilled with the user's input in response to the `query`. + */ + question(query: string): Promise; + question(query: string, options: Abortable): Promise; + } + /** + * @since v17.0.0 + */ + class Readline { + /** + * @param stream A TTY stream. + */ + constructor( + stream: NodeJS.WritableStream, + options?: { + autoCommit?: boolean; + }, + ); + /** + * The `rl.clearLine()` method adds to the internal list of pending action an + * action that clears current line of the associated `stream` in a specified + * direction identified by `dir`. 
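The callback-style cursor helpers declared above (`readline.cursorTo()`, `readline.clearLine()`, `readline.moveCursor()`) ship here without a usage sample. A minimal sketch of redrawing a one-line counter in place, assuming `process.stdout` is a TTY (the `ticks` counter and the interval timing are purely illustrative), might look like:

```js
const readline = require('node:readline');

// Redraw a single status line in place: jump to column 0, clear to the
// right of the cursor, then write the new value.
let ticks = 0; // illustrative counter, not part of the readline API
const timer = setInterval(() => {
  readline.cursorTo(process.stdout, 0);
  readline.clearLine(process.stdout, 1); // dir = 1: clear to the right of the cursor
  process.stdout.write(`ticks: ${++ticks}`);
  if (ticks === 5) {
    clearInterval(timer);
    process.stdout.write('\n');
  }
}, 200);
```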
+ * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + clearLine(dir: Direction): this; + /** + * The `rl.clearScreenDown()` method adds to the internal list of pending action an + * action that clears the associated stream from the current position of the + * cursor down. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + clearScreenDown(): this; + /** + * The `rl.commit()` method sends all the pending actions to the associated`stream` and clears the internal list of pending actions. + * @since v17.0.0 + */ + commit(): Promise; + /** + * The `rl.cursorTo()` method adds to the internal list of pending action an action + * that moves cursor to the specified position in the associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + cursorTo(x: number, y?: number): this; + /** + * The `rl.moveCursor()` method adds to the internal list of pending action an + * action that moves the cursor _relative_ to its current position in the + * associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + moveCursor(dx: number, dy: number): this; + /** + * The `rl.rollback` methods clears the internal list of pending actions without + * sending it to the associated `stream`. + * @since v17.0.0 + * @return this + */ + rollback(): this; + } + /** + * The `readlinePromises.createInterface()` method creates a new `readlinePromises.Interface`instance. + * + * ```js + * const readlinePromises = require('node:readline/promises'); + * const rl = readlinePromises.createInterface({ + * input: process.stdin, + * output: process.stdout, + * }); + * ``` + * + * Once the `readlinePromises.Interface` instance is created, the most common case + * is to listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get + * the best compatibility if it defines an `output.columns` property and emits + * a `'resize'` event on the `output` if or when the columns ever change + * (`process.stdout` does this automatically when it is a TTY). + * @since v17.0.0 + */ + function createInterface( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ): Interface; + function createInterface(options: ReadLineOptions): Interface; +} +declare module "node:readline/promises" { + export * from "readline/promises"; +} diff --git a/node_modules/@types/node/repl.d.ts b/node_modules/@types/node/repl.d.ts new file mode 100644 index 0000000..6c5f81b --- /dev/null +++ b/node_modules/@types/node/repl.d.ts @@ -0,0 +1,430 @@ +/** + * The `node:repl` module provides a Read-Eval-Print-Loop (REPL) implementation + * that is available both as a standalone program or includible in other + * applications. 
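The promise-based `Readline` class declared above only queues cursor actions until `commit()` is called (unless `autoCommit: true` is passed to its constructor). A minimal sketch of batching a line redraw with it, assuming a TTY `process.stdout` (the `redrawStatus` helper name is illustrative), could be:

```js
const { Readline } = require('node:readline/promises');

async function redrawStatus(text) {
  const rl = new Readline(process.stdout);
  // Nothing is written yet: cursorTo() and clearLine() only queue actions.
  rl.cursorTo(0);
  rl.clearLine(1); // 1 = clear everything to the right of the cursor
  // commit() flushes the queued escape sequences to the stream in one go.
  await rl.commit();
  process.stdout.write(text);
}

redrawStatus('ready');
```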
It can be accessed using: + * + * ```js + * const repl = require('node:repl'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/repl.js) + */ +declare module "repl" { + import { AsyncCompleter, Completer, Interface } from "node:readline"; + import { Context } from "node:vm"; + import { InspectOptions } from "node:util"; + interface ReplOptions { + /** + * The input prompt to display. + * @default "> " + */ + prompt?: string | undefined; + /** + * The `Readable` stream from which REPL input will be read. + * @default process.stdin + */ + input?: NodeJS.ReadableStream | undefined; + /** + * The `Writable` stream to which REPL output will be written. + * @default process.stdout + */ + output?: NodeJS.WritableStream | undefined; + /** + * If `true`, specifies that the output should be treated as a TTY terminal, and have + * ANSI/VT100 escape codes written to it. + * Default: checking the value of the `isTTY` property on the output stream upon + * instantiation. + */ + terminal?: boolean | undefined; + /** + * The function to be used when evaluating each given line of input. + * Default: an async wrapper for the JavaScript `eval()` function. An `eval` function can + * error with `repl.Recoverable` to indicate the input was incomplete and prompt for + * additional lines. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_default_evaluation + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_custom_evaluation_functions + */ + eval?: REPLEval | undefined; + /** + * Defines if the repl prints output previews or not. + * @default `true` Always `false` in case `terminal` is falsy. + */ + preview?: boolean | undefined; + /** + * If `true`, specifies that the default `writer` function should include ANSI color + * styling to REPL output. If a custom `writer` function is provided then this has no + * effect. + * Default: the REPL instance's `terminal` value. + */ + useColors?: boolean | undefined; + /** + * If `true`, specifies that the default evaluation function will use the JavaScript + * `global` as the context as opposed to creating a new separate context for the REPL + * instance. The node CLI REPL sets this value to `true`. + * Default: `false`. + */ + useGlobal?: boolean | undefined; + /** + * If `true`, specifies that the default writer will not output the return value of a + * command if it evaluates to `undefined`. + * Default: `false`. + */ + ignoreUndefined?: boolean | undefined; + /** + * The function to invoke to format the output of each command before writing to `output`. + * Default: a wrapper for `util.inspect`. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_customizing_repl_output + */ + writer?: REPLWriter | undefined; + /** + * An optional function used for custom Tab auto completion. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/readline.html#readline_use_of_the_completer_function + */ + completer?: Completer | AsyncCompleter | undefined; + /** + * A flag that specifies whether the default evaluator executes all JavaScript commands in + * strict mode or default (sloppy) mode. + * Accepted values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + replMode?: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT | undefined; + /** + * Stop evaluating the current piece of code when `SIGINT` is received, i.e. 
`Ctrl+C` is + * pressed. This cannot be used together with a custom `eval` function. + * Default: `false`. + */ + breakEvalOnSigint?: boolean | undefined; + } + type REPLEval = ( + this: REPLServer, + evalCmd: string, + context: Context, + file: string, + cb: (err: Error | null, result: any) => void, + ) => void; + type REPLWriter = (this: REPLServer, obj: any) => string; + /** + * This is the default "writer" value, if none is passed in the REPL options, + * and it can be overridden by custom print functions. + */ + const writer: REPLWriter & { + options: InspectOptions; + }; + type REPLCommandAction = (this: REPLServer, text: string) => void; + interface REPLCommand { + /** + * Help text to be displayed when `.help` is entered. + */ + help?: string | undefined; + /** + * The function to execute, optionally accepting a single string argument. + */ + action: REPLCommandAction; + } + /** + * Instances of `repl.REPLServer` are created using the {@link start} method + * or directly using the JavaScript `new` keyword. + * + * ```js + * const repl = require('node:repl'); + * + * const options = { useColors: true }; + * + * const firstInstance = repl.start(options); + * const secondInstance = new repl.REPLServer(options); + * ``` + * @since v0.1.91 + */ + class REPLServer extends Interface { + /** + * The `vm.Context` provided to the `eval` function to be used for JavaScript + * evaluation. + */ + readonly context: Context; + /** + * @deprecated since v14.3.0 - Use `input` instead. + */ + readonly inputStream: NodeJS.ReadableStream; + /** + * @deprecated since v14.3.0 - Use `output` instead. + */ + readonly outputStream: NodeJS.WritableStream; + /** + * The `Readable` stream from which REPL input will be read. + */ + readonly input: NodeJS.ReadableStream; + /** + * The `Writable` stream to which REPL output will be written. + */ + readonly output: NodeJS.WritableStream; + /** + * The commands registered via `replServer.defineCommand()`. + */ + readonly commands: NodeJS.ReadOnlyDict; + /** + * A value indicating whether the REPL is currently in "editor mode". + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_commands_and_special_keys + */ + readonly editorMode: boolean; + /** + * A value indicating whether the `_` variable has been assigned. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreAssigned: boolean; + /** + * The last evaluation result from the REPL (assigned to the `_` variable inside of the REPL). + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly last: any; + /** + * A value indicating whether the `_error` variable has been assigned. + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreErrAssigned: boolean; + /** + * The last error raised inside the REPL (assigned to the `_error` variable inside of the REPL). + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly lastError: any; + /** + * Specified in the REPL options, this is the function to be used when evaluating each + * given line of input. If not specified in the REPL options, this is an async wrapper + * for the JavaScript `eval()` function. 
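As a quick illustration of the options described above, a small embedded REPL that changes the prompt, hides `undefined` results, and evaluates in strict mode might be started as sketched below; the prompt string and the `answer` value exposed on the context are arbitrary examples, not anything defined by the module:

```js
const repl = require('node:repl');

const server = repl.start({
  prompt: 'app> ',                  // illustrative prompt
  ignoreUndefined: true,            // don't echo results that evaluate to undefined
  useColors: true,                  // colorize the default util.inspect-based writer
  replMode: repl.REPL_MODE_STRICT,  // every line behaves as if prefixed with 'use strict'
});

// Properties attached to the vm Context become globals inside the REPL session.
server.context.answer = 42;
```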
+ */ + readonly eval: REPLEval; + /** + * Specified in the REPL options, this is a value indicating whether the default + * `writer` function should include ANSI color styling to REPL output. + */ + readonly useColors: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `eval` + * function will use the JavaScript `global` as the context as opposed to creating a new + * separate context for the REPL instance. + */ + readonly useGlobal: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `writer` + * function should output the result of a command if it evaluates to `undefined`. + */ + readonly ignoreUndefined: boolean; + /** + * Specified in the REPL options, this is the function to invoke to format the output of + * each command before writing to `outputStream`. If not specified in the REPL options, + * this will be a wrapper for `util.inspect`. + */ + readonly writer: REPLWriter; + /** + * Specified in the REPL options, this is the function to use for custom Tab auto-completion. + */ + readonly completer: Completer | AsyncCompleter; + /** + * Specified in the REPL options, this is a flag that specifies whether the default `eval` + * function should execute all JavaScript commands in strict mode or default (sloppy) mode. + * Possible values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + readonly replMode: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT; + /** + * NOTE: According to the documentation: + * + * > Instances of `repl.REPLServer` are created using the `repl.start()` method and + * > _should not_ be created directly using the JavaScript `new` keyword. + * + * `REPLServer` cannot be subclassed due to implementation specifics in NodeJS. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_class_replserver + */ + private constructor(); + /** + * The `replServer.defineCommand()` method is used to add new `.`\-prefixed commands + * to the REPL instance. Such commands are invoked by typing a `.` followed by the`keyword`. The `cmd` is either a `Function` or an `Object` with the following + * properties: + * + * The following example shows two new commands added to the REPL instance: + * + * ```js + * const repl = require('node:repl'); + * + * const replServer = repl.start({ prompt: '> ' }); + * replServer.defineCommand('sayhello', { + * help: 'Say hello', + * action(name) { + * this.clearBufferedCommand(); + * console.log(`Hello, ${name}!`); + * this.displayPrompt(); + * }, + * }); + * replServer.defineCommand('saybye', function saybye() { + * console.log('Goodbye!'); + * this.close(); + * }); + * ``` + * + * The new commands can then be used from within the REPL instance: + * + * ```console + * > .sayhello Node.js User + * Hello, Node.js User! + * > .saybye + * Goodbye! + * ``` + * @since v0.3.0 + * @param keyword The command keyword (_without_ a leading `.` character). + * @param cmd The function to invoke when the command is processed. + */ + defineCommand(keyword: string, cmd: REPLCommandAction | REPLCommand): void; + /** + * The `replServer.displayPrompt()` method readies the REPL instance for input + * from the user, printing the configured `prompt` to a new line in the `output`and resuming the `input` to accept new input. 
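The `eval` hook documented above can signal incomplete input by calling back with `repl.Recoverable`, which makes the REPL show a continuation prompt instead of reporting an error. A minimal sketch is shown below; the `myEval` name is illustrative and the regular-expression check for "incomplete" input is a crude simplification, not an exhaustive test:

```js
const repl = require('node:repl');
const vm = require('node:vm');

function myEval(cmd, context, filename, callback) {
  let result;
  try {
    result = vm.runInThisContext(cmd);
  } catch (err) {
    // Heuristic only: treat a truncated statement as "keep typing".
    if (/^(Unexpected end of input|Unexpected token)/.test(err.message)) {
      return callback(new repl.Recoverable(err));
    }
    return callback(err);
  }
  callback(null, result);
}

repl.start({ prompt: 'multi> ', eval: myEval });
```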
+ * + * When multi-line input is being entered, an ellipsis is printed rather than the + * 'prompt'. + * + * When `preserveCursor` is `true`, the cursor placement will not be reset to `0`. + * + * The `replServer.displayPrompt` method is primarily intended to be called from + * within the action function for commands registered using the`replServer.defineCommand()` method. + * @since v0.1.91 + */ + displayPrompt(preserveCursor?: boolean): void; + /** + * The `replServer.clearBufferedCommand()` method clears any command that has been + * buffered but not yet executed. This method is primarily intended to be + * called from within the action function for commands registered using the`replServer.defineCommand()` method. + * @since v9.0.0 + */ + clearBufferedCommand(): void; + /** + * Initializes a history log file for the REPL instance. When executing the + * Node.js binary and using the command-line REPL, a history file is initialized + * by default. However, this is not the case when creating a REPL + * programmatically. Use this method to initialize a history log file when working + * with REPL instances programmatically. + * @since v11.10.0 + * @param historyPath the path to the history file + * @param callback called when history writes are ready or upon error + */ + setupHistory(path: string, callback: (err: Error | null, repl: this) => void): void; + /** + * events.EventEmitter + * 1. close - inherited from `readline.Interface` + * 2. line - inherited from `readline.Interface` + * 3. pause - inherited from `readline.Interface` + * 4. resume - inherited from `readline.Interface` + * 5. SIGCONT - inherited from `readline.Interface` + * 6. SIGINT - inherited from `readline.Interface` + * 7. SIGTSTP - inherited from `readline.Interface` + * 8. exit + * 9. 
reset + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "line", listener: (input: string) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "SIGCONT", listener: () => void): this; + addListener(event: "SIGINT", listener: () => void): this; + addListener(event: "SIGTSTP", listener: () => void): this; + addListener(event: "exit", listener: () => void): this; + addListener(event: "reset", listener: (context: Context) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "line", input: string): boolean; + emit(event: "pause"): boolean; + emit(event: "resume"): boolean; + emit(event: "SIGCONT"): boolean; + emit(event: "SIGINT"): boolean; + emit(event: "SIGTSTP"): boolean; + emit(event: "exit"): boolean; + emit(event: "reset", context: Context): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "line", listener: (input: string) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "SIGCONT", listener: () => void): this; + on(event: "SIGINT", listener: () => void): this; + on(event: "SIGTSTP", listener: () => void): this; + on(event: "exit", listener: () => void): this; + on(event: "reset", listener: (context: Context) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "line", listener: (input: string) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "SIGCONT", listener: () => void): this; + once(event: "SIGINT", listener: () => void): this; + once(event: "SIGTSTP", listener: () => void): this; + once(event: "exit", listener: () => void): this; + once(event: "reset", listener: (context: Context) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "line", listener: (input: string) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "SIGCONT", listener: () => void): this; + prependListener(event: "SIGINT", listener: () => void): this; + prependListener(event: "SIGTSTP", listener: () => void): this; + prependListener(event: "exit", listener: () => void): this; + prependListener(event: "reset", listener: (context: Context) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "line", listener: (input: string) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "SIGCONT", listener: () => void): this; + prependOnceListener(event: "SIGINT", listener: () => void): this; + prependOnceListener(event: "SIGTSTP", listener: () => void): this; + prependOnceListener(event: "exit", listener: () => void): this; + prependOnceListener(event: "reset", listener: (context: Context) => void): this; + } + /** + 
* A flag passed in the REPL options. Evaluates expressions in sloppy mode. + */ + const REPL_MODE_SLOPPY: unique symbol; + /** + * A flag passed in the REPL options. Evaluates expressions in strict mode. + * This is equivalent to prefacing every repl statement with `'use strict'`. + */ + const REPL_MODE_STRICT: unique symbol; + /** + * The `repl.start()` method creates and starts a {@link REPLServer} instance. + * + * If `options` is a string, then it specifies the input prompt: + * + * ```js + * const repl = require('node:repl'); + * + * // a Unix style prompt + * repl.start('$ '); + * ``` + * @since v0.1.91 + */ + function start(options?: string | ReplOptions): REPLServer; + /** + * Indicates a recoverable error that a `REPLServer` can use to support multi-line input. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_recoverable_errors + */ + class Recoverable extends SyntaxError { + err: Error; + constructor(err: Error); + } +} +declare module "node:repl" { + export * from "repl"; +} diff --git a/node_modules/@types/node/stream.d.ts b/node_modules/@types/node/stream.d.ts new file mode 100644 index 0000000..947a019 --- /dev/null +++ b/node_modules/@types/node/stream.d.ts @@ -0,0 +1,1701 @@ +/** + * A stream is an abstract interface for working with streaming data in Node.js. + * The `node:stream` module provides an API for implementing the stream interface. + * + * There are many stream objects provided by Node.js. For instance, a `request to an HTTP server` and `process.stdout` are both stream instances. + * + * Streams can be readable, writable, or both. All streams are instances of `EventEmitter`. + * + * To access the `node:stream` module: + * + * ```js + * const stream = require('node:stream'); + * ``` + * + * The `node:stream` module is useful for creating new types of stream instances. + * It is usually not necessary to use the `node:stream` module to consume streams. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/stream.js) + */ +declare module "stream" { + import { Abortable, EventEmitter } from "node:events"; + import { Blob as NodeBlob } from "node:buffer"; + import * as streamPromises from "node:stream/promises"; + import * as streamConsumers from "node:stream/consumers"; + import * as streamWeb from "node:stream/web"; + + type ComposeFnParam = (source: any) => void; + + class internal extends EventEmitter { + pipe( + destination: T, + options?: { + end?: boolean | undefined; + }, + ): T; + compose( + stream: T | ComposeFnParam | Iterable | AsyncIterable, + options?: { signal: AbortSignal }, + ): T; + } + import Stream = internal.Stream; + import Readable = internal.Readable; + import ReadableOptions = internal.ReadableOptions; + interface ArrayOptions { + /** the maximum concurrent invocations of `fn` to call on the stream at once. **Default: 1**. */ + concurrency?: number; + /** allows destroying the stream if the signal is aborted. */ + signal?: AbortSignal; + } + class ReadableBase extends Stream implements NodeJS.ReadableStream { + /** + * A utility method for creating Readable Streams out of iterators. + */ + static from(iterable: Iterable | AsyncIterable, options?: ReadableOptions): Readable; + /** + * Returns whether the stream has been read from or cancelled. + * @since v16.8.0 + */ + static isDisturbed(stream: Readable | NodeJS.ReadableStream): boolean; + /** + * Returns whether the stream was destroyed or errored before emitting `'end'`. 
+ * @since v16.8.0 + * @experimental + */ + readonly readableAborted: boolean; + /** + * Is `true` if it is safe to call `readable.read()`, which means + * the stream has not been destroyed or emitted `'error'` or `'end'`. + * @since v11.4.0 + */ + readable: boolean; + /** + * Returns whether `'data'` has been emitted. + * @since v16.7.0, v14.18.0 + * @experimental + */ + readonly readableDidRead: boolean; + /** + * Getter for the property `encoding` of a given `Readable` stream. The `encoding`property can be set using the `readable.setEncoding()` method. + * @since v12.7.0 + */ + readonly readableEncoding: BufferEncoding | null; + /** + * Becomes `true` when `'end'` event is emitted. + * @since v12.9.0 + */ + readonly readableEnded: boolean; + /** + * This property reflects the current state of a `Readable` stream as described + * in the `Three states` section. + * @since v9.4.0 + */ + readonly readableFlowing: boolean | null; + /** + * Returns the value of `highWaterMark` passed when creating this `Readable`. + * @since v9.3.0 + */ + readonly readableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be read. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly readableLength: number; + /** + * Getter for the property `objectMode` of a given `Readable` stream. + * @since v12.3.0 + */ + readonly readableObjectMode: boolean; + /** + * Is `true` after `readable.destroy()` has been called. + * @since v8.0.0 + */ + destroyed: boolean; + /** + * Is `true` after `'close'` has been emitted. + * @since v18.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. + * @since v18.0.0 + */ + readonly errored: Error | null; + constructor(opts?: ReadableOptions); + _construct?(callback: (error?: Error | null) => void): void; + _read(size: number): void; + /** + * The `readable.read()` method reads data out of the internal buffer and + * returns it. If no data is available to be read, `null` is returned. By default, + * the data is returned as a `Buffer` object unless an encoding has been + * specified using the `readable.setEncoding()` method or the stream is operating + * in object mode. + * + * The optional `size` argument specifies a specific number of bytes to read. If`size` bytes are not available to be read, `null` will be returned _unless_the stream has ended, in which + * case all of the data remaining in the internal + * buffer will be returned. + * + * If the `size` argument is not specified, all of the data contained in the + * internal buffer will be returned. + * + * The `size` argument must be less than or equal to 1 GiB. + * + * The `readable.read()` method should only be called on `Readable` streams + * operating in paused mode. In flowing mode, `readable.read()` is called + * automatically until the internal buffer is fully drained. 
+ * + * ```js + * const readable = getReadableStreamSomehow(); + * + * // 'readable' may be triggered multiple times as data is buffered in + * readable.on('readable', () => { + * let chunk; + * console.log('Stream is readable (new data received in buffer)'); + * // Use a loop to make sure we read all currently available data + * while (null !== (chunk = readable.read())) { + * console.log(`Read ${chunk.length} bytes of data...`); + * } + * }); + * + * // 'end' will be triggered once when there is no more data available + * readable.on('end', () => { + * console.log('Reached end of stream.'); + * }); + * ``` + * + * Each call to `readable.read()` returns a chunk of data, or `null`. The chunks + * are not concatenated. A `while` loop is necessary to consume all data + * currently in the buffer. When reading a large file `.read()` may return `null`, + * having consumed all buffered content so far, but there is still more data to + * come not yet buffered. In this case a new `'readable'` event will be emitted + * when there is more data in the buffer. Finally the `'end'` event will be + * emitted when there is no more data to come. + * + * Therefore to read a file's whole contents from a `readable`, it is necessary + * to collect chunks across multiple `'readable'` events: + * + * ```js + * const chunks = []; + * + * readable.on('readable', () => { + * let chunk; + * while (null !== (chunk = readable.read())) { + * chunks.push(chunk); + * } + * }); + * + * readable.on('end', () => { + * const content = chunks.join(''); + * }); + * ``` + * + * A `Readable` stream in object mode will always return a single item from + * a call to `readable.read(size)`, regardless of the value of the`size` argument. + * + * If the `readable.read()` method returns a chunk of data, a `'data'` event will + * also be emitted. + * + * Calling {@link read} after the `'end'` event has + * been emitted will return `null`. No runtime error will be raised. + * @since v0.9.4 + * @param size Optional argument to specify how much data to read. + */ + read(size?: number): any; + /** + * The `readable.setEncoding()` method sets the character encoding for + * data read from the `Readable` stream. + * + * By default, no encoding is assigned and stream data will be returned as`Buffer` objects. Setting an encoding causes the stream data + * to be returned as strings of the specified encoding rather than as `Buffer`objects. For instance, calling `readable.setEncoding('utf8')` will cause the + * output data to be interpreted as UTF-8 data, and passed as strings. Calling`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal + * string format. + * + * The `Readable` stream will properly handle multi-byte characters delivered + * through the stream that would otherwise become improperly decoded if simply + * pulled from the stream as `Buffer` objects. + * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.setEncoding('utf8'); + * readable.on('data', (chunk) => { + * assert.equal(typeof chunk, 'string'); + * console.log('Got %d characters of string data:', chunk.length); + * }); + * ``` + * @since v0.9.4 + * @param encoding The encoding to use. + */ + setEncoding(encoding: BufferEncoding): this; + /** + * The `readable.pause()` method will cause a stream in flowing mode to stop + * emitting `'data'` events, switching out of flowing mode. Any data that + * becomes available will remain in the internal buffer. 
+ * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.on('data', (chunk) => { + * console.log(`Received ${chunk.length} bytes of data.`); + * readable.pause(); + * console.log('There will be no additional data for 1 second.'); + * setTimeout(() => { + * console.log('Now data will start flowing again.'); + * readable.resume(); + * }, 1000); + * }); + * ``` + * + * The `readable.pause()` method has no effect if there is a `'readable'`event listener. + * @since v0.9.4 + */ + pause(): this; + /** + * The `readable.resume()` method causes an explicitly paused `Readable` stream to + * resume emitting `'data'` events, switching the stream into flowing mode. + * + * The `readable.resume()` method can be used to fully consume the data from a + * stream without actually processing any of that data: + * + * ```js + * getReadableStreamSomehow() + * .resume() + * .on('end', () => { + * console.log('Reached the end, but did not read anything.'); + * }); + * ``` + * + * The `readable.resume()` method has no effect if there is a `'readable'`event listener. + * @since v0.9.4 + */ + resume(): this; + /** + * The `readable.isPaused()` method returns the current operating state of the`Readable`. This is used primarily by the mechanism that underlies the`readable.pipe()` method. In most + * typical cases, there will be no reason to + * use this method directly. + * + * ```js + * const readable = new stream.Readable(); + * + * readable.isPaused(); // === false + * readable.pause(); + * readable.isPaused(); // === true + * readable.resume(); + * readable.isPaused(); // === false + * ``` + * @since v0.11.14 + */ + isPaused(): boolean; + /** + * The `readable.unpipe()` method detaches a `Writable` stream previously attached + * using the {@link pipe} method. + * + * If the `destination` is not specified, then _all_ pipes are detached. + * + * If the `destination` is specified, but no pipe is set up for it, then + * the method does nothing. + * + * ```js + * const fs = require('node:fs'); + * const readable = getReadableStreamSomehow(); + * const writable = fs.createWriteStream('file.txt'); + * // All the data from readable goes into 'file.txt', + * // but only for the first second. + * readable.pipe(writable); + * setTimeout(() => { + * console.log('Stop writing to file.txt.'); + * readable.unpipe(writable); + * console.log('Manually close the file stream.'); + * writable.end(); + * }, 1000); + * ``` + * @since v0.9.4 + * @param destination Optional specific stream to unpipe + */ + unpipe(destination?: NodeJS.WritableStream): this; + /** + * Passing `chunk` as `null` signals the end of the stream (EOF) and behaves the + * same as `readable.push(null)`, after which no more data can be written. The EOF + * signal is put at the end of the buffer and any buffered data will still be + * flushed. + * + * The `readable.unshift()` method pushes a chunk of data back into the internal + * buffer. This is useful in certain situations where a stream is being consumed by + * code that needs to "un-consume" some amount of data that it has optimistically + * pulled out of the source, so that the data can be passed on to some other party. + * + * The `stream.unshift(chunk)` method cannot be called after the `'end'` event + * has been emitted or a runtime error will be thrown. + * + * Developers using `stream.unshift()` often should consider switching to + * use of a `Transform` stream instead. See the `API for stream implementers` section for more information. 
+ * + * ```js + * // Pull off a header delimited by \n\n. + * // Use unshift() if we get too much. + * // Call the callback with (error, header, stream). + * const { StringDecoder } = require('node:string_decoder'); + * function parseHeader(stream, callback) { + * stream.on('error', callback); + * stream.on('readable', onReadable); + * const decoder = new StringDecoder('utf8'); + * let header = ''; + * function onReadable() { + * let chunk; + * while (null !== (chunk = stream.read())) { + * const str = decoder.write(chunk); + * if (str.includes('\n\n')) { + * // Found the header boundary. + * const split = str.split(/\n\n/); + * header += split.shift(); + * const remaining = split.join('\n\n'); + * const buf = Buffer.from(remaining, 'utf8'); + * stream.removeListener('error', callback); + * // Remove the 'readable' listener before unshifting. + * stream.removeListener('readable', onReadable); + * if (buf.length) + * stream.unshift(buf); + * // Now the body of the message can be read from the stream. + * callback(null, header, stream); + * return; + * } + * // Still reading the header. + * header += str; + * } + * } + * } + * ``` + * + * Unlike {@link push}, `stream.unshift(chunk)` will not + * end the reading process by resetting the internal reading state of the stream. + * This can cause unexpected results if `readable.unshift()` is called during a + * read (i.e. from within a {@link _read} implementation on a + * custom stream). Following the call to `readable.unshift()` with an immediate {@link push} will reset the reading state appropriately, + * however it is best to simply avoid calling `readable.unshift()` while in the + * process of performing a read. + * @since v0.9.11 + * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array`, or `null`. For object mode + * streams, `chunk` may be any JavaScript value. + * @param encoding Encoding of string chunks. Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`. + */ + unshift(chunk: any, encoding?: BufferEncoding): void; + /** + * Prior to Node.js 0.10, streams did not implement the entire `node:stream`module API as it is currently defined. (See `Compatibility` for more + * information.) + * + * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the`readable.wrap()` method can be used to create a `Readable` + * stream that uses + * the old stream as its data source. + * + * It will rarely be necessary to use `readable.wrap()` but the method has been + * provided as a convenience for interacting with older Node.js applications and + * libraries. + * + * ```js + * const { OldReader } = require('./old-api-module.js'); + * const { Readable } = require('node:stream'); + * const oreader = new OldReader(); + * const myReader = new Readable().wrap(oreader); + * + * myReader.on('readable', () => { + * myReader.read(); // etc. + * }); + * ``` + * @since v0.9.4 + * @param stream An "old style" readable stream + */ + wrap(stream: NodeJS.ReadableStream): this; + push(chunk: any, encoding?: BufferEncoding): boolean; + /** + * The iterator created by this method gives users the option to cancel the destruction + * of the stream if the `for await...of` loop is exited by `return`, `break`, or `throw`, + * or if the iterator should destroy the stream if the stream emitted an error during iteration. 
+ * @since v16.3.0 + * @param options.destroyOnReturn When set to `false`, calling `return` on the async iterator, + * or exiting a `for await...of` iteration using a `break`, `return`, or `throw` will not destroy the stream. + * **Default: `true`**. + */ + iterator(options?: { destroyOnReturn?: boolean }): AsyncIterableIterator; + /** + * This method allows mapping over the stream. The *fn* function will be called for every chunk in the stream. + * If the *fn* function returns a promise - that promise will be `await`ed before being passed to the result stream. + * @since v17.4.0, v16.14.0 + * @param fn a function to map over every chunk in the stream. Async or not. + * @returns a stream mapped with the function *fn*. + */ + map(fn: (data: any, options?: Pick) => any, options?: ArrayOptions): Readable; + /** + * This method allows filtering the stream. For each chunk in the stream the *fn* function will be called + * and if it returns a truthy value, the chunk will be passed to the result stream. + * If the *fn* function returns a promise - that promise will be `await`ed. + * @since v17.4.0, v16.14.0 + * @param fn a function to filter chunks from the stream. Async or not. + * @returns a stream filtered with the predicate *fn*. + */ + filter( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Readable; + /** + * This method allows iterating a stream. For each chunk in the stream the *fn* function will be called. + * If the *fn* function returns a promise - that promise will be `await`ed. + * + * This method is different from `for await...of` loops in that it can optionally process chunks concurrently. + * In addition, a `forEach` iteration can only be stopped by having passed a `signal` option + * and aborting the related AbortController while `for await...of` can be stopped with `break` or `return`. + * In either case the stream will be destroyed. + * + * This method is different from listening to the `'data'` event in that it uses the `readable` event + * in the underlying machinary and can limit the number of concurrent *fn* calls. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise for when the stream has finished. + */ + forEach( + fn: (data: any, options?: Pick) => void | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method allows easily obtaining the contents of a stream. + * + * As this method reads the entire stream into memory, it negates the benefits of streams. It's intended + * for interoperability and convenience, not as the primary way to consume streams. + * @since v17.5.0 + * @returns a promise containing an array with the contents of the stream. + */ + toArray(options?: Pick): Promise; + /** + * This method is similar to `Array.prototype.some` and calls *fn* on each chunk in the stream + * until the awaited return value is `true` (or any truthy value). Once an *fn* call on a chunk + * `await`ed return value is truthy, the stream is destroyed and the promise is fulfilled with `true`. + * If none of the *fn* calls on the chunks return a truthy value, the promise is fulfilled with `false`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to `true` if *fn* returned a truthy value for at least one of the chunks. 
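The array-like helpers declared above (`map()`, `filter()`, `toArray()`, `some()`, and friends) carry no usage samples in these typings. A minimal sketch over an in-memory source built with `Readable.from()` follows; the numbers and the `demo` wrapper are arbitrary, and note that these helpers are marked experimental in the Node.js versions these typings target:

```js
const { Readable } = require('node:stream');

async function demo() {
  const evensDoubled = await Readable.from([1, 2, 3, 4, 5])
    .filter((n) => n % 2 === 0)  // keep only even chunks
    .map((n) => n * 2)           // transform each remaining chunk
    .toArray();                  // collect everything into memory
  console.log(evensDoubled);     // [4, 8]

  const hasLarge = await Readable.from([1, 2, 3])
    .some((n) => n > 2);         // destroys the stream on the first truthy result
  console.log(hasLarge);         // true
}

demo();
```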
+ */ + some( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method is similar to `Array.prototype.find` and calls *fn* on each chunk in the stream + * to find a chunk with a truthy value for *fn*. Once an *fn* call's awaited return value is truthy, + * the stream is destroyed and the promise is fulfilled with value for which *fn* returned a truthy value. + * If all of the *fn* calls on the chunks return a falsy value, the promise is fulfilled with `undefined`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to the first chunk for which *fn* evaluated with a truthy value, + * or `undefined` if no element was found. + */ + find( + fn: (data: any, options?: Pick) => data is T, + options?: ArrayOptions, + ): Promise; + find( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method is similar to `Array.prototype.every` and calls *fn* on each chunk in the stream + * to check if all awaited return values are truthy value for *fn*. Once an *fn* call on a chunk + * `await`ed return value is falsy, the stream is destroyed and the promise is fulfilled with `false`. + * If all of the *fn* calls on the chunks return a truthy value, the promise is fulfilled with `true`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to `true` if *fn* returned a truthy value for every one of the chunks. + */ + every( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method returns a new stream by applying the given callback to each chunk of the stream + * and then flattening the result. + * + * It is possible to return a stream or another iterable or async iterable from *fn* and the result streams + * will be merged (flattened) into the returned stream. + * @since v17.5.0 + * @param fn a function to map over every chunk in the stream. May be async. May be a stream or generator. + * @returns a stream flat-mapped with the function *fn*. + */ + flatMap(fn: (data: any, options?: Pick) => any, options?: ArrayOptions): Readable; + /** + * This method returns a new stream with the first *limit* chunks dropped from the start. + * @since v17.5.0 + * @param limit the number of chunks to drop from the readable. + * @returns a stream with *limit* chunks dropped from the start. + */ + drop(limit: number, options?: Pick): Readable; + /** + * This method returns a new stream with the first *limit* chunks. + * @since v17.5.0 + * @param limit the number of chunks to take from the readable. + * @returns a stream with *limit* chunks taken. + */ + take(limit: number, options?: Pick): Readable; + /** + * This method returns a new stream with chunks of the underlying stream paired with a counter + * in the form `[index, chunk]`. The first index value is `0` and it increases by 1 for each chunk produced. + * @since v17.5.0 + * @returns a stream of indexed pairs. + */ + asIndexedPairs(options?: Pick): Readable; + /** + * This method calls *fn* on each chunk of the stream in order, passing it the result from the calculation + * on the previous element. It returns a promise for the final value of the reduction. + * + * If no *initial* value is supplied the first chunk of the stream is used as the initial value. 
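Similarly, `reduce()` and the chunk-slicing helpers `drop()` and `take()` can be sketched as below; the values are again arbitrary and the example assumes the same experimental Node.js stream helpers as above:

```js
const { Readable } = require('node:stream');

async function demo() {
  // Sum every chunk; 0 is the explicit initial value for the accumulator.
  const total = await Readable.from([1, 2, 3, 4])
    .reduce((acc, n) => acc + n, 0);
  console.log(total); // 10

  // Skip the first two chunks, then keep at most the next two.
  const middle = await Readable.from([1, 2, 3, 4, 5])
    .drop(2)
    .take(2)
    .toArray();
  console.log(middle); // [3, 4]
}

demo();
```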
+ * If the stream is empty, the promise is rejected with a `TypeError` with the `ERR_INVALID_ARGS` code property. + * + * The reducer function iterates the stream element-by-element which means that there is no *concurrency* parameter + * or parallelism. To perform a reduce concurrently, you can extract the async function to `readable.map` method. + * @since v17.5.0 + * @param fn a reducer function to call over every chunk in the stream. Async or not. + * @param initial the initial value to use in the reduction. + * @returns a promise for the final value of the reduction. + */ + reduce( + fn: (previous: any, data: any, options?: Pick) => T, + initial?: undefined, + options?: Pick, + ): Promise; + reduce( + fn: (previous: T, data: any, options?: Pick) => T, + initial: T, + options?: Pick, + ): Promise; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the readable + * stream will release any internal resources and subsequent calls to `push()`will be ignored. + * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, but instead implement `readable._destroy()`. + * @since v8.0.0 + * @param error Error which will be passed as payload in `'error'` event + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. data + * 3. end + * 4. error + * 5. pause + * 6. readable + * 7. resume + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: any) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "data", chunk: any): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "pause"): boolean; + emit(event: "readable"): boolean; + emit(event: "resume"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: any) => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: any) => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: any) => 
void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: any) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: any) => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "pause", listener: () => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "resume", listener: () => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + /** + * Calls `readable.destroy()` with an `AbortError` and returns a promise that fulfills when the stream is finished. + * @since v20.4.0 + */ + [Symbol.asyncDispose](): Promise; + } + import WritableOptions = internal.WritableOptions; + class WritableBase extends Stream implements NodeJS.WritableStream { + /** + * Is `true` if it is safe to call `writable.write()`, which means + * the stream has not been destroyed, errored, or ended. + * @since v11.4.0 + */ + readonly writable: boolean; + /** + * Is `true` after `writable.end()` has been called. This property + * does not indicate whether the data has been flushed, for this use `writable.writableFinished` instead. + * @since v12.9.0 + */ + readonly writableEnded: boolean; + /** + * Is set to `true` immediately before the `'finish'` event is emitted. + * @since v12.6.0 + */ + readonly writableFinished: boolean; + /** + * Return the value of `highWaterMark` passed when creating this `Writable`. + * @since v9.3.0 + */ + readonly writableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be written. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly writableLength: number; + /** + * Getter for the property `objectMode` of a given `Writable` stream. + * @since v12.3.0 + */ + readonly writableObjectMode: boolean; + /** + * Number of times `writable.uncork()` needs to be + * called in order to fully uncork the stream. + * @since v13.2.0, v12.16.0 + */ + readonly writableCorked: number; + /** + * Is `true` after `writable.destroy()` has been called. + * @since v8.0.0 + */ + destroyed: boolean; + /** + * Is `true` after `'close'` has been emitted. + * @since v18.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. 
+ * @since v18.0.0 + */ + readonly errored: Error | null; + /** + * Is `true` if the stream's buffer has been full and stream will emit `'drain'`. + * @since v15.2.0, v14.17.0 + */ + readonly writableNeedDrain: boolean; + constructor(opts?: WritableOptions); + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + _construct?(callback: (error?: Error | null) => void): void; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + /** + * The `writable.write()` method writes some data to the stream, and calls the + * supplied `callback` once the data has been fully handled. If an error + * occurs, the `callback` will be called with the error as its + * first argument. The `callback` is called asynchronously and before `'error'` is + * emitted. + * + * The return value is `true` if the internal buffer is less than the`highWaterMark` configured when the stream was created after admitting `chunk`. + * If `false` is returned, further attempts to write data to the stream should + * stop until the `'drain'` event is emitted. + * + * While a stream is not draining, calls to `write()` will buffer `chunk`, and + * return false. Once all currently buffered chunks are drained (accepted for + * delivery by the operating system), the `'drain'` event will be emitted. + * Once `write()` returns false, do not write more chunks + * until the `'drain'` event is emitted. While calling `write()` on a stream that + * is not draining is allowed, Node.js will buffer all written chunks until + * maximum memory usage occurs, at which point it will abort unconditionally. + * Even before it aborts, high memory usage will cause poor garbage collector + * performance and high RSS (which is not typically released back to the system, + * even after the memory is no longer required). Since TCP sockets may never + * drain if the remote peer does not read the data, writing a socket that is + * not draining may lead to a remotely exploitable vulnerability. + * + * Writing data while the stream is not draining is particularly + * problematic for a `Transform`, because the `Transform` streams are paused + * by default until they are piped or a `'data'` or `'readable'` event handler + * is added. + * + * If the data to be written can be generated or fetched on demand, it is + * recommended to encapsulate the logic into a `Readable` and use {@link pipe}. However, if calling `write()` is preferred, it is + * possible to respect backpressure and avoid memory issues using the `'drain'` event: + * + * ```js + * function write(data, cb) { + * if (!stream.write(data)) { + * stream.once('drain', cb); + * } else { + * process.nextTick(cb); + * } + * } + * + * // Wait for cb to be called before doing any other write. + * write('hello', () => { + * console.log('Write completed, do more writes now.'); + * }); + * ``` + * + * A `Writable` stream in object mode will always ignore the `encoding` argument. + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any + * JavaScript value other than `null`. + * @param [encoding='utf8'] The encoding, if `chunk` is a string. + * @param callback Callback for when this chunk of data is flushed. 
+ * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + write(chunk: any, callback?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, encoding: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean; + /** + * The `writable.setDefaultEncoding()` method sets the default `encoding` for a `Writable` stream. + * @since v0.11.15 + * @param encoding The new default encoding + */ + setDefaultEncoding(encoding: BufferEncoding): this; + /** + * Calling the `writable.end()` method signals that no more data will be written + * to the `Writable`. The optional `chunk` and `encoding` arguments allow one + * final additional chunk of data to be written immediately before closing the + * stream. + * + * Calling the {@link write} method after calling {@link end} will raise an error. + * + * ```js + * // Write 'hello, ' and then end with 'world!'. + * const fs = require('node:fs'); + * const file = fs.createWriteStream('example.txt'); + * file.write('hello, '); + * file.end('world!'); + * // Writing more now is not allowed! + * ``` + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any + * JavaScript value other than `null`. + * @param encoding The encoding if `chunk` is a string + * @param callback Callback for when the stream is finished. + */ + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding: BufferEncoding, cb?: () => void): this; + /** + * The `writable.cork()` method forces all written data to be buffered in memory. + * The buffered data will be flushed when either the {@link uncork} or {@link end} methods are called. + * + * The primary intent of `writable.cork()` is to accommodate a situation in which + * several small chunks are written to the stream in rapid succession. Instead of + * immediately forwarding them to the underlying destination, `writable.cork()`buffers all the chunks until `writable.uncork()` is called, which will pass them + * all to `writable._writev()`, if present. This prevents a head-of-line blocking + * situation where data is being buffered while waiting for the first small chunk + * to be processed. However, use of `writable.cork()` without implementing`writable._writev()` may have an adverse effect on throughput. + * + * See also: `writable.uncork()`, `writable._writev()`. + * @since v0.11.2 + */ + cork(): void; + /** + * The `writable.uncork()` method flushes all data buffered since {@link cork} was called. + * + * When using `writable.cork()` and `writable.uncork()` to manage the buffering + * of writes to a stream, defer calls to `writable.uncork()` using`process.nextTick()`. Doing so allows batching of all`writable.write()` calls that occur within a given Node.js event + * loop phase. + * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.write('data '); + * process.nextTick(() => stream.uncork()); + * ``` + * + * If the `writable.cork()` method is called multiple times on a stream, the + * same number of calls to `writable.uncork()` must be called to flush the buffered + * data. 
+ * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.cork(); + * stream.write('data '); + * process.nextTick(() => { + * stream.uncork(); + * // The data will not be flushed until uncork() is called a second time. + * stream.uncork(); + * }); + * ``` + * + * See also: `writable.cork()`. + * @since v0.11.2 + */ + uncork(): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the writable + * stream has ended and subsequent calls to `write()` or `end()` will result in + * an `ERR_STREAM_DESTROYED` error. + * This is a destructive and immediate way to destroy a stream. Previous calls to`write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error. + * Use `end()` instead of destroy if data should flush before close, or wait for + * the `'drain'` event before destroying the stream. + * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, + * but instead implement `writable._destroy()`. + * @since v8.0.0 + * @param error Optional, an error to emit with `'error'` event. + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. drain + * 3. error + * 4. finish + * 5. pipe + * 6. unpipe + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: Readable) => void): this; + addListener(event: "unpipe", listener: (src: Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "drain"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pipe", src: Readable): boolean; + emit(event: "unpipe", src: Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: Readable) => void): this; + on(event: "unpipe", listener: (src: Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: Readable) => void): this; + once(event: "unpipe", listener: (src: Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: Readable) => void): this; + prependListener(event: "unpipe", listener: (src: Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => 
void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "pipe", listener: (src: Readable) => void): this; + removeListener(event: "unpipe", listener: (src: Readable) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + namespace internal { + class Stream extends internal { + constructor(opts?: ReadableOptions); + } + interface StreamOptions extends Abortable { + emitClose?: boolean | undefined; + highWaterMark?: number | undefined; + objectMode?: boolean | undefined; + construct?(this: T, callback: (error?: Error | null) => void): void; + destroy?(this: T, error: Error | null, callback: (error?: Error | null) => void): void; + autoDestroy?: boolean | undefined; + } + interface ReadableOptions extends StreamOptions { + encoding?: BufferEncoding | undefined; + read?(this: Readable, size: number): void; + } + /** + * @since v0.9.4 + */ + class Readable extends ReadableBase { + /** + * A utility method for creating a `Readable` from a web `ReadableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + readableStream: streamWeb.ReadableStream, + options?: Pick, + ): Readable; + /** + * A utility method for creating a web `ReadableStream` from a `Readable`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamReadable: Readable): streamWeb.ReadableStream; + } + interface WritableOptions extends StreamOptions { + decodeStrings?: boolean | undefined; + defaultEncoding?: BufferEncoding | undefined; + write?( + this: Writable, + chunk: any, + encoding: BufferEncoding, + callback: (error?: Error | null) => void, + ): void; + writev?( + this: Writable, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Writable, callback: (error?: Error | null) => void): void; + } + /** + * @since v0.9.4 + */ + class Writable extends WritableBase { + /** + * A utility method for creating a `Writable` from a web `WritableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + writableStream: streamWeb.WritableStream, + options?: Pick, + ): Writable; + /** + * A utility method for creating a web `WritableStream` from a `Writable`. 
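+     * For illustration, a hedged sketch of this experimental helper, wrapping `process.stdout`
+     * (any `Writable` would do):
+     *
+     * ```js
+     * const { Writable } = require('node:stream');
+     * const webWritable = Writable.toWeb(process.stdout);
+     * const writer = webWritable.getWriter();
+     * writer.write('hello from a web WritableStream\n').then(() => writer.releaseLock());
+     * ```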
+ * @since v17.0.0 + * @experimental + */ + static toWeb(streamWritable: Writable): streamWeb.WritableStream; + } + interface DuplexOptions extends ReadableOptions, WritableOptions { + allowHalfOpen?: boolean | undefined; + readableObjectMode?: boolean | undefined; + writableObjectMode?: boolean | undefined; + readableHighWaterMark?: number | undefined; + writableHighWaterMark?: number | undefined; + writableCorked?: number | undefined; + construct?(this: Duplex, callback: (error?: Error | null) => void): void; + read?(this: Duplex, size: number): void; + write?(this: Duplex, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + writev?( + this: Duplex, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Duplex, callback: (error?: Error | null) => void): void; + destroy?(this: Duplex, error: Error | null, callback: (error?: Error | null) => void): void; + } + /** + * Duplex streams are streams that implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Duplex` streams include: + * + * * `TCP sockets` + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Duplex extends ReadableBase implements WritableBase { + readonly writable: boolean; + readonly writableEnded: boolean; + readonly writableFinished: boolean; + readonly writableHighWaterMark: number; + readonly writableLength: number; + readonly writableObjectMode: boolean; + readonly writableCorked: number; + readonly writableNeedDrain: boolean; + readonly closed: boolean; + readonly errored: Error | null; + /** + * If `false` then the stream will automatically end the writable side when the + * readable side ends. Set initially by the `allowHalfOpen` constructor option, + * which defaults to `true`. + * + * This can be changed manually to change the half-open behavior of an existing`Duplex` stream instance, but must be changed before the `'end'` event is + * emitted. + * @since v0.9.4 + */ + allowHalfOpen: boolean; + constructor(opts?: DuplexOptions); + /** + * A utility method for creating duplex streams. + * + * - `Stream` converts writable stream into writable `Duplex` and readable stream + * to `Duplex`. + * - `Blob` converts into readable `Duplex`. + * - `string` converts into readable `Duplex`. + * - `ArrayBuffer` converts into readable `Duplex`. + * - `AsyncIterable` converts into a readable `Duplex`. Cannot yield `null`. + * - `AsyncGeneratorFunction` converts into a readable/writable transform + * `Duplex`. Must take a source `AsyncIterable` as first parameter. Cannot yield + * `null`. + * - `AsyncFunction` converts into a writable `Duplex`. Must return + * either `null` or `undefined` + * - `Object ({ writable, readable })` converts `readable` and + * `writable` into `Stream` and then combines them into `Duplex` where the + * `Duplex` will write to the `writable` and read from the `readable`. + * - `Promise` converts into readable `Duplex`. Value `null` is ignored. 
+ * + * @since v16.8.0 + */ + static from( + src: + | Stream + | NodeBlob + | ArrayBuffer + | string + | Iterable + | AsyncIterable + | AsyncGeneratorFunction + | Promise + | Object, + ): Duplex; + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + write(chunk: any, encoding?: BufferEncoding, cb?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean; + setDefaultEncoding(encoding: BufferEncoding): this; + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding?: BufferEncoding, cb?: () => void): this; + cork(): void; + uncork(): void; + /** + * A utility method for creating a web `ReadableStream` and `WritableStream` from a `Duplex`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamDuplex: Duplex): { + readable: streamWeb.ReadableStream; + writable: streamWeb.WritableStream; + }; + /** + * A utility method for creating a `Duplex` from a web `ReadableStream` and `WritableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + duplexStream: { + readable: streamWeb.ReadableStream; + writable: streamWeb.WritableStream; + }, + options?: Pick< + DuplexOptions, + "allowHalfOpen" | "decodeStrings" | "encoding" | "highWaterMark" | "objectMode" | "signal" + >, + ): Duplex; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. data + * 3. drain + * 4. end + * 5. error + * 6. finish + * 7. pause + * 8. pipe + * 9. readable + * 10. resume + * 11. 
unpipe + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: any) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "pipe", listener: (src: Readable) => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "unpipe", listener: (src: Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "data", chunk: any): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pause"): boolean; + emit(event: "pipe", src: Readable): boolean; + emit(event: "readable"): boolean; + emit(event: "resume"): boolean; + emit(event: "unpipe", src: Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: any) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pause", listener: () => void): this; + on(event: "pipe", listener: (src: Readable) => void): this; + on(event: "readable", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "unpipe", listener: (src: Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: any) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pause", listener: () => void): this; + once(event: "pipe", listener: (src: Readable) => void): this; + once(event: "readable", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "unpipe", listener: (src: Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: any) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "pipe", listener: (src: Readable) => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "unpipe", listener: (src: Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: 
"data", listener: (chunk: any) => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: Readable) => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: any) => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "pause", listener: () => void): this; + removeListener(event: "pipe", listener: (src: Readable) => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "resume", listener: () => void): this; + removeListener(event: "unpipe", listener: (src: Readable) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + type TransformCallback = (error?: Error | null, data?: any) => void; + interface TransformOptions extends DuplexOptions { + construct?(this: Transform, callback: (error?: Error | null) => void): void; + read?(this: Transform, size: number): void; + write?( + this: Transform, + chunk: any, + encoding: BufferEncoding, + callback: (error?: Error | null) => void, + ): void; + writev?( + this: Transform, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Transform, callback: (error?: Error | null) => void): void; + destroy?(this: Transform, error: Error | null, callback: (error?: Error | null) => void): void; + transform?(this: Transform, chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + flush?(this: Transform, callback: TransformCallback): void; + } + /** + * Transform streams are `Duplex` streams where the output is in some way + * related to the input. Like all `Duplex` streams, `Transform` streams + * implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Transform` streams include: + * + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Transform extends Duplex { + constructor(opts?: TransformOptions); + _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + _flush(callback: TransformCallback): void; + } + /** + * The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is + * primarily for examples and testing, but there are some use cases where`stream.PassThrough` is useful as a building block for novel sorts of streams. + */ + class PassThrough extends Transform {} + /** + * A stream to attach a signal to. + * + * Attaches an AbortSignal to a readable or writeable stream. 
This lets code + * control stream destruction using an `AbortController`. + * + * Calling `abort` on the `AbortController` corresponding to the passed`AbortSignal` will behave the same way as calling `.destroy(new AbortError())`on the stream, and `controller.error(new + * AbortError())` for webstreams. + * + * ```js + * const fs = require('node:fs'); + * + * const controller = new AbortController(); + * const read = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')), + * ); + * // Later, abort the operation closing the stream + * controller.abort(); + * ``` + * + * Or using an `AbortSignal` with a readable stream as an async iterable: + * + * ```js + * const controller = new AbortController(); + * setTimeout(() => controller.abort(), 10_000); // set a timeout + * const stream = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')), + * ); + * (async () => { + * try { + * for await (const chunk of stream) { + * await process(chunk); + * } + * } catch (e) { + * if (e.name === 'AbortError') { + * // The operation was cancelled + * } else { + * throw e; + * } + * } + * })(); + * ``` + * + * Or using an `AbortSignal` with a ReadableStream: + * + * ```js + * const controller = new AbortController(); + * const rs = new ReadableStream({ + * start(controller) { + * controller.enqueue('hello'); + * controller.enqueue('world'); + * controller.close(); + * }, + * }); + * + * addAbortSignal(controller.signal, rs); + * + * finished(rs, (err) => { + * if (err) { + * if (err.name === 'AbortError') { + * // The operation was cancelled + * } + * } + * }); + * + * const reader = rs.getReader(); + * + * reader.read().then(({ value, done }) => { + * console.log(value); // hello + * console.log(done); // false + * controller.abort(); + * }); + * ``` + * @since v15.4.0 + * @param signal A signal representing possible cancellation + * @param stream a stream to attach a signal to + */ + function addAbortSignal(signal: AbortSignal, stream: T): T; + /** + * Returns the default highWaterMark used by streams. + * Defaults to `16384` (16 KiB), or `16` for `objectMode`. + * @since v19.9.0 + * @param objectMode + */ + function getDefaultHighWaterMark(objectMode: boolean): number; + /** + * Sets the default highWaterMark used by streams. + * @since v19.9.0 + * @param objectMode + * @param value highWaterMark value + */ + function setDefaultHighWaterMark(objectMode: boolean, value: number): void; + interface FinishedOptions extends Abortable { + error?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + } + /** + * A readable and/or writable stream/webstream. + * + * A function to get notified when a stream is no longer readable, writable + * or has experienced an error or a premature close event. + * + * ```js + * const { finished } = require('node:stream'); + * const fs = require('node:fs'); + * + * const rs = fs.createReadStream('archive.tar'); + * + * finished(rs, (err) => { + * if (err) { + * console.error('Stream failed.', err); + * } else { + * console.log('Stream is done reading.'); + * } + * }); + * + * rs.resume(); // Drain the stream. + * ``` + * + * Especially useful in error handling scenarios where a stream is destroyed + * prematurely (like an aborted HTTP request), and will not emit `'end'`or `'finish'`. + * + * The `finished` API provides `promise version`. + * + * `stream.finished()` leaves dangling event listeners (in particular`'error'`, `'end'`, `'finish'` and `'close'`) after `callback` has been + * invoked. 
The reason for this is so that unexpected `'error'` events (due to + * incorrect stream implementations) do not cause unexpected crashes. + * If this is unwanted behavior then the returned cleanup function needs to be + * invoked in the callback: + * + * ```js + * const cleanup = finished(rs, (err) => { + * cleanup(); + * // ... + * }); + * ``` + * @since v10.0.0 + * @param stream A readable and/or writable stream. + * @param callback A callback function that takes an optional error argument. + * @return A cleanup function which removes all registered listeners. + */ + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options: FinishedOptions, + callback: (err?: NodeJS.ErrnoException | null) => void, + ): () => void; + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + callback: (err?: NodeJS.ErrnoException | null) => void, + ): () => void; + namespace finished { + function __promisify__( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options?: FinishedOptions, + ): Promise; + } + type PipelineSourceFunction = () => Iterable | AsyncIterable; + type PipelineSource = Iterable | AsyncIterable | NodeJS.ReadableStream | PipelineSourceFunction; + type PipelineTransform, U> = + | NodeJS.ReadWriteStream + | (( + source: S extends (...args: any[]) => Iterable | AsyncIterable ? AsyncIterable + : S, + ) => AsyncIterable); + type PipelineTransformSource = PipelineSource | PipelineTransform; + type PipelineDestinationIterableFunction = (source: AsyncIterable) => AsyncIterable; + type PipelineDestinationPromiseFunction = (source: AsyncIterable) => Promise

<P>; + type PipelineDestination<S extends PipelineTransformSource<any>, P> = S extends + PipelineTransformSource<infer ST> ? + | NodeJS.WritableStream + | PipelineDestinationIterableFunction<ST> + | PipelineDestinationPromiseFunction<ST, P> + : never; + type PipelineCallback<S extends PipelineDestination<any, any>> = S extends + PipelineDestinationPromiseFunction<any, infer P> ? (err: NodeJS.ErrnoException | null, value: P) => void + : (err: NodeJS.ErrnoException | null) => void; + type PipelinePromise<S extends PipelineDestination<any, any>> = S extends + PipelineDestinationPromiseFunction<any, infer P> ? Promise<P>

: Promise; + interface PipelineOptions { + signal?: AbortSignal | undefined; + end?: boolean | undefined; + } + /** + * A module method to pipe between streams and generators forwarding errors and + * properly cleaning up and provide a callback when the pipeline is complete. + * + * ```js + * const { pipeline } = require('node:stream'); + * const fs = require('node:fs'); + * const zlib = require('node:zlib'); + * + * // Use the pipeline API to easily pipe a series of streams + * // together and get notified when the pipeline is fully done. + * + * // A pipeline to gzip a potentially huge tar file efficiently: + * + * pipeline( + * fs.createReadStream('archive.tar'), + * zlib.createGzip(), + * fs.createWriteStream('archive.tar.gz'), + * (err) => { + * if (err) { + * console.error('Pipeline failed.', err); + * } else { + * console.log('Pipeline succeeded.'); + * } + * }, + * ); + * ``` + * + * The `pipeline` API provides a `promise version`. + * + * `stream.pipeline()` will call `stream.destroy(err)` on all streams except: + * + * * `Readable` streams which have emitted `'end'` or `'close'`. + * * `Writable` streams which have emitted `'finish'` or `'close'`. + * + * `stream.pipeline()` leaves dangling event listeners on the streams + * after the `callback` has been invoked. In the case of reuse of streams after + * failure, this can cause event listener leaks and swallowed errors. If the last + * stream is readable, dangling event listeners will be removed so that the last + * stream can be consumed later. + * + * `stream.pipeline()` closes all the streams when an error is raised. + * The `IncomingRequest` usage with `pipeline` could lead to an unexpected behavior + * once it would destroy the socket without sending the expected response. + * See the example below: + * + * ```js + * const fs = require('node:fs'); + * const http = require('node:http'); + * const { pipeline } = require('node:stream'); + * + * const server = http.createServer((req, res) => { + * const fileStream = fs.createReadStream('./fileNotExist.txt'); + * pipeline(fileStream, res, (err) => { + * if (err) { + * console.log(err); // No such file + * // this message can't be sent once `pipeline` already destroyed the socket + * return res.end('error!!!'); + * } + * }); + * }); + * ``` + * @since v10.0.0 + * @param callback Called when the pipeline is fully done. + */ + function pipeline, B extends PipelineDestination>( + source: A, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? 
B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline( + streams: ReadonlyArray, + callback?: (err: NodeJS.ErrnoException | null) => void, + ): NodeJS.WritableStream; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array< + NodeJS.ReadWriteStream | NodeJS.WritableStream | ((err: NodeJS.ErrnoException | null) => void) + > + ): NodeJS.WritableStream; + namespace pipeline { + function __promisify__, B extends PipelineDestination>( + source: A, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__( + streams: ReadonlyArray, + options?: PipelineOptions, + ): Promise; + function __promisify__( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; + } + interface Pipe { + close(): void; + hasRef(): boolean; + ref(): void; + unref(): void; + } + /** + * Returns whether the stream has encountered an error. + * @since v17.3.0, v16.14.0 + * @experimental + */ + function isErrored(stream: Readable | Writable | NodeJS.ReadableStream | NodeJS.WritableStream): boolean; + /** + * Returns whether the stream is readable. 
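+     * For illustration, a small sketch of these experimental helpers:
+     *
+     * ```js
+     * const { Readable, isReadable, isErrored } = require('node:stream');
+     * const r = Readable.from(['a', 'b']);
+     * console.log(isReadable(r)); // true
+     * console.log(isErrored(r));  // false
+     * ```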
+ * @since v17.4.0, v16.14.0 + * @experimental + */ + function isReadable(stream: Readable | NodeJS.ReadableStream): boolean; + const promises: typeof streamPromises; + const consumers: typeof streamConsumers; + } + export = internal; +} +declare module "node:stream" { + import stream = require("stream"); + export = stream; +} diff --git a/node_modules/@types/node/stream/consumers.d.ts b/node_modules/@types/node/stream/consumers.d.ts new file mode 100644 index 0000000..5ad9cba --- /dev/null +++ b/node_modules/@types/node/stream/consumers.d.ts @@ -0,0 +1,12 @@ +declare module "stream/consumers" { + import { Blob as NodeBlob } from "node:buffer"; + import { Readable } from "node:stream"; + function buffer(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function text(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function arrayBuffer(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function blob(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function json(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; +} +declare module "node:stream/consumers" { + export * from "stream/consumers"; +} diff --git a/node_modules/@types/node/stream/promises.d.ts b/node_modules/@types/node/stream/promises.d.ts new file mode 100644 index 0000000..6eac5b7 --- /dev/null +++ b/node_modules/@types/node/stream/promises.d.ts @@ -0,0 +1,83 @@ +declare module "stream/promises" { + import { + FinishedOptions, + PipelineDestination, + PipelineOptions, + PipelinePromise, + PipelineSource, + PipelineTransform, + } from "node:stream"; + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options?: FinishedOptions, + ): Promise; + function pipeline, B extends PipelineDestination>( + source: A, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline( + streams: ReadonlyArray, + options?: PipelineOptions, + ): Promise; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; +} +declare module "node:stream/promises" { + export * from "stream/promises"; +} diff --git a/node_modules/@types/node/stream/web.d.ts b/node_modules/@types/node/stream/web.d.ts new file mode 100644 index 0000000..bfbdc8f --- 
/dev/null +++ b/node_modules/@types/node/stream/web.d.ts @@ -0,0 +1,367 @@ +declare module "stream/web" { + // stub module, pending copy&paste from .d.ts or manual impl + // copy from lib.dom.d.ts + interface ReadableWritablePair { + readable: ReadableStream; + /** + * Provides a convenient, chainable way of piping this readable stream + * through a transform stream (or any other { writable, readable } + * pair). It simply pipes the stream into the writable side of the + * supplied pair, and returns the readable side for further use. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + */ + writable: WritableStream; + } + interface StreamPipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + /** + * Pipes this readable stream to a given writable stream destination. + * The way in which the piping process behaves under various error + * conditions can be customized with a number of passed options. It + * returns a promise that fulfills when the piping process completes + * successfully, or rejects if any errors were encountered. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + * + * Errors and closures of the source and destination streams propagate + * as follows: + * + * An error in this source readable stream will abort destination, + * unless preventAbort is truthy. The returned promise will be rejected + * with the source's error, or with any error that occurs during + * aborting the destination. + * + * An error in destination will cancel this source readable stream, + * unless preventCancel is truthy. The returned promise will be rejected + * with the destination's error, or with any error that occurs during + * canceling the source. + * + * When this source readable stream closes, destination will be closed, + * unless preventClose is truthy. The returned promise will be fulfilled + * once this process completes, unless an error is encountered while + * closing the destination, in which case it will be rejected with that + * error. + * + * If destination starts out closed or closing, this source readable + * stream will be canceled, unless preventCancel is true. The returned + * promise will be rejected with an error indicating piping to a closed + * stream failed, or with any error that occurs during canceling the + * source. + * + * The signal option can be set to an AbortSignal to allow aborting an + * ongoing pipe operation via the corresponding AbortController. In this + * case, this source readable stream will be canceled, and destination + * aborted, unless the respective options preventCancel or preventAbort + * are set. 
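+     * For illustration, a hedged sketch of piping with these options and an `AbortSignal`
+     * (recent Node.js versions expose `ReadableStream`/`WritableStream` as globals):
+     *
+     * ```js
+     * const rs = new ReadableStream({
+     *   start(controller) { controller.enqueue('hi'); controller.close(); },
+     * });
+     * const ws = new WritableStream({ write(chunk) { console.log(chunk); } });
+     * const ac = new AbortController();
+     * rs.pipeTo(ws, { preventAbort: false, signal: ac.signal })
+     *   .then(() => console.log('pipe completed'));
+     * ```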
+ */ + preventClose?: boolean; + signal?: AbortSignal; + } + interface ReadableStreamGenericReader { + readonly closed: Promise; + cancel(reason?: any): Promise; + } + interface ReadableStreamDefaultReadValueResult { + done: false; + value: T; + } + interface ReadableStreamDefaultReadDoneResult { + done: true; + value?: undefined; + } + type ReadableStreamController = ReadableStreamDefaultController; + type ReadableStreamDefaultReadResult = + | ReadableStreamDefaultReadValueResult + | ReadableStreamDefaultReadDoneResult; + interface ReadableStreamReadValueResult { + done: false; + value: T; + } + interface ReadableStreamReadDoneResult { + done: true; + value?: T; + } + type ReadableStreamReadResult = ReadableStreamReadValueResult | ReadableStreamReadDoneResult; + interface ReadableByteStreamControllerCallback { + (controller: ReadableByteStreamController): void | PromiseLike; + } + interface UnderlyingSinkAbortCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSinkCloseCallback { + (): void | PromiseLike; + } + interface UnderlyingSinkStartCallback { + (controller: WritableStreamDefaultController): any; + } + interface UnderlyingSinkWriteCallback { + (chunk: W, controller: WritableStreamDefaultController): void | PromiseLike; + } + interface UnderlyingSourceCancelCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSourcePullCallback { + (controller: ReadableStreamController): void | PromiseLike; + } + interface UnderlyingSourceStartCallback { + (controller: ReadableStreamController): any; + } + interface TransformerFlushCallback { + (controller: TransformStreamDefaultController): void | PromiseLike; + } + interface TransformerStartCallback { + (controller: TransformStreamDefaultController): any; + } + interface TransformerTransformCallback { + (chunk: I, controller: TransformStreamDefaultController): void | PromiseLike; + } + interface UnderlyingByteSource { + autoAllocateChunkSize?: number; + cancel?: ReadableStreamErrorCallback; + pull?: ReadableByteStreamControllerCallback; + start?: ReadableByteStreamControllerCallback; + type: "bytes"; + } + interface UnderlyingSource { + cancel?: UnderlyingSourceCancelCallback; + pull?: UnderlyingSourcePullCallback; + start?: UnderlyingSourceStartCallback; + type?: undefined; + } + interface UnderlyingSink { + abort?: UnderlyingSinkAbortCallback; + close?: UnderlyingSinkCloseCallback; + start?: UnderlyingSinkStartCallback; + type?: undefined; + write?: UnderlyingSinkWriteCallback; + } + interface ReadableStreamErrorCallback { + (reason: any): void | PromiseLike; + } + /** This Streams API interface represents a readable stream of byte data. 
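+     * For illustration, a hedged sketch combining the static `from()` declared below with
+     * async iteration (available in recent Node.js versions):
+     *
+     * ```js
+     * const { ReadableStream } = require('node:stream/web');
+     * const rs = ReadableStream.from(['a', 'b', 'c']);
+     * (async () => {
+     *   for await (const chunk of rs) console.log(chunk);
+     * })();
+     * ```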
*/ + interface ReadableStream { + readonly locked: boolean; + cancel(reason?: any): Promise; + getReader(): ReadableStreamDefaultReader; + getReader(options: { mode: "byob" }): ReadableStreamBYOBReader; + pipeThrough(transform: ReadableWritablePair, options?: StreamPipeOptions): ReadableStream; + pipeTo(destination: WritableStream, options?: StreamPipeOptions): Promise; + tee(): [ReadableStream, ReadableStream]; + values(options?: { preventCancel?: boolean }): AsyncIterableIterator; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + const ReadableStream: { + prototype: ReadableStream; + from(iterable: Iterable | AsyncIterable): ReadableStream; + new(underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy): ReadableStream; + new(underlyingSource?: UnderlyingSource, strategy?: QueuingStrategy): ReadableStream; + }; + interface ReadableStreamDefaultReader extends ReadableStreamGenericReader { + read(): Promise>; + releaseLock(): void; + } + interface ReadableStreamBYOBReader extends ReadableStreamGenericReader { + read(view: T): Promise>; + releaseLock(): void; + } + const ReadableStreamDefaultReader: { + prototype: ReadableStreamDefaultReader; + new(stream: ReadableStream): ReadableStreamDefaultReader; + }; + const ReadableStreamBYOBReader: any; + const ReadableStreamBYOBRequest: any; + interface ReadableByteStreamController { + readonly byobRequest: undefined; + readonly desiredSize: number | null; + close(): void; + enqueue(chunk: ArrayBufferView): void; + error(error?: any): void; + } + const ReadableByteStreamController: { + prototype: ReadableByteStreamController; + new(): ReadableByteStreamController; + }; + interface ReadableStreamDefaultController { + readonly desiredSize: number | null; + close(): void; + enqueue(chunk?: R): void; + error(e?: any): void; + } + const ReadableStreamDefaultController: { + prototype: ReadableStreamDefaultController; + new(): ReadableStreamDefaultController; + }; + interface Transformer { + flush?: TransformerFlushCallback; + readableType?: undefined; + start?: TransformerStartCallback; + transform?: TransformerTransformCallback; + writableType?: undefined; + } + interface TransformStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const TransformStream: { + prototype: TransformStream; + new( + transformer?: Transformer, + writableStrategy?: QueuingStrategy, + readableStrategy?: QueuingStrategy, + ): TransformStream; + }; + interface TransformStreamDefaultController { + readonly desiredSize: number | null; + enqueue(chunk?: O): void; + error(reason?: any): void; + terminate(): void; + } + const TransformStreamDefaultController: { + prototype: TransformStreamDefaultController; + new(): TransformStreamDefaultController; + }; + /** + * This Streams API interface provides a standard abstraction for writing + * streaming data to a destination, known as a sink. This object comes with + * built-in back pressure and queuing. + */ + interface WritableStream { + readonly locked: boolean; + abort(reason?: any): Promise; + close(): Promise; + getWriter(): WritableStreamDefaultWriter; + } + const WritableStream: { + prototype: WritableStream; + new(underlyingSink?: UnderlyingSink, strategy?: QueuingStrategy): WritableStream; + }; + /** + * This Streams API interface is the object returned by + * WritableStream.getWriter() and once created locks the < writer to the + * WritableStream ensuring that no other streams can write to the underlying + * sink. 
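+     * For illustration, a minimal sketch of acquiring and using a writer:
+     *
+     * ```js
+     * const { WritableStream } = require('node:stream/web');
+     * const ws = new WritableStream({ write(chunk) { console.log('sink:', chunk); } });
+     * const writer = ws.getWriter();
+     * writer.write('hello').then(() => writer.close());
+     * ```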
+ */ + interface WritableStreamDefaultWriter { + readonly closed: Promise; + readonly desiredSize: number | null; + readonly ready: Promise; + abort(reason?: any): Promise; + close(): Promise; + releaseLock(): void; + write(chunk?: W): Promise; + } + const WritableStreamDefaultWriter: { + prototype: WritableStreamDefaultWriter; + new(stream: WritableStream): WritableStreamDefaultWriter; + }; + /** + * This Streams API interface represents a controller allowing control of a + * WritableStream's state. When constructing a WritableStream, the + * underlying sink is given a corresponding WritableStreamDefaultController + * instance to manipulate. + */ + interface WritableStreamDefaultController { + error(e?: any): void; + } + const WritableStreamDefaultController: { + prototype: WritableStreamDefaultController; + new(): WritableStreamDefaultController; + }; + interface QueuingStrategy { + highWaterMark?: number; + size?: QueuingStrategySize; + } + interface QueuingStrategySize { + (chunk?: T): number; + } + interface QueuingStrategyInit { + /** + * Creates a new ByteLengthQueuingStrategy with the provided high water + * mark. + * + * Note that the provided high water mark will not be validated ahead of + * time. Instead, if it is negative, NaN, or not a number, the resulting + * ByteLengthQueuingStrategy will cause the corresponding stream + * constructor to throw. + */ + highWaterMark: number; + } + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface ByteLengthQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const ByteLengthQueuingStrategy: { + prototype: ByteLengthQueuingStrategy; + new(init: QueuingStrategyInit): ByteLengthQueuingStrategy; + }; + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface CountQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const CountQueuingStrategy: { + prototype: CountQueuingStrategy; + new(init: QueuingStrategyInit): CountQueuingStrategy; + }; + interface TextEncoderStream { + /** Returns "utf-8". */ + readonly encoding: "utf-8"; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextEncoderStream: { + prototype: TextEncoderStream; + new(): TextEncoderStream; + }; + interface TextDecoderOptions { + fatal?: boolean; + ignoreBOM?: boolean; + } + type BufferSource = ArrayBufferView | ArrayBuffer; + interface TextDecoderStream { + /** Returns encoding's name, lower cased. */ + readonly encoding: string; + /** Returns `true` if error mode is "fatal", and `false` otherwise. */ + readonly fatal: boolean; + /** Returns `true` if ignore BOM flag is set, and `false` otherwise. 
*/ + readonly ignoreBOM: boolean; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextDecoderStream: { + prototype: TextDecoderStream; + new(encoding?: string, options?: TextDecoderOptions): TextDecoderStream; + }; + interface CompressionStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const CompressionStream: { + prototype: CompressionStream; + new(format: string): CompressionStream; + }; + interface DecompressionStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const DecompressionStream: { + prototype: DecompressionStream; + new(format: string): DecompressionStream; + }; +} +declare module "node:stream/web" { + export * from "stream/web"; +} diff --git a/node_modules/@types/node/string_decoder.d.ts b/node_modules/@types/node/string_decoder.d.ts new file mode 100644 index 0000000..cd76bf8 --- /dev/null +++ b/node_modules/@types/node/string_decoder.d.ts @@ -0,0 +1,67 @@ +/** + * The `node:string_decoder` module provides an API for decoding `Buffer` objects + * into strings in a manner that preserves encoded multi-byte UTF-8 and UTF-16 + * characters. It can be accessed using: + * + * ```js + * const { StringDecoder } = require('node:string_decoder'); + * ``` + * + * The following example shows the basic use of the `StringDecoder` class. + * + * ```js + * const { StringDecoder } = require('node:string_decoder'); + * const decoder = new StringDecoder('utf8'); + * + * const cent = Buffer.from([0xC2, 0xA2]); + * console.log(decoder.write(cent)); // Prints: ¢ + * + * const euro = Buffer.from([0xE2, 0x82, 0xAC]); + * console.log(decoder.write(euro)); // Prints: € + * ``` + * + * When a `Buffer` instance is written to the `StringDecoder` instance, an + * internal buffer is used to ensure that the decoded string does not contain + * any incomplete multibyte characters. These are held in the buffer until the + * next call to `stringDecoder.write()` or until `stringDecoder.end()` is called. + * + * In the following example, the three UTF-8 encoded bytes of the European Euro + * symbol (`€`) are written over three separate operations: + * + * ```js + * const { StringDecoder } = require('node:string_decoder'); + * const decoder = new StringDecoder('utf8'); + * + * decoder.write(Buffer.from([0xE2])); + * decoder.write(Buffer.from([0x82])); + * console.log(decoder.end(Buffer.from([0xAC]))); // Prints: € + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/string_decoder.js) + */ +declare module "string_decoder" { + class StringDecoder { + constructor(encoding?: BufferEncoding); + /** + * Returns a decoded string, ensuring that any incomplete multibyte characters at + * the end of the `Buffer`, or `TypedArray`, or `DataView` are omitted from the + * returned string and stored in an internal buffer for the next call to`stringDecoder.write()` or `stringDecoder.end()`. + * @since v0.1.99 + * @param buffer The bytes to decode. + */ + write(buffer: string | Buffer | NodeJS.ArrayBufferView): string; + /** + * Returns any remaining input stored in the internal buffer as a string. Bytes + * representing incomplete UTF-8 and UTF-16 characters will be replaced with + * substitution characters appropriate for the character encoding. + * + * If the `buffer` argument is provided, one final call to `stringDecoder.write()`is performed before returning the remaining input. 
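+     * For illustration, a short sketch of `end()` flushing an incomplete multibyte character:
+     *
+     * ```js
+     * const { StringDecoder } = require('node:string_decoder');
+     * const decoder = new StringDecoder('utf8');
+     * decoder.write(Buffer.from([0xE2, 0x82])); // first two bytes of '€', returns ''
+     * console.log(decoder.end()); // prints the replacement character U+FFFD
+     * ```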
+ * After `end()` is called, the `stringDecoder` object can be reused for new input. + * @since v0.9.3 + * @param buffer The bytes to decode. + */ + end(buffer?: string | Buffer | NodeJS.ArrayBufferView): string; + } +} +declare module "node:string_decoder" { + export * from "string_decoder"; +} diff --git a/node_modules/@types/node/test.d.ts b/node_modules/@types/node/test.d.ts new file mode 100644 index 0000000..149c9e0 --- /dev/null +++ b/node_modules/@types/node/test.d.ts @@ -0,0 +1,1465 @@ +/** + * The `node:test` module facilitates the creation of JavaScript tests. + * To access it: + * + * ```js + * import test from 'node:test'; + * ``` + * + * This module is only available under the `node:` scheme. The following will not + * work: + * + * ```js + * import test from 'test'; + * ``` + * + * Tests created via the `test` module consist of a single function that is + * processed in one of three ways: + * + * 1. A synchronous function that is considered failing if it throws an exception, + * and is considered passing otherwise. + * 2. A function that returns a `Promise` that is considered failing if the`Promise` rejects, and is considered passing if the `Promise` fulfills. + * 3. A function that receives a callback function. If the callback receives any + * truthy value as its first argument, the test is considered failing. If a + * falsy value is passed as the first argument to the callback, the test is + * considered passing. If the test function receives a callback function and + * also returns a `Promise`, the test will fail. + * + * The following example illustrates how tests are written using the`test` module. + * + * ```js + * test('synchronous passing test', (t) => { + * // This test passes because it does not throw an exception. + * assert.strictEqual(1, 1); + * }); + * + * test('synchronous failing test', (t) => { + * // This test fails because it throws an exception. + * assert.strictEqual(1, 2); + * }); + * + * test('asynchronous passing test', async (t) => { + * // This test passes because the Promise returned by the async + * // function is settled and not rejected. + * assert.strictEqual(1, 1); + * }); + * + * test('asynchronous failing test', async (t) => { + * // This test fails because the Promise returned by the async + * // function is rejected. + * assert.strictEqual(1, 2); + * }); + * + * test('failing test using Promises', (t) => { + * // Promises can be used directly as well. + * return new Promise((resolve, reject) => { + * setImmediate(() => { + * reject(new Error('this will cause the test to fail')); + * }); + * }); + * }); + * + * test('callback passing test', (t, done) => { + * // done() is the callback function. When the setImmediate() runs, it invokes + * // done() with no arguments. + * setImmediate(done); + * }); + * + * test('callback failing test', (t, done) => { + * // When the setImmediate() runs, done() is invoked with an Error object and + * // the test fails. + * setImmediate(() => { + * done(new Error('callback failure')); + * }); + * }); + * ``` + * + * If any tests fail, the process exit code is set to `1`. + * @since v18.0.0, v16.17.0 + * @see [source](https://github.com/nodejs/node/blob/v20.4.0/lib/test.js) + */ +declare module "node:test" { + import { Readable } from "node:stream"; + import { AsyncResource } from "node:async_hooks"; + /** + * **Note:**`shard` is used to horizontally parallelize test running across + * machines or processes, ideal for large-scale executions across varied + * environments. 
It's incompatible with `watch` mode, tailored for rapid + * code iteration by automatically rerunning tests on file changes. + * + * ```js + * import { tap } from 'node:test/reporters'; + * import { run } from 'node:test'; + * import process from 'node:process'; + * import path from 'node:path'; + * + * run({ files: [path.resolve('./tests/test.js')] }) + * .compose(tap) + * .pipe(process.stdout); + * ``` + * @since v18.9.0, v16.19.0 + * @param options Configuration options for running tests. The following properties are supported: + */ + function run(options?: RunOptions): TestsStream; + /** + * The `test()` function is the value imported from the `test` module. Each + * invocation of this function results in reporting the test to the `TestsStream`. + * + * The `TestContext` object passed to the `fn` argument can be used to perform + * actions related to the current test. Examples include skipping the test, adding + * additional diagnostic information, or creating subtests. + * + * `test()` returns a `Promise` that fulfills once the test completes. + * if `test()` is called within a `describe()` block, it fulfills immediately. + * The return value can usually be discarded for top level tests. + * However, the return value from subtests should be used to prevent the parent + * test from finishing first and cancelling the subtest + * as shown in the following example. + * + * ```js + * test('top level test', async (t) => { + * // The setTimeout() in the following subtest would cause it to outlive its + * // parent test if 'await' is removed on the next line. Once the parent test + * // completes, it will cancel any outstanding subtests. + * await t.test('longer running subtest', async (t) => { + * return new Promise((resolve, reject) => { + * setTimeout(resolve, 1000); + * }); + * }); + * }); + * ``` + * + * The `timeout` option can be used to fail the test if it takes longer than`timeout` milliseconds to complete. However, it is not a reliable mechanism for + * canceling tests because a running test might block the application thread and + * thus prevent the scheduled cancellation. + * @since v18.0.0, v16.17.0 + * @param [name='The name'] The name of the test, which is displayed when reporting test results. + * @param options Configuration options for the test. The following properties are supported: + * @param [fn='A no-op function'] The function under test. The first argument to this function is a {@link TestContext} object. If the test uses callbacks, the callback function is passed as the + * second argument. + * @return Fulfilled with `undefined` once the test completes, or immediately if the test runs within {@link describe}. + */ + function test(name?: string, fn?: TestFn): Promise; + function test(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function test(options?: TestOptions, fn?: TestFn): Promise; + function test(fn?: TestFn): Promise; + namespace test { + export { after, afterEach, before, beforeEach, describe, it, mock, only, run, skip, test, todo }; + } + /** + * The `describe()` function imported from the `node:test` module. Each + * invocation of this function results in the creation of a Subtest. + * After invocation of top level `describe` functions, + * all top level tests and suites will execute. + * @param [name='The name'] The name of the suite, which is displayed when reporting test results. + * @param options Configuration options for the suite. supports the same options as `test([name][, options][, fn])`. 
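+     * For illustration, a minimal sketch of a suite with one subtest:
+     *
+     * ```js
+     * const { describe, it } = require('node:test');
+     * const assert = require('node:assert');
+     *
+     * describe('math', () => {
+     *   it('adds small numbers', () => {
+     *     assert.strictEqual(1 + 1, 2);
+     *   });
+     * });
+     * ```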
+ * @param [fn='A no-op function'] The function under suite declaring all subtests and subsuites. The first argument to this function is a {@link SuiteContext} object. + * @return Immediately fulfilled with `undefined`. + */ + function describe(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function describe(name?: string, fn?: SuiteFn): Promise; + function describe(options?: TestOptions, fn?: SuiteFn): Promise; + function describe(fn?: SuiteFn): Promise; + namespace describe { + /** + * Shorthand for skipping a suite, same as `describe([name], { skip: true }[, fn])`. + */ + function skip(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function skip(name?: string, fn?: SuiteFn): Promise; + function skip(options?: TestOptions, fn?: SuiteFn): Promise; + function skip(fn?: SuiteFn): Promise; + /** + * Shorthand for marking a suite as `TODO`, same as `describe([name], { todo: true }[, fn])`. + */ + function todo(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function todo(name?: string, fn?: SuiteFn): Promise; + function todo(options?: TestOptions, fn?: SuiteFn): Promise; + function todo(fn?: SuiteFn): Promise; + /** + * Shorthand for marking a suite as `only`, same as `describe([name], { only: true }[, fn])`. + * @since v18.15.0 + */ + function only(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function only(name?: string, fn?: SuiteFn): Promise; + function only(options?: TestOptions, fn?: SuiteFn): Promise; + function only(fn?: SuiteFn): Promise; + } + /** + * Shorthand for `test()`. + * + * The `it()` function is imported from the `node:test` module. + * @since v18.6.0, v16.17.0 + */ + function it(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function it(name?: string, fn?: TestFn): Promise; + function it(options?: TestOptions, fn?: TestFn): Promise; + function it(fn?: TestFn): Promise; + namespace it { + /** + * Shorthand for skipping a test, same as `it([name], { skip: true }[, fn])`. + */ + function skip(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function skip(name?: string, fn?: TestFn): Promise; + function skip(options?: TestOptions, fn?: TestFn): Promise; + function skip(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `TODO`, same as `it([name], { todo: true }[, fn])`. + */ + function todo(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function todo(name?: string, fn?: TestFn): Promise; + function todo(options?: TestOptions, fn?: TestFn): Promise; + function todo(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `only`, same as `it([name], { only: true }[, fn])`. + * @since v18.15.0 + */ + function only(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function only(name?: string, fn?: TestFn): Promise; + function only(options?: TestOptions, fn?: TestFn): Promise; + function only(fn?: TestFn): Promise; + } + /** + * Shorthand for skipping a test, same as `test([name], { skip: true }[, fn])`. + * @since v20.2.0 + */ + function skip(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function skip(name?: string, fn?: TestFn): Promise; + function skip(options?: TestOptions, fn?: TestFn): Promise; + function skip(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `TODO`, same as `test([name], { todo: true }[, fn])`. 
+ * @since v20.2.0 + */ + function todo(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function todo(name?: string, fn?: TestFn): Promise; + function todo(options?: TestOptions, fn?: TestFn): Promise; + function todo(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `only`, same as `test([name], { only: true }[, fn])`. + * @since v20.2.0 + */ + function only(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function only(name?: string, fn?: TestFn): Promise; + function only(options?: TestOptions, fn?: TestFn): Promise; + function only(fn?: TestFn): Promise; + /** + * The type of a function under test. The first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is passed as + * the second argument. + */ + type TestFn = (t: TestContext, done: (result?: any) => void) => void | Promise; + /** + * The type of a function under Suite. + */ + type SuiteFn = (s: SuiteContext) => void | Promise; + interface TestShard { + /** + * A positive integer between 1 and `` that specifies the index of the shard to run. + */ + index: number; + /** + * A positive integer that specifies the total number of shards to split the test files to. + */ + total: number; + } + interface RunOptions { + /** + * If a number is provided, then that many files would run in parallel. + * If truthy, it would run (number of cpu cores - 1) files in parallel. + * If falsy, it would only run one file at a time. + * If unspecified, subtests inherit this value from their parent. + * @default true + */ + concurrency?: number | boolean | undefined; + /** + * An array containing the list of files to run. + * If unspecified, the test runner execution model will be used. + */ + files?: readonly string[] | undefined; + /** + * Allows aborting an in-progress test execution. + * @default undefined + */ + signal?: AbortSignal | undefined; + /** + * A number of milliseconds the test will fail after. + * If unspecified, subtests inherit this value from their parent. + * @default Infinity + */ + timeout?: number | undefined; + /** + * Sets inspector port of test child process. + * If a nullish value is provided, each process gets its own port, + * incremented from the primary's `process.debugPort`. + */ + inspectPort?: number | (() => number) | undefined; + /** + * That can be used to only run tests whose name matches the provided pattern. + * Test name patterns are interpreted as JavaScript regular expressions. + * For each test that is executed, any corresponding test hooks, such as `beforeEach()`, are also run. + */ + testNamePatterns?: string | RegExp | string[] | RegExp[]; + /** + * If truthy, the test context will only run tests that have the `only` option set + */ + only?: boolean; + /** + * A function that accepts the TestsStream instance and can be used to setup listeners before any tests are run. + */ + setup?: (root: Test) => void | Promise; + /** + * Whether to run in watch mode or not. + * @default false + */ + watch?: boolean | undefined; + /** + * Running tests in a specific shard. 
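+         *
+         * A brief editor's sketch (not part of the upstream @types/node docs; the
+         * test file paths are placeholders) of selecting one shard when calling
+         * `run()` in the style shown earlier in this file:
+         *
+         * ```js
+         * import { tap } from 'node:test/reporters';
+         * import { run } from 'node:test';
+         * import process from 'node:process';
+         *
+         * // Execute only shard 1 of 3 over the listed files.
+         * run({
+         *   files: ['./tests/a.test.js', './tests/b.test.js', './tests/c.test.js'],
+         *   shard: { index: 1, total: 3 },
+         * })
+         *   .compose(tap)
+         *   .pipe(process.stdout);
+         * ```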
+ * @default undefined + */ + shard?: TestShard | undefined; + } + class Test extends AsyncResource { + concurrency: number; + nesting: number; + only: boolean; + reporter: TestsStream; + runOnlySubtests: boolean; + testNumber: number; + timeout: number | null; + } + /** + * A successful call to `run()` method will return a new `TestsStream` object, streaming a series of events representing the execution of the tests.`TestsStream` will emit events, in the + * order of the tests definition + * @since v18.9.0, v16.19.0 + */ + class TestsStream extends Readable implements NodeJS.ReadableStream { + addListener(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + addListener(event: "test:fail", listener: (data: TestFail) => void): this; + addListener(event: "test:pass", listener: (data: TestPass) => void): this; + addListener(event: "test:plan", listener: (data: TestPlan) => void): this; + addListener(event: "test:start", listener: (data: TestStart) => void): this; + addListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + addListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + emit(event: "test:diagnostic", data: DiagnosticData): boolean; + emit(event: "test:fail", data: TestFail): boolean; + emit(event: "test:pass", data: TestPass): boolean; + emit(event: "test:plan", data: TestPlan): boolean; + emit(event: "test:start", data: TestStart): boolean; + emit(event: "test:stderr", data: TestStderr): boolean; + emit(event: "test:stdout", data: TestStdout): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + on(event: "test:fail", listener: (data: TestFail) => void): this; + on(event: "test:pass", listener: (data: TestPass) => void): this; + on(event: "test:plan", listener: (data: TestPlan) => void): this; + on(event: "test:start", listener: (data: TestStart) => void): this; + on(event: "test:stderr", listener: (data: TestStderr) => void): this; + on(event: "test:stdout", listener: (data: TestStdout) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + once(event: "test:fail", listener: (data: TestFail) => void): this; + once(event: "test:pass", listener: (data: TestPass) => void): this; + once(event: "test:plan", listener: (data: TestPlan) => void): this; + once(event: "test:start", listener: (data: TestStart) => void): this; + once(event: "test:stderr", listener: (data: TestStderr) => void): this; + once(event: "test:stdout", listener: (data: TestStdout) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + prependListener(event: "test:fail", listener: (data: TestFail) => void): this; + prependListener(event: "test:pass", listener: (data: TestPass) => void): this; + prependListener(event: "test:plan", listener: (data: TestPlan) => void): this; + prependListener(event: "test:start", listener: (data: TestStart) => void): this; + prependListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + prependListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "test:diagnostic", listener: (data: 
DiagnosticData) => void): this; + prependOnceListener(event: "test:fail", listener: (data: TestFail) => void): this; + prependOnceListener(event: "test:pass", listener: (data: TestPass) => void): this; + prependOnceListener(event: "test:plan", listener: (data: TestPlan) => void): this; + prependOnceListener(event: "test:start", listener: (data: TestStart) => void): this; + prependOnceListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + prependOnceListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + } + /** + * An instance of `TestContext` is passed to each test function in order to + * interact with the test runner. However, the `TestContext` constructor is not + * exposed as part of the API. + * @since v18.0.0, v16.17.0 + */ + class TestContext { + /** + * This function is used to create a hook running before subtest of the current test. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v20.1.0 + */ + before: typeof before; + /** + * This function is used to create a hook running before each subtest of the current test. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v18.8.0 + */ + beforeEach: typeof beforeEach; + /** + * This function is used to create a hook that runs after the current test finishes. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v18.13.0 + */ + after: typeof after; + /** + * This function is used to create a hook running after each subtest of the current test. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v18.8.0 + */ + afterEach: typeof afterEach; + /** + * This function is used to write diagnostics to the output. Any diagnostic + * information is included at the end of the test's results. This function does + * not return a value. + * + * ```js + * test('top level test', (t) => { + * t.diagnostic('A diagnostic message'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Message to be reported. + */ + diagnostic(message: string): void; + /** + * The name of the test. + * @since v18.8.0, v16.18.0 + */ + readonly name: string; + /** + * If `shouldRunOnlyTests` is truthy, the test context will only run tests that + * have the `only` option set. Otherwise, all tests are run. If Node.js was not + * started with the `--test-only` command-line option, this function is a + * no-op. + * + * ```js + * test('top level test', (t) => { + * // The test context can be set to run subtests with the 'only' option. + * t.runOnly(true); + * return Promise.all([ + * t.test('this subtest is now skipped'), + * t.test('this subtest is run', { only: true }), + * ]); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param shouldRunOnlyTests Whether or not to run `only` tests. 
+ */ + runOnly(shouldRunOnlyTests: boolean): void; + /** + * ```js + * test('top level test', async (t) => { + * await fetch('some/uri', { signal: t.signal }); + * }); + * ``` + * @since v18.7.0, v16.17.0 + */ + readonly signal: AbortSignal; + /** + * This function causes the test's output to indicate the test as skipped. If`message` is provided, it is included in the output. Calling `skip()` does + * not terminate execution of the test function. This function does not return a + * value. + * + * ```js + * test('top level test', (t) => { + * // Make sure to return here as well if the test contains additional logic. + * t.skip('this is skipped'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Optional skip message. + */ + skip(message?: string): void; + /** + * This function adds a `TODO` directive to the test's output. If `message` is + * provided, it is included in the output. Calling `todo()` does not terminate + * execution of the test function. This function does not return a value. + * + * ```js + * test('top level test', (t) => { + * // This test is marked as `TODO` + * t.todo('this is a todo'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Optional `TODO` message. + */ + todo(message?: string): void; + /** + * This function is used to create subtests under the current test. This function behaves in + * the same fashion as the top level {@link test} function. + * @since v18.0.0 + * @param name The name of the test, which is displayed when reporting test results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. + * @param options Configuration options for the test + * @param fn The function under test. This first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is + * passed as the second argument. Default: A no-op function. + * @returns A {@link Promise} resolved with `undefined` once the test completes. + */ + test: typeof test; + /** + * Each test provides its own MockTracker instance. + */ + readonly mock: MockTracker; + } + /** + * An instance of `SuiteContext` is passed to each suite function in order to + * interact with the test runner. However, the `SuiteContext` constructor is not + * exposed as part of the API. + * @since v18.7.0, v16.17.0 + */ + class SuiteContext { + /** + * The name of the suite. + * @since v18.8.0, v16.18.0 + */ + readonly name: string; + /** + * Can be used to abort test subtasks when the test has been aborted. + * @since v18.7.0, v16.17.0 + */ + readonly signal: AbortSignal; + } + interface TestOptions { + /** + * If a number is provided, then that many tests would run in parallel. + * If truthy, it would run (number of cpu cores - 1) tests in parallel. + * For subtests, it will be `Infinity` tests in parallel. + * If falsy, it would only run one test at a time. + * If unspecified, subtests inherit this value from their parent. + * @default false + */ + concurrency?: number | boolean | undefined; + /** + * If truthy, and the test context is configured to run `only` tests, then this test will be + * run. Otherwise, the test is skipped. + * @default false + */ + only?: boolean | undefined; + /** + * Allows aborting an in-progress test. + * @since v18.8.0 + */ + signal?: AbortSignal | undefined; + /** + * If truthy, the test is skipped. If a string is provided, that string is displayed in the + * test results as the reason for skipping the test. 
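+         *
+         * For example (an editor's illustration, not from the upstream docs), a skip
+         * reason can be passed directly through the options object:
+         *
+         * ```js
+         * test('not implemented yet', { skip: 'blocked on fixture data' }, (t) => {
+         *   // Never executed; reported as skipped with the reason above.
+         * });
+         * ```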
+ * @default false + */ + skip?: boolean | string | undefined; + /** + * A number of milliseconds the test will fail after. If unspecified, subtests inherit this + * value from their parent. + * @default Infinity + * @since v18.7.0 + */ + timeout?: number | undefined; + /** + * If truthy, the test marked as `TODO`. If a string is provided, that string is displayed in + * the test results as the reason why the test is `TODO`. + * @default false + */ + todo?: boolean | string | undefined; + } + /** + * This function is used to create a hook running before running a suite. + * + * ```js + * describe('tests', async () => { + * before(() => console.log('about to run some test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function before(fn?: HookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running after running a suite. + * + * ```js + * describe('tests', async () => { + * after(() => console.log('finished running tests')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function after(fn?: HookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running + * before each subtest of the current suite. + * + * ```js + * describe('tests', async () => { + * beforeEach(() => console.log('about to run a test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function beforeEach(fn?: HookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running + * after each subtest of the current test. + * + * ```js + * describe('tests', async () => { + * afterEach(() => console.log('finished running a test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function afterEach(fn?: HookFn, options?: HookOptions): void; + /** + * The hook function. If the hook uses callbacks, the callback function is passed as the + * second argument. + */ + type HookFn = (s: SuiteContext, done: (result?: any) => void) => any; + /** + * Configuration options for hooks. + * @since v18.8.0 + */ + interface HookOptions { + /** + * Allows aborting an in-progress hook. + */ + signal?: AbortSignal | undefined; + /** + * A number of milliseconds the hook will fail after. If unspecified, subtests inherit this + * value from their parent. 
+ * @default Infinity + */ + timeout?: number | undefined; + } + interface MockFunctionOptions { + /** + * The number of times that the mock will use the behavior of `implementation`. + * Once the mock function has been called `times` times, + * it will automatically restore the behavior of `original`. + * This value must be an integer greater than zero. + * @default Infinity + */ + times?: number | undefined; + } + interface MockMethodOptions extends MockFunctionOptions { + /** + * If `true`, `object[methodName]` is treated as a getter. + * This option cannot be used with the `setter` option. + */ + getter?: boolean | undefined; + /** + * If `true`, `object[methodName]` is treated as a setter. + * This option cannot be used with the `getter` option. + */ + setter?: boolean | undefined; + } + type Mock = F & { + mock: MockFunctionContext; + }; + type NoOpFunction = (...args: any[]) => undefined; + type FunctionPropertyNames = { + [K in keyof T]: T[K] extends Function ? K : never; + }[keyof T]; + /** + * The `MockTracker` class is used to manage mocking functionality. The test runner + * module provides a top level `mock` export which is a `MockTracker` instance. + * Each test also provides its own `MockTracker` instance via the test context's`mock` property. + * @since v19.1.0, v18.13.0 + */ + class MockTracker { + /** + * This function is used to create a mock function. + * + * The following example creates a mock function that increments a counter by one + * on each invocation. The `times` option is used to modify the mock behavior such + * that the first two invocations add two to the counter instead of one. + * + * ```js + * test('mocks a counting function', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne, addTwo, { times: 2 }); + * + * assert.strictEqual(fn(), 2); + * assert.strictEqual(fn(), 4); + * assert.strictEqual(fn(), 5); + * assert.strictEqual(fn(), 6); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param [original='A no-op function'] An optional function to create a mock on. + * @param implementation An optional function used as the mock implementation for `original`. This is useful for creating mocks that exhibit one behavior for a specified number of calls and + * then restore the behavior of `original`. + * @param options Optional configuration options for the mock function. The following properties are supported: + * @return The mocked function. The mocked function contains a special `mock` property, which is an instance of {@link MockFunctionContext}, and can be used for inspecting and changing the + * behavior of the mocked function. + */ + fn(original?: F, options?: MockFunctionOptions): Mock; + fn( + original?: F, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock; + /** + * This function is used to create a mock on an existing object method. The + * following example demonstrates how a mock is created on an existing object + * method. 
+ * + * ```js + * test('spies on an object method', (t) => { + * const number = { + * value: 5, + * subtract(a) { + * return this.value - a; + * }, + * }; + * + * t.mock.method(number, 'subtract'); + * assert.strictEqual(number.subtract.mock.calls.length, 0); + * assert.strictEqual(number.subtract(3), 2); + * assert.strictEqual(number.subtract.mock.calls.length, 1); + * + * const call = number.subtract.mock.calls[0]; + * + * assert.deepStrictEqual(call.arguments, [3]); + * assert.strictEqual(call.result, 2); + * assert.strictEqual(call.error, undefined); + * assert.strictEqual(call.target, undefined); + * assert.strictEqual(call.this, number); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param object The object whose method is being mocked. + * @param methodName The identifier of the method on `object` to mock. If `object[methodName]` is not a function, an error is thrown. + * @param implementation An optional function used as the mock implementation for `object[methodName]`. + * @param options Optional configuration options for the mock method. The following properties are supported: + * @return The mocked method. The mocked method contains a special `mock` property, which is an instance of {@link MockFunctionContext}, and can be used for inspecting and changing the + * behavior of the mocked method. + */ + method< + MockedObject extends object, + MethodName extends FunctionPropertyNames, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): MockedObject[MethodName] extends Function ? Mock + : never; + method< + MockedObject extends object, + MethodName extends FunctionPropertyNames, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation: Implementation, + options?: MockFunctionOptions, + ): MockedObject[MethodName] extends Function ? Mock + : never; + method( + object: MockedObject, + methodName: keyof MockedObject, + options: MockMethodOptions, + ): Mock; + method( + object: MockedObject, + methodName: keyof MockedObject, + implementation: Function, + options: MockMethodOptions, + ): Mock; + + /** + * This function is syntax sugar for `MockTracker.method` with `options.getter`set to `true`. + * @since v19.3.0, v18.13.0 + */ + getter< + MockedObject extends object, + MethodName extends keyof MockedObject, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): Mock<() => MockedObject[MethodName]>; + getter< + MockedObject extends object, + MethodName extends keyof MockedObject, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock<(() => MockedObject[MethodName]) | Implementation>; + /** + * This function is syntax sugar for `MockTracker.method` with `options.setter`set to `true`. 
+ * @since v19.3.0, v18.13.0 + */ + setter< + MockedObject extends object, + MethodName extends keyof MockedObject, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): Mock<(value: MockedObject[MethodName]) => void>; + setter< + MockedObject extends object, + MethodName extends keyof MockedObject, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock<((value: MockedObject[MethodName]) => void) | Implementation>; + /** + * This function restores the default behavior of all mocks that were previously + * created by this `MockTracker` and disassociates the mocks from the`MockTracker` instance. Once disassociated, the mocks can still be used, but the`MockTracker` instance can no longer be + * used to reset their behavior or + * otherwise interact with them. + * + * After each test completes, this function is called on the test context's`MockTracker`. If the global `MockTracker` is used extensively, calling this + * function manually is recommended. + * @since v19.1.0, v18.13.0 + */ + reset(): void; + /** + * This function restores the default behavior of all mocks that were previously + * created by this `MockTracker`. Unlike `mock.reset()`, `mock.restoreAll()` does + * not disassociate the mocks from the `MockTracker` instance. + * @since v19.1.0, v18.13.0 + */ + restoreAll(): void; + timers: MockTimers; + } + const mock: MockTracker; + interface MockFunctionCall< + F extends Function, + ReturnType = F extends (...args: any) => infer T ? T + : F extends abstract new(...args: any) => infer T ? T + : unknown, + Args = F extends (...args: infer Y) => any ? Y + : F extends abstract new(...args: infer Y) => any ? Y + : unknown[], + > { + /** + * An array of the arguments passed to the mock function. + */ + arguments: Args; + /** + * If the mocked function threw then this property contains the thrown value. + */ + error: unknown | undefined; + /** + * The value returned by the mocked function. + * + * If the mocked function threw, it will be `undefined`. + */ + result: ReturnType | undefined; + /** + * An `Error` object whose stack can be used to determine the callsite of the mocked function invocation. + */ + stack: Error; + /** + * If the mocked function is a constructor, this field contains the class being constructed. + * Otherwise this will be `undefined`. + */ + target: F extends abstract new(...args: any) => any ? F : undefined; + /** + * The mocked function's `this` value. + */ + this: unknown; + } + /** + * The `MockFunctionContext` class is used to inspect or manipulate the behavior of + * mocks created via the `MockTracker` APIs. + * @since v19.1.0, v18.13.0 + */ + class MockFunctionContext { + /** + * A getter that returns a copy of the internal array used to track calls to the + * mock. Each entry in the array is an object with the following properties. + * @since v19.1.0, v18.13.0 + */ + readonly calls: Array>; + /** + * This function returns the number of times that this mock has been invoked. This + * function is more efficient than checking `ctx.calls.length` because `ctx.calls`is a getter that creates a copy of the internal call tracking array. + * @since v19.1.0, v18.13.0 + * @return The number of times that this mock has been invoked. + */ + callCount(): number; + /** + * This function is used to change the behavior of an existing mock. 
+ * + * The following example creates a mock function using `t.mock.fn()`, calls the + * mock function, and then changes the mock implementation to a different function. + * + * ```js + * test('changes a mock behavior', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne); + * + * assert.strictEqual(fn(), 1); + * fn.mock.mockImplementation(addTwo); + * assert.strictEqual(fn(), 3); + * assert.strictEqual(fn(), 5); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param implementation The function to be used as the mock's new implementation. + */ + mockImplementation(implementation: Function): void; + /** + * This function is used to change the behavior of an existing mock for a single + * invocation. Once invocation `onCall` has occurred, the mock will revert to + * whatever behavior it would have used had `mockImplementationOnce()` not been + * called. + * + * The following example creates a mock function using `t.mock.fn()`, calls the + * mock function, changes the mock implementation to a different function for the + * next invocation, and then resumes its previous behavior. + * + * ```js + * test('changes a mock behavior once', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne); + * + * assert.strictEqual(fn(), 1); + * fn.mock.mockImplementationOnce(addTwo); + * assert.strictEqual(fn(), 3); + * assert.strictEqual(fn(), 4); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param implementation The function to be used as the mock's implementation for the invocation number specified by `onCall`. + * @param onCall The invocation number that will use `implementation`. If the specified invocation has already occurred then an exception is thrown. + */ + mockImplementationOnce(implementation: Function, onCall?: number): void; + /** + * Resets the call history of the mock function. + * @since v19.3.0, v18.13.0 + */ + resetCalls(): void; + /** + * Resets the implementation of the mock function to its original behavior. The + * mock can still be used after calling this function. + * @since v19.1.0, v18.13.0 + */ + restore(): void; + } + type Timer = "setInterval" | "setTimeout" | "setImmediate" | "Date"; + + interface MockTimersOptions { + apis: Timer[]; + now?: number | Date; + } + /** + * Mocking timers is a technique commonly used in software testing to simulate and + * control the behavior of timers, such as `setInterval` and `setTimeout`, + * without actually waiting for the specified time intervals. + * + * The MockTimers API also allows for mocking of the `Date` constructor and + * `setImmediate`/`clearImmediate` functions. + * + * The `MockTracker` provides a top-level `timers` export + * which is a `MockTimers` instance. + * @since v20.4.0 + * @experimental + */ + class MockTimers { + /** + * Enables timer mocking for the specified timers. + * + * **Note:** When you enable mocking for a specific timer, its associated + * clear function will also be implicitly mocked. + * + * **Note:** Mocking `Date` will affect the behavior of the mocked timers + * as they use the same internal clock. 
+ * + * Example usage without setting initial time: + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['setInterval', 'Date'], now: 1234 }); + * ``` + * + * The above example enables mocking for the `Date` constructor, `setInterval` timer and + * implicitly mocks the `clearInterval` function. Only the `Date` constructor from `globalThis`, + * `setInterval` and `clearInterval` functions from `node:timers`,`node:timers/promises`, and `globalThis` will be mocked. + * + * Example usage with initial time set + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['Date'], now: 1000 }); + * ``` + * + * Example usage with initial Date object as time set + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['Date'], now: new Date() }); + * ``` + * + * Alternatively, if you call `mock.timers.enable()` without any parameters: + * + * All timers (`'setInterval'`, `'clearInterval'`, `'Date'`, `'setImmediate'`, `'clearImmediate'`, `'setTimeout'`, and `'clearTimeout'`) + * will be mocked. + * + * The `setInterval`, `clearInterval`, `setTimeout`, and `clearTimeout` functions from `node:timers`, `node:timers/promises`, + * and `globalThis` will be mocked. + * The `Date` constructor from `globalThis` will be mocked. + * + * If there is no initial epoch set, the initial date will be based on 0 in the Unix epoch. This is `January 1st, 1970, 00:00:00 UTC`. You can set an initial date by passing a now property to the `.enable()` method. This value will be used as the initial date for the mocked Date object. It can either be a positive integer, or another Date object. + * @since v20.4.0 + */ + enable(options?: MockTimersOptions): void; + /** + * You can use the `.setTime()` method to manually move the mocked date to another time. This method only accepts a positive integer. + * Note: This method will execute any mocked timers that are in the past from the new time. + * In the below example we are setting a new time for the mocked date. + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * test('sets the time of a date object', (context) => { + * // Optionally choose what to mock + * context.mock.timers.enable({ apis: ['Date'], now: 100 }); + * assert.strictEqual(Date.now(), 100); + * // Advance in time will also advance the date + * context.mock.timers.setTime(1000); + * context.mock.timers.tick(200); + * assert.strictEqual(Date.now(), 1200); + * }); + * ``` + */ + setTime(time: number): void; + /** + * This function restores the default behavior of all mocks that were previously + * created by this `MockTimers` instance and disassociates the mocks + * from the `MockTracker` instance. + * + * **Note:** After each test completes, this function is called on + * the test context's `MockTracker`. + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.reset(); + * ``` + * @since v20.4.0 + */ + reset(): void; + /** + * Advances time for all mocked timers. + * + * **Note:** This diverges from how `setTimeout` in Node.js behaves and accepts + * only positive numbers. In Node.js, `setTimeout` with negative numbers is + * only supported for web compatibility reasons. + * + * The following example mocks a `setTimeout` function and + * by using `.tick` advances in + * time triggering all pending timers. 
+ * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * + * context.mock.timers.enable({ apis: ['setTimeout'] }); + * + * setTimeout(fn, 9999); + * + * assert.strictEqual(fn.mock.callCount(), 0); + * + * // Advance in time + * context.mock.timers.tick(9999); + * + * assert.strictEqual(fn.mock.callCount(), 1); + * }); + * ``` + * + * Alternativelly, the `.tick` function can be called many times + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * context.mock.timers.enable({ apis: ['setTimeout'] }); + * const nineSecs = 9000; + * setTimeout(fn, nineSecs); + * + * const twoSeconds = 3000; + * context.mock.timers.tick(twoSeconds); + * context.mock.timers.tick(twoSeconds); + * context.mock.timers.tick(twoSeconds); + * + * assert.strictEqual(fn.mock.callCount(), 1); + * }); + * ``` + * + * Advancing time using `.tick` will also advance the time for any `Date` object + * created after the mock was enabled (if `Date` was also set to be mocked). + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * + * context.mock.timers.enable({ apis: ['setTimeout', 'Date'] }); + * setTimeout(fn, 9999); + * + * assert.strictEqual(fn.mock.callCount(), 0); + * assert.strictEqual(Date.now(), 0); + * + * // Advance in time + * context.mock.timers.tick(9999); + * assert.strictEqual(fn.mock.callCount(), 1); + * assert.strictEqual(Date.now(), 9999); + * }); + * ``` + * @since v20.4.0 + */ + tick(milliseconds: number): void; + /** + * Triggers all pending mocked timers immediately. If the `Date` object is also + * mocked, it will also advance the `Date` object to the furthest timer's time. + * + * The example below triggers all pending timers immediately, + * causing them to execute without any delay. + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('runAll functions following the given order', (context) => { + * context.mock.timers.enable({ apis: ['setTimeout', 'Date'] }); + * const results = []; + * setTimeout(() => results.push(1), 9999); + * + * // Notice that if both timers have the same timeout, + * // the order of execution is guaranteed + * setTimeout(() => results.push(3), 8888); + * setTimeout(() => results.push(2), 8888); + * + * assert.deepStrictEqual(results, []); + * + * context.mock.timers.runAll(); + * assert.deepStrictEqual(results, [3, 2, 1]); + * // The Date object is also advanced to the furthest timer's time + * assert.strictEqual(Date.now(), 9999); + * }); + * ``` + * + * **Note:** The `runAll()` function is specifically designed for + * triggering timers in the context of timer mocking. + * It does not have any effect on real-time system + * clocks or actual timers outside of the mocking environment. + * @since v20.4.0 + */ + runAll(): void; + /** + * Calls {@link MockTimers.reset()}. 
+ */ + [Symbol.dispose](): void; + } + export { + after, + afterEach, + before, + beforeEach, + describe, + it, + Mock, + mock, + only, + run, + skip, + test, + test as default, + todo, + }; +} + +interface TestLocationInfo { + /** + * The column number where the test is defined, or + * `undefined` if the test was run through the REPL. + */ + column?: number; + /** + * The path of the test file, `undefined` if test is not ran through a file. + */ + file?: string; + /** + * The line number where the test is defined, or + * `undefined` if the test was run through the REPL. + */ + line?: number; +} +interface DiagnosticData extends TestLocationInfo { + /** + * The diagnostic message. + */ + message: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestFail extends TestLocationInfo { + /** + * Additional execution metadata. + */ + details: { + /** + * The duration of the test in milliseconds. + */ + duration_ms: number; + /** + * The error thrown by the test. + */ + error: Error; + /** + * The type of the test, used to denote whether this is a suite. + * @since 20.0.0, 19.9.0, 18.17.0 + */ + type?: "suite"; + }; + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The ordinal number of the test. + */ + testNumber: number; + /** + * Present if `context.todo` is called. + */ + todo?: string | boolean; + /** + * Present if `context.skip` is called. + */ + skip?: string | boolean; +} +interface TestPass extends TestLocationInfo { + /** + * Additional execution metadata. + */ + details: { + /** + * The duration of the test in milliseconds. + */ + duration_ms: number; + /** + * The type of the test, used to denote whether this is a suite. + * @since 20.0.0, 19.9.0, 18.17.0 + */ + type?: "suite"; + }; + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The ordinal number of the test. + */ + testNumber: number; + /** + * Present if `context.todo` is called. + */ + todo?: string | boolean; + /** + * Present if `context.skip` is called. + */ + skip?: string | boolean; +} +interface TestPlan extends TestLocationInfo { + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The number of subtests that have ran. + */ + count: number; +} +interface TestStart extends TestLocationInfo { + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestStderr extends TestLocationInfo { + /** + * The message written to `stderr` + */ + message: string; +} +interface TestStdout extends TestLocationInfo { + /** + * The message written to `stdout` + */ + message: string; +} +interface TestEnqueue extends TestLocationInfo { + /** + * The test name + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestDequeue extends TestLocationInfo { + /** + * The test name + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} + +/** + * The `node:test/reporters` module exposes the builtin-reporters for `node:test`. + * To access it: + * + * ```js + * import test from 'node:test/reporters'; + * ``` + * + * This module is only available under the `node:` scheme. 
The following will not + * work: + * + * ```js + * import test from 'test/reporters'; + * ``` + * @since v19.9.0 + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/test/reporters.js) + */ +declare module "node:test/reporters" { + import { Transform, TransformOptions } from "node:stream"; + + type TestEvent = + | { type: "test:diagnostic"; data: DiagnosticData } + | { type: "test:fail"; data: TestFail } + | { type: "test:pass"; data: TestPass } + | { type: "test:plan"; data: TestPlan } + | { type: "test:start"; data: TestStart } + | { type: "test:stderr"; data: TestStderr } + | { type: "test:stdout"; data: TestStdout } + | { type: "test:enqueue"; data: TestEnqueue } + | { type: "test:dequeue"; data: TestDequeue } + | { type: "test:watch:drained" }; + type TestEventGenerator = AsyncGenerator; + + /** + * The `dot` reporter outputs the test results in a compact format, + * where each passing test is represented by a `.`, + * and each failing test is represented by a `X`. + */ + function dot(source: TestEventGenerator): AsyncGenerator<"\n" | "." | "X", void>; + /** + * The `tap` reporter outputs the test results in the [TAP](https://testanything.org/) format. + */ + function tap(source: TestEventGenerator): AsyncGenerator; + /** + * The `spec` reporter outputs the test results in a human-readable format. + */ + class Spec extends Transform { + constructor(); + } + /** + * The `junit` reporter outputs test results in a jUnit XML format + */ + function junit(source: TestEventGenerator): AsyncGenerator; + class Lcov extends Transform { + constructor(opts?: TransformOptions); + } + export { dot, junit, Lcov as lcov, Spec as spec, tap, TestEvent }; +} diff --git a/node_modules/@types/node/timers.d.ts b/node_modules/@types/node/timers.d.ts new file mode 100644 index 0000000..039f31f --- /dev/null +++ b/node_modules/@types/node/timers.d.ts @@ -0,0 +1,240 @@ +/** + * The `timer` module exposes a global API for scheduling functions to + * be called at some future period of time. Because the timer functions are + * globals, there is no need to call `require('node:timers')` to use the API. + * + * The timer functions within Node.js implement a similar API as the timers API + * provided by Web Browsers but use a different internal implementation that is + * built around the Node.js [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#setimmediate-vs-settimeout). + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/timers.js) + */ +declare module "timers" { + import { Abortable } from "node:events"; + import { + setImmediate as setImmediatePromise, + setInterval as setIntervalPromise, + setTimeout as setTimeoutPromise, + } from "node:timers/promises"; + interface TimerOptions extends Abortable { + /** + * Set to `false` to indicate that the scheduled `Timeout` + * should not require the Node.js event loop to remain active. + * @default true + */ + ref?: boolean | undefined; + } + let setTimeout: typeof global.setTimeout; + let clearTimeout: typeof global.clearTimeout; + let setInterval: typeof global.setInterval; + let clearInterval: typeof global.clearInterval; + let setImmediate: typeof global.setImmediate; + let clearImmediate: typeof global.clearImmediate; + global { + namespace NodeJS { + // compatibility with older typings + interface Timer extends RefCounted { + hasRef(): boolean; + refresh(): this; + [Symbol.toPrimitive](): number; + } + /** + * This object is created internally and is returned from `setImmediate()`. 
It + * can be passed to `clearImmediate()` in order to cancel the scheduled + * actions. + * + * By default, when an immediate is scheduled, the Node.js event loop will continue + * running as long as the immediate is active. The `Immediate` object returned by `setImmediate()` exports both `immediate.ref()` and `immediate.unref()`functions that can be used to + * control this default behavior. + */ + class Immediate implements RefCounted { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the`Immediate` is active. Calling `immediate.ref()` multiple times will have no + * effect. + * + * By default, all `Immediate` objects are "ref'ed", making it normally unnecessary + * to call `immediate.ref()` unless `immediate.unref()` had been called previously. + * @since v9.7.0 + * @return a reference to `immediate` + */ + ref(): this; + /** + * When called, the active `Immediate` object will not require the Node.js event + * loop to remain active. If there is no other activity keeping the event loop + * running, the process may exit before the `Immediate` object's callback is + * invoked. Calling `immediate.unref()` multiple times will have no effect. + * @since v9.7.0 + * @return a reference to `immediate` + */ + unref(): this; + /** + * If true, the `Immediate` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + _onImmediate: Function; // to distinguish it from the Timeout class + /** + * Cancels the immediate. This is similar to calling `clearImmediate()`. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + } + /** + * This object is created internally and is returned from `setTimeout()` and `setInterval()`. It can be passed to either `clearTimeout()` or `clearInterval()` in order to cancel the + * scheduled actions. + * + * By default, when a timer is scheduled using either `setTimeout()` or `setInterval()`, the Node.js event loop will continue running as long as the + * timer is active. Each of the `Timeout` objects returned by these functions + * export both `timeout.ref()` and `timeout.unref()` functions that can be used to + * control this default behavior. + */ + class Timeout implements Timer { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the`Timeout` is active. Calling `timeout.ref()` multiple times will have no effect. + * + * By default, all `Timeout` objects are "ref'ed", making it normally unnecessary + * to call `timeout.ref()` unless `timeout.unref()` had been called previously. + * @since v0.9.1 + * @return a reference to `timeout` + */ + ref(): this; + /** + * When called, the active `Timeout` object will not require the Node.js event loop + * to remain active. If there is no other activity keeping the event loop running, + * the process may exit before the `Timeout` object's callback is invoked. Calling`timeout.unref()` multiple times will have no effect. + * @since v0.9.1 + * @return a reference to `timeout` + */ + unref(): this; + /** + * If true, the `Timeout` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + /** + * Sets the timer's start time to the current time, and reschedules the timer to + * call its callback at the previously specified duration adjusted to the current + * time. This is useful for refreshing a timer without allocating a new + * JavaScript object. + * + * Using this on a timer that has already called its callback will reactivate the + * timer. 
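+             *
+             * A small editor's sketch (not part of the upstream docs): push back an
+             * idle deadline each time activity is observed, reusing one Timeout object.
+             *
+             * ```js
+             * const idleTimer = setTimeout(() => console.log('idle for 30s'), 30_000);
+             * function onActivity() {
+             *   // Restart the 30s countdown without allocating a new timer.
+             *   idleTimer.refresh();
+             * }
+             * ```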
+ * @since v10.2.0 + * @return a reference to `timeout` + */ + refresh(): this; + [Symbol.toPrimitive](): number; + /** + * Cancels the timeout. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + } + } + /** + * Schedules execution of a one-time `callback` after `delay` milliseconds. + * + * The `callback` will likely not be invoked in precisely `delay` milliseconds. + * Node.js makes no guarantees about the exact timing of when callbacks will fire, + * nor of their ordering. The callback will be called as close as possible to the + * time specified. + * + * When `delay` is larger than `2147483647` or less than `1`, the `delay`will be set to `1`. Non-integer delays are truncated to an integer. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setTimeout()`. + * @since v0.0.1 + * @param callback The function to call when the timer elapses. + * @param [delay=1] The number of milliseconds to wait before calling the `callback`. + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearTimeout} + */ + function setTimeout( + callback: (...args: TArgs) => void, + ms?: number, + ...args: TArgs + ): NodeJS.Timeout; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setTimeout(callback: (args: void) => void, ms?: number): NodeJS.Timeout; + namespace setTimeout { + const __promisify__: typeof setTimeoutPromise; + } + /** + * Cancels a `Timeout` object created by `setTimeout()`. + * @since v0.0.1 + * @param timeout A `Timeout` object as returned by {@link setTimeout} or the `primitive` of the `Timeout` object as a string or a number. + */ + function clearTimeout(timeoutId: NodeJS.Timeout | string | number | undefined): void; + /** + * Schedules repeated execution of `callback` every `delay` milliseconds. + * + * When `delay` is larger than `2147483647` or less than `1`, the `delay` will be + * set to `1`. Non-integer delays are truncated to an integer. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setInterval()`. + * @since v0.0.1 + * @param callback The function to call when the timer elapses. + * @param [delay=1] The number of milliseconds to wait before calling the `callback`. + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearInterval} + */ + function setInterval( + callback: (...args: TArgs) => void, + ms?: number, + ...args: TArgs + ): NodeJS.Timeout; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setInterval(callback: (args: void) => void, ms?: number): NodeJS.Timeout; + namespace setInterval { + const __promisify__: typeof setIntervalPromise; + } + /** + * Cancels a `Timeout` object created by `setInterval()`. + * @since v0.0.1 + * @param timeout A `Timeout` object as returned by {@link setInterval} or the `primitive` of the `Timeout` object as a string or a number. + */ + function clearInterval(intervalId: NodeJS.Timeout | string | number | undefined): void; + /** + * Schedules the "immediate" execution of the `callback` after I/O events' + * callbacks. 
+ * + * When multiple calls to `setImmediate()` are made, the `callback` functions are + * queued for execution in the order in which they are created. The entire callback + * queue is processed every event loop iteration. If an immediate timer is queued + * from inside an executing callback, that timer will not be triggered until the + * next event loop iteration. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setImmediate()`. + * @since v0.9.1 + * @param callback The function to call at the end of this turn of the Node.js `Event Loop` + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearImmediate} + */ + function setImmediate( + callback: (...args: TArgs) => void, + ...args: TArgs + ): NodeJS.Immediate; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setImmediate(callback: (args: void) => void): NodeJS.Immediate; + namespace setImmediate { + const __promisify__: typeof setImmediatePromise; + } + /** + * Cancels an `Immediate` object created by `setImmediate()`. + * @since v0.9.1 + * @param immediate An `Immediate` object as returned by {@link setImmediate}. + */ + function clearImmediate(immediateId: NodeJS.Immediate | undefined): void; + function queueMicrotask(callback: () => void): void; + } +} +declare module "node:timers" { + export * from "timers"; +} diff --git a/node_modules/@types/node/timers/promises.d.ts b/node_modules/@types/node/timers/promises.d.ts new file mode 100644 index 0000000..5a54dc7 --- /dev/null +++ b/node_modules/@types/node/timers/promises.d.ts @@ -0,0 +1,93 @@ +/** + * The `timers/promises` API provides an alternative set of timer functions + * that return `Promise` objects. The API is accessible via`require('node:timers/promises')`. + * + * ```js + * import { + * setTimeout, + * setImmediate, + * setInterval, + * } from 'timers/promises'; + * ``` + * @since v15.0.0 + */ +declare module "timers/promises" { + import { TimerOptions } from "node:timers"; + /** + * ```js + * import { + * setTimeout, + * } from 'timers/promises'; + * + * const res = await setTimeout(100, 'result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param [delay=1] The number of milliseconds to wait before fulfilling the promise. + * @param value A value with which the promise is fulfilled. + */ + function setTimeout(delay?: number, value?: T, options?: TimerOptions): Promise; + /** + * ```js + * import { + * setImmediate, + * } from 'timers/promises'; + * + * const res = await setImmediate('result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param value A value with which the promise is fulfilled. + */ + function setImmediate(value?: T, options?: TimerOptions): Promise; + /** + * Returns an async iterator that generates values in an interval of `delay` ms. + * If `ref` is `true`, you need to call `next()` of async iterator explicitly + * or implicitly to keep the event loop alive. 
+ * + * ```js + * import { + * setInterval, + * } from 'timers/promises'; + * + * const interval = 100; + * for await (const startTime of setInterval(interval, Date.now())) { + * const now = Date.now(); + * console.log(now); + * if ((now - startTime) > 1000) + * break; + * } + * console.log(Date.now()); + * ``` + * @since v15.9.0 + */ + function setInterval(delay?: number, value?: T, options?: TimerOptions): AsyncIterable; + interface Scheduler { + /** + * ```js + * import { scheduler } from 'node:timers/promises'; + * + * await scheduler.wait(1000); // Wait one second before continuing + * ``` + * An experimental API defined by the Scheduling APIs draft specification being developed as a standard Web Platform API. + * Calling timersPromises.scheduler.wait(delay, options) is roughly equivalent to calling timersPromises.setTimeout(delay, undefined, options) except that the ref option is not supported. + * @since v16.14.0 + * @experimental + * @param [delay=1] The number of milliseconds to wait before fulfilling the promise. + */ + wait: (delay?: number, options?: TimerOptions) => Promise; + /** + * An experimental API defined by the Scheduling APIs draft specification being developed as a standard Web Platform API. + * Calling timersPromises.scheduler.yield() is equivalent to calling timersPromises.setImmediate() with no arguments. + * @since v16.14.0 + * @experimental + */ + yield: () => Promise; + } + const scheduler: Scheduler; +} +declare module "node:timers/promises" { + export * from "timers/promises"; +} diff --git a/node_modules/@types/node/tls.d.ts b/node_modules/@types/node/tls.d.ts new file mode 100644 index 0000000..9f6d6be --- /dev/null +++ b/node_modules/@types/node/tls.d.ts @@ -0,0 +1,1210 @@ +/** + * The `node:tls` module provides an implementation of the Transport Layer Security + * (TLS) and Secure Socket Layer (SSL) protocols that is built on top of OpenSSL. + * The module can be accessed using: + * + * ```js + * const tls = require('node:tls'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/tls.js) + */ +declare module "tls" { + import { X509Certificate } from "node:crypto"; + import * as net from "node:net"; + import * as stream from "stream"; + const CLIENT_RENEG_LIMIT: number; + const CLIENT_RENEG_WINDOW: number; + interface Certificate { + /** + * Country code. + */ + C: string; + /** + * Street. + */ + ST: string; + /** + * Locality. + */ + L: string; + /** + * Organization. + */ + O: string; + /** + * Organizational unit. + */ + OU: string; + /** + * Common name. + */ + CN: string; + } + interface PeerCertificate { + /** + * `true` if a Certificate Authority (CA), `false` otherwise. + * @since v18.13.0 + */ + ca: boolean; + /** + * The DER encoded X.509 certificate data. + */ + raw: Buffer; + /** + * The certificate subject. + */ + subject: Certificate; + /** + * The certificate issuer, described in the same terms as the `subject`. + */ + issuer: Certificate; + /** + * The date-time the certificate is valid from. + */ + valid_from: string; + /** + * The date-time the certificate is valid to. + */ + valid_to: string; + /** + * The certificate serial number, as a hex string. + */ + serialNumber: string; + /** + * The SHA-1 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. + */ + fingerprint: string; + /** + * The SHA-256 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. 
+ */ + fingerprint256: string; + /** + * The SHA-512 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. + */ + fingerprint512: string; + /** + * The extended key usage, a set of OIDs. + */ + ext_key_usage?: string[]; + /** + * A string containing concatenated names for the subject, + * an alternative to the `subject` names. + */ + subjectaltname?: string; + /** + * An array describing the AuthorityInfoAccess, used with OCSP. + */ + infoAccess?: NodeJS.Dict; + /** + * For RSA keys: The RSA bit size. + * + * For EC keys: The key size in bits. + */ + bits?: number; + /** + * The RSA exponent, as a string in hexadecimal number notation. + */ + exponent?: string; + /** + * The RSA modulus, as a hexadecimal string. + */ + modulus?: string; + /** + * The public key. + */ + pubkey?: Buffer; + /** + * The ASN.1 name of the OID of the elliptic curve. + * Well-known curves are identified by an OID. + * While it is unusual, it is possible that the curve + * is identified by its mathematical properties, + * in which case it will not have an OID. + */ + asn1Curve?: string; + /** + * The NIST name for the elliptic curve,if it has one + * (not all well-known curves have been assigned names by NIST). + */ + nistCurve?: string; + } + interface DetailedPeerCertificate extends PeerCertificate { + /** + * The issuer certificate object. + * For self-signed certificates, this may be a circular reference. + */ + issuerCertificate: DetailedPeerCertificate; + } + interface CipherNameAndProtocol { + /** + * The cipher name. + */ + name: string; + /** + * SSL/TLS protocol version. + */ + version: string; + /** + * IETF name for the cipher suite. + */ + standardName: string; + } + interface EphemeralKeyInfo { + /** + * The supported types are 'DH' and 'ECDH'. + */ + type: string; + /** + * The name property is available only when type is 'ECDH'. + */ + name?: string | undefined; + /** + * The size of parameter of an ephemeral key exchange. + */ + size: number; + } + interface KeyObject { + /** + * Private keys in PEM format. + */ + pem: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface PxfObject { + /** + * PFX or PKCS12 encoded private key and certificate chain. + */ + buf: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface TLSSocketOptions extends SecureContextOptions, CommonConnectionOptions { + /** + * If true the TLS socket will be instantiated in server-mode. + * Defaults to false. + */ + isServer?: boolean | undefined; + /** + * An optional net.Server instance. + */ + server?: net.Server | undefined; + /** + * An optional Buffer instance containing a TLS session. + */ + session?: Buffer | undefined; + /** + * If true, specifies that the OCSP status request extension will be + * added to the client hello and an 'OCSPResponse' event will be + * emitted on the socket before establishing a secure communication + */ + requestOCSP?: boolean | undefined; + } + /** + * Performs transparent encryption of written data and all required TLS + * negotiation. + * + * Instances of `tls.TLSSocket` implement the duplex `Stream` interface. + * + * Methods that return TLS connection metadata (e.g.{@link TLSSocket.getPeerCertificate}) will only return data while the + * connection is open. + * @since v0.11.4 + */ + class TLSSocket extends net.Socket { + /** + * Construct a new tls.TLSSocket object from an existing TCP socket. 
+ */ + constructor(socket: net.Socket | stream.Duplex, options?: TLSSocketOptions); + /** + * This property is `true` if the peer certificate was signed by one of the CAs + * specified when creating the `tls.TLSSocket` instance, otherwise `false`. + * @since v0.11.4 + */ + authorized: boolean; + /** + * Returns the reason why the peer's certificate was not been verified. This + * property is set only when `tlsSocket.authorized === false`. + * @since v0.11.4 + */ + authorizationError: Error; + /** + * Always returns `true`. This may be used to distinguish TLS sockets from regular`net.Socket` instances. + * @since v0.11.4 + */ + encrypted: true; + /** + * String containing the selected ALPN protocol. + * Before a handshake has completed, this value is always null. + * When a handshake is completed but not ALPN protocol was selected, tlsSocket.alpnProtocol equals false. + */ + alpnProtocol: string | false | null; + /** + * Returns an object representing the local certificate. The returned object has + * some properties corresponding to the fields of the certificate. + * + * See {@link TLSSocket.getPeerCertificate} for an example of the certificate + * structure. + * + * If there is no local certificate, an empty object will be returned. If the + * socket has been destroyed, `null` will be returned. + * @since v11.2.0 + */ + getCertificate(): PeerCertificate | object | null; + /** + * Returns an object containing information on the negotiated cipher suite. + * + * For example, a TLSv1.2 protocol with AES256-SHA cipher: + * + * ```json + * { + * "name": "AES256-SHA", + * "standardName": "TLS_RSA_WITH_AES_256_CBC_SHA", + * "version": "SSLv3" + * } + * ``` + * + * See [SSL\_CIPHER\_get\_name](https://www.openssl.org/docs/man1.1.1/man3/SSL_CIPHER_get_name.html) for more information. + * @since v0.11.4 + */ + getCipher(): CipherNameAndProtocol; + /** + * Returns an object representing the type, name, and size of parameter of + * an ephemeral key exchange in `perfect forward secrecy` on a client + * connection. It returns an empty object when the key exchange is not + * ephemeral. As this is only supported on a client socket; `null` is returned + * if called on a server socket. The supported types are `'DH'` and `'ECDH'`. The`name` property is available only when type is `'ECDH'`. + * + * For example: `{ type: 'ECDH', name: 'prime256v1', size: 256 }`. + * @since v5.0.0 + */ + getEphemeralKeyInfo(): EphemeralKeyInfo | object | null; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that has been sent to the socket as part of a SSL/TLS handshake, or `undefined` if no `Finished` message has been sent yet. + */ + getFinished(): Buffer | undefined; + /** + * Returns an object representing the peer's certificate. If the peer does not + * provide a certificate, an empty object will be returned. If the socket has been + * destroyed, `null` will be returned. + * + * If the full certificate chain was requested, each certificate will include an`issuerCertificate` property containing an object representing its issuer's + * certificate. 
+ * @since v0.11.4 + * @param detailed Include the full certificate chain if `true`, otherwise include just the peer's certificate. + * @return A certificate object. + */ + getPeerCertificate(detailed: true): DetailedPeerCertificate; + getPeerCertificate(detailed?: false): PeerCertificate; + getPeerCertificate(detailed?: boolean): PeerCertificate | DetailedPeerCertificate; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_peer_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that is expected or has actually been received from the socket as part of a SSL/TLS handshake, or `undefined` if there is no `Finished` message so + * far. + */ + getPeerFinished(): Buffer | undefined; + /** + * Returns a string containing the negotiated SSL/TLS protocol version of the + * current connection. The value `'unknown'` will be returned for connected + * sockets that have not completed the handshaking process. The value `null` will + * be returned for server sockets or disconnected client sockets. + * + * Protocol versions are: + * + * * `'SSLv3'` + * * `'TLSv1'` + * * `'TLSv1.1'` + * * `'TLSv1.2'` + * * `'TLSv1.3'` + * + * See the OpenSSL [`SSL_get_version`](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_version.html) documentation for more information. + * @since v5.7.0 + */ + getProtocol(): string | null; + /** + * Returns the TLS session data or `undefined` if no session was + * negotiated. On the client, the data can be provided to the `session` option of {@link connect} to resume the connection. On the server, it may be useful + * for debugging. + * + * See `Session Resumption` for more information. + * + * Note: `getSession()` works only for TLSv1.2 and below. For TLSv1.3, applications + * must use the `'session'` event (it also works for TLSv1.2 and below). + * @since v0.11.4 + */ + getSession(): Buffer | undefined; + /** + * See [SSL\_get\_shared\_sigalgs](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_shared_sigalgs.html) for more information. + * @since v12.11.0 + * @return List of signature algorithms shared between the server and the client in the order of decreasing preference. + */ + getSharedSigalgs(): string[]; + /** + * For a client, returns the TLS session ticket if one is available, or`undefined`. For a server, always returns `undefined`. + * + * It may be useful for debugging. + * + * See `Session Resumption` for more information. + * @since v0.11.4 + */ + getTLSTicket(): Buffer | undefined; + /** + * See `Session Resumption` for more information. + * @since v0.5.6 + * @return `true` if the session was reused, `false` otherwise. + */ + isSessionReused(): boolean; + /** + * The `tlsSocket.renegotiate()` method initiates a TLS renegotiation process. + * Upon completion, the `callback` function will be passed a single argument + * that is either an `Error` (if the request failed) or `null`. + * + * This method can be used to request a peer's certificate after the secure + * connection has been established. + * + * When running as the server, the socket will be destroyed with an error after`handshakeTimeout` timeout. 
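Editor's note: the connection-metadata getters above are typically read once the socket is secure. A minimal sketch that connects out and logs what was negotiated (the host name is a placeholder):

```js
const tls = require('node:tls');

// 'example.com' is a placeholder host used only for illustration.
const socket = tls.connect(443, 'example.com', { servername: 'example.com' }, () => {
  console.log('protocol:', socket.getProtocol());   // e.g. 'TLSv1.3'
  console.log('cipher:', socket.getCipher().name);
  console.log('session reused:', socket.isSessionReused());

  const peer = socket.getPeerCertificate();
  console.log('peer subject CN:', peer.subject && peer.subject.CN);
  console.log('valid until:', peer.valid_to);

  socket.end();
});

socket.on('error', (err) => console.error('TLS error:', err.message));
```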
+ * + * For TLSv1.3, renegotiation cannot be initiated, it is not supported by the + * protocol. + * @since v0.11.8 + * @param callback If `renegotiate()` returned `true`, callback is attached once to the `'secure'` event. If `renegotiate()` returned `false`, `callback` will be called in the next tick with + * an error, unless the `tlsSocket` has been destroyed, in which case `callback` will not be called at all. + * @return `true` if renegotiation was initiated, `false` otherwise. + */ + renegotiate( + options: { + rejectUnauthorized?: boolean | undefined; + requestCert?: boolean | undefined; + }, + callback: (err: Error | null) => void, + ): undefined | boolean; + /** + * The `tlsSocket.setMaxSendFragment()` method sets the maximum TLS fragment size. + * Returns `true` if setting the limit succeeded; `false` otherwise. + * + * Smaller fragment sizes decrease the buffering latency on the client: larger + * fragments are buffered by the TLS layer until the entire fragment is received + * and its integrity is verified; large fragments can span multiple roundtrips + * and their processing can be delayed due to packet loss or reordering. However, + * smaller fragments add extra TLS framing bytes and CPU overhead, which may + * decrease overall server throughput. + * @since v0.11.11 + * @param [size=16384] The maximum TLS fragment size. The maximum value is `16384`. + */ + setMaxSendFragment(size: number): boolean; + /** + * Disables TLS renegotiation for this `TLSSocket` instance. Once called, attempts + * to renegotiate will trigger an `'error'` event on the `TLSSocket`. + * @since v8.4.0 + */ + disableRenegotiation(): void; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. + * + * The format of the output is identical to the output of`openssl s_client -trace` or `openssl s_server -trace`. While it is produced by + * OpenSSL's `SSL_trace()` function, the format is undocumented, can change + * without notice, and should not be relied on. + * @since v12.2.0 + */ + enableTrace(): void; + /** + * Returns the peer certificate as an `X509Certificate` object. + * + * If there is no peer certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getPeerX509Certificate(): X509Certificate | undefined; + /** + * Returns the local certificate as an `X509Certificate` object. + * + * If there is no local certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getX509Certificate(): X509Certificate | undefined; + /** + * Keying material is used for validations to prevent different kind of attacks in + * network protocols, for example in the specifications of IEEE 802.1X. + * + * Example + * + * ```js + * const keyingMaterial = tlsSocket.exportKeyingMaterial( + * 128, + * 'client finished'); + * + * /* + * Example return value of keyingMaterial: + * + * + * ``` + * + * See the OpenSSL [`SSL_export_keying_material`](https://www.openssl.org/docs/man1.1.1/man3/SSL_export_keying_material.html) documentation for more + * information. + * @since v13.10.0, v12.17.0 + * @param length number of bytes to retrieve from keying material + * @param label an application specific label, typically this will be a value from the [IANA Exporter Label + * Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels). + * @param context Optionally provide a context. 
+ * @return requested bytes of the keying material + */ + exportKeyingMaterial(length: number, label: string, context: Buffer): Buffer; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + addListener(event: "secureConnect", listener: () => void): this; + addListener(event: "session", listener: (session: Buffer) => void): this; + addListener(event: "keylog", listener: (line: Buffer) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "OCSPResponse", response: Buffer): boolean; + emit(event: "secureConnect"): boolean; + emit(event: "session", session: Buffer): boolean; + emit(event: "keylog", line: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "OCSPResponse", listener: (response: Buffer) => void): this; + on(event: "secureConnect", listener: () => void): this; + on(event: "session", listener: (session: Buffer) => void): this; + on(event: "keylog", listener: (line: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "OCSPResponse", listener: (response: Buffer) => void): this; + once(event: "secureConnect", listener: () => void): this; + once(event: "session", listener: (session: Buffer) => void): this; + once(event: "keylog", listener: (line: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + prependListener(event: "secureConnect", listener: () => void): this; + prependListener(event: "session", listener: (session: Buffer) => void): this; + prependListener(event: "keylog", listener: (line: Buffer) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + prependOnceListener(event: "secureConnect", listener: () => void): this; + prependOnceListener(event: "session", listener: (session: Buffer) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer) => void): this; + } + interface CommonConnectionOptions { + /** + * An optional TLS context object from tls.createSecureContext() + */ + secureContext?: SecureContext | undefined; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. + * @default false + */ + enableTrace?: boolean | undefined; + /** + * If true the server will request a certificate from clients that + * connect and attempt to verify that certificate. Defaults to + * false. + */ + requestCert?: boolean | undefined; + /** + * An array of strings or a Buffer naming possible ALPN protocols. + * (Protocols should be ordered by their priority.) + */ + ALPNProtocols?: string[] | Uint8Array[] | Uint8Array | undefined; + /** + * SNICallback(servername, cb) A function that will be + * called if the client supports SNI TLS extension. Two arguments + * will be passed when called: servername and cb. SNICallback should + * invoke cb(null, ctx), where ctx is a SecureContext instance. + * (tls.createSecureContext(...) can be used to get a proper + * SecureContext.) If SNICallback wasn't provided the default callback + * with high-level API will be used (see below). 
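Editor's note: the `'keylog'` event listed in the overloads above emits NSS key log lines as they are generated. A sketch that appends them to a file readable by Wireshark; the output path and host are assumptions:

```js
const tls = require('node:tls');
const fs = require('node:fs');

// Placeholder path; Wireshark reads this format via its (Pre)-Master-Secret log setting.
const keylog = fs.createWriteStream('/tmp/tls-keys.log', { flags: 'a' });

// 'example.com' is a placeholder host used only for illustration.
const socket = tls.connect(443, 'example.com', { servername: 'example.com' }, () => {
  socket.end();
});

// Each `line` is a Buffer containing one key log entry.
socket.on('keylog', (line) => keylog.write(line));
```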
+ */ + SNICallback?: ((servername: string, cb: (err: Error | null, ctx?: SecureContext) => void) => void) | undefined; + /** + * If true the server will reject any connection which is not + * authorized with the list of supplied CAs. This option only has an + * effect if requestCert is true. + * @default true + */ + rejectUnauthorized?: boolean | undefined; + } + interface TlsOptions extends SecureContextOptions, CommonConnectionOptions, net.ServerOpts { + /** + * Abort the connection if the SSL/TLS handshake does not finish in the + * specified number of milliseconds. A 'tlsClientError' is emitted on + * the tls.Server object whenever a handshake times out. Default: + * 120000 (120 seconds). + */ + handshakeTimeout?: number | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. + */ + sessionTimeout?: number | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + */ + ticketKeys?: Buffer | undefined; + /** + * @param socket + * @param identity identity parameter sent from the client. + * @return pre-shared key that must either be + * a buffer or `null` to stop the negotiation process. Returned PSK must be + * compatible with the selected cipher's digest. + * + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with the identity provided by the client. + * If the return value is `null` the negotiation process will stop and an + * "unknown_psk_identity" alert message will be sent to the other party. + * If the server wishes to hide the fact that the PSK identity was not known, + * the callback must provide some random data as `psk` to make the connection + * fail with "decrypt_error" before negotiation is finished. + * PSK ciphers are disabled by default, and using TLS-PSK thus + * requires explicitly specifying a cipher suite with the `ciphers` option. + * More information can be found in the RFC 4279. + */ + pskCallback?(socket: TLSSocket, identity: string): DataView | NodeJS.TypedArray | null; + /** + * hint to send to a client to help + * with selecting the identity during TLS-PSK negotiation. Will be ignored + * in TLS 1.3. Upon failing to set pskIdentityHint `tlsClientError` will be + * emitted with `ERR_TLS_PSK_SET_IDENTIY_HINT_FAILED` code. + */ + pskIdentityHint?: string | undefined; + } + interface PSKCallbackNegotation { + psk: DataView | NodeJS.TypedArray; + identity: string; + } + interface ConnectionOptions extends SecureContextOptions, CommonConnectionOptions { + host?: string | undefined; + port?: number | undefined; + path?: string | undefined; // Creates unix socket connection to path. If this option is specified, `host` and `port` are ignored. + socket?: stream.Duplex | undefined; // Establish secure connection on a given socket rather than creating a new socket + checkServerIdentity?: typeof checkServerIdentity | undefined; + servername?: string | undefined; // SNI TLS Extension + session?: Buffer | undefined; + minDHSize?: number | undefined; + lookup?: net.LookupFunction | undefined; + timeout?: number | undefined; + /** + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with optional identity `hint` provided by the server or `null` + * in case of TLS 1.3 where `hint` was removed. 
+ * It will be necessary to provide a custom `tls.checkServerIdentity()` + * for the connection as the default one will try to check hostname/IP + * of the server against the certificate but that's not applicable for PSK + * because there won't be a certificate present. + * More information can be found in the RFC 4279. + * + * @param hint message sent from the server to help client + * decide which identity to use during negotiation. + * Always `null` if TLS 1.3 is used. + * @returns Return `null` to stop the negotiation process. `psk` must be + * compatible with the selected cipher's digest. + * `identity` must use UTF-8 encoding. + */ + pskCallback?(hint: string | null): PSKCallbackNegotation | null; + } + /** + * Accepts encrypted connections using TLS or SSL. + * @since v0.3.2 + */ + class Server extends net.Server { + constructor(secureConnectionListener?: (socket: TLSSocket) => void); + constructor(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void); + /** + * The `server.addContext()` method adds a secure context that will be used if + * the client request's SNI name matches the supplied `hostname` (or wildcard). + * + * When there are multiple matching contexts, the most recently added one is + * used. + * @since v0.5.3 + * @param hostname A SNI host name or wildcard (e.g. `'*'`) + * @param context An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc), or a TLS context object created + * with {@link createSecureContext} itself. + */ + addContext(hostname: string, context: SecureContextOptions): void; + /** + * Returns the session ticket keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @return A 48-byte buffer containing the session ticket keys. + */ + getTicketKeys(): Buffer; + /** + * The `server.setSecureContext()` method replaces the secure context of an + * existing server. Existing connections to the server are not interrupted. + * @since v11.0.0 + * @param options An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc). + */ + setSecureContext(options: SecureContextOptions): void; + /** + * Sets the session ticket keys. + * + * Changes to the ticket keys are effective only for future server connections. + * Existing or currently pending server connections will use the previous keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @param keys A 48-byte buffer containing the session ticket keys. + */ + setTicketKeys(keys: Buffer): void; + /** + * events.EventEmitter + * 1. tlsClientError + * 2. newSession + * 3. OCSPRequest + * 4. resumeSession + * 5. secureConnection + * 6. 
keylog + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + addListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + addListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + addListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + addListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + addListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "tlsClientError", err: Error, tlsSocket: TLSSocket): boolean; + emit(event: "newSession", sessionId: Buffer, sessionData: Buffer, callback: () => void): boolean; + emit( + event: "OCSPRequest", + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ): boolean; + emit( + event: "resumeSession", + sessionId: Buffer, + callback: (err: Error | null, sessionData: Buffer | null) => void, + ): boolean; + emit(event: "secureConnection", tlsSocket: TLSSocket): boolean; + emit(event: "keylog", line: Buffer, tlsSocket: TLSSocket): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + on(event: "newSession", listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + on( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + on( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + on(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + on(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + once( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + once( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + once( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + once(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + once(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + prependListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependListener( + event: "resumeSession", + 
listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + prependListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + prependListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependOnceListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + prependOnceListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependOnceListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + prependOnceListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + } + /** + * @deprecated since v0.11.3 Use `tls.TLSSocket` instead. + */ + interface SecurePair { + encrypted: TLSSocket; + cleartext: TLSSocket; + } + type SecureVersion = "TLSv1.3" | "TLSv1.2" | "TLSv1.1" | "TLSv1"; + interface SecureContextOptions { + /** + * If set, this will be called when a client opens a connection using the ALPN extension. + * One argument will be passed to the callback: an object containing `servername` and `protocols` fields, + * respectively containing the server name from the SNI extension (if any) and an array of + * ALPN protocol name strings. The callback must return either one of the strings listed in `protocols`, + * which will be returned to the client as the selected ALPN protocol, or `undefined`, + * to reject the connection with a fatal alert. If a string is returned that does not match one of + * the client's ALPN protocols, an error will be thrown. + * This option cannot be used with the `ALPNProtocols` option, and setting both options will throw an error. + */ + ALPNCallback?: ((arg: { servername: string; protocols: string[] }) => string | undefined) | undefined; + /** + * Optionally override the trusted CA certificates. Default is to trust + * the well-known CAs curated by Mozilla. Mozilla's CAs are completely + * replaced when CAs are explicitly specified using this option. + */ + ca?: string | Buffer | Array | undefined; + /** + * Cert chains in PEM format. One cert chain should be provided per + * private key. Each cert chain should consist of the PEM formatted + * certificate for a provided private key, followed by the PEM + * formatted intermediate certificates (if any), in order, and not + * including the root CA (the root CA must be pre-known to the peer, + * see ca). When providing multiple cert chains, they do not have to + * be in the same order as their private keys in key. If the + * intermediate certificates are not provided, the peer will not be + * able to validate the certificate, and the handshake will fail. + */ + cert?: string | Buffer | Array | undefined; + /** + * Colon-separated list of supported signature algorithms. The list + * can contain digest algorithms (SHA256, MD5 etc.), public key + * algorithms (RSA-PSS, ECDSA etc.), combination of both (e.g + * 'RSA+SHA384') or TLS v1.3 scheme names (e.g. rsa_pss_pss_sha512). 
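Editor's note: `server.addContext()` (described above) together with the certificate-related `SecureContextOptions` fields is enough for name-based virtual hosting. A sketch with placeholder host names and certificate files:

```js
const tls = require('node:tls');
const fs = require('node:fs');

// All file names and host names below are placeholders.
const server = tls.createServer({
  key: fs.readFileSync('default-key.pem'),
  cert: fs.readFileSync('default-cert.pem'),
}, (socket) => {
  socket.end('hello from the default context\n');
});

// Serve a different certificate when the client's SNI name matches.
server.addContext('alt.example.org', {
  key: fs.readFileSync('alt-key.pem'),
  cert: fs.readFileSync('alt-cert.pem'),
});

server.listen(8443);
```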
+ */ + sigalgs?: string | undefined; + /** + * Cipher suite specification, replacing the default. For more + * information, see modifying the default cipher suite. Permitted + * ciphers can be obtained via tls.getCiphers(). Cipher names must be + * uppercased in order for OpenSSL to accept them. + */ + ciphers?: string | undefined; + /** + * Name of an OpenSSL engine which can provide the client certificate. + */ + clientCertEngine?: string | undefined; + /** + * PEM formatted CRLs (Certificate Revocation Lists). + */ + crl?: string | Buffer | Array | undefined; + /** + * `'auto'` or custom Diffie-Hellman parameters, required for non-ECDHE perfect forward secrecy. + * If omitted or invalid, the parameters are silently discarded and DHE ciphers will not be available. + * ECDHE-based perfect forward secrecy will still be available. + */ + dhparam?: string | Buffer | undefined; + /** + * A string describing a named curve or a colon separated list of curve + * NIDs or names, for example P-521:P-384:P-256, to use for ECDH key + * agreement. Set to auto to select the curve automatically. Use + * crypto.getCurves() to obtain a list of available curve names. On + * recent releases, openssl ecparam -list_curves will also display the + * name and description of each available elliptic curve. Default: + * tls.DEFAULT_ECDH_CURVE. + */ + ecdhCurve?: string | undefined; + /** + * Attempt to use the server's cipher suite preferences instead of the + * client's. When true, causes SSL_OP_CIPHER_SERVER_PREFERENCE to be + * set in secureOptions + */ + honorCipherOrder?: boolean | undefined; + /** + * Private keys in PEM format. PEM allows the option of private keys + * being encrypted. Encrypted keys will be decrypted with + * options.passphrase. Multiple keys using different algorithms can be + * provided either as an array of unencrypted key strings or buffers, + * or an array of objects in the form {pem: [, + * passphrase: ]}. The object form can only occur in an array. + * object.passphrase is optional. Encrypted keys will be decrypted with + * object.passphrase if provided, or options.passphrase if it is not. + */ + key?: string | Buffer | Array | undefined; + /** + * Name of an OpenSSL engine to get private key from. Should be used + * together with privateKeyIdentifier. + */ + privateKeyEngine?: string | undefined; + /** + * Identifier of a private key managed by an OpenSSL engine. Should be + * used together with privateKeyEngine. Should not be set together with + * key, because both options define a private key in different ways. + */ + privateKeyIdentifier?: string | undefined; + /** + * Optionally set the maximum TLS version to allow. One + * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the + * `secureProtocol` option, use one or the other. + * **Default:** `'TLSv1.3'`, unless changed using CLI options. Using + * `--tls-max-v1.2` sets the default to `'TLSv1.2'`. Using `--tls-max-v1.3` sets the default to + * `'TLSv1.3'`. If multiple of the options are provided, the highest maximum is used. + */ + maxVersion?: SecureVersion | undefined; + /** + * Optionally set the minimum TLS version to allow. One + * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the + * `secureProtocol` option, use one or the other. It is not recommended to use + * less than TLSv1.2, but it may be required for interoperability. + * **Default:** `'TLSv1.2'`, unless changed using CLI options. Using + * `--tls-v1.0` sets the default to `'TLSv1'`. 
Using `--tls-v1.1` sets the default to + * `'TLSv1.1'`. Using `--tls-min-v1.3` sets the default to + * 'TLSv1.3'. If multiple of the options are provided, the lowest minimum is used. + */ + minVersion?: SecureVersion | undefined; + /** + * Shared passphrase used for a single private key and/or a PFX. + */ + passphrase?: string | undefined; + /** + * PFX or PKCS12 encoded private key and certificate chain. pfx is an + * alternative to providing key and cert individually. PFX is usually + * encrypted, if it is, passphrase will be used to decrypt it. Multiple + * PFX can be provided either as an array of unencrypted PFX buffers, + * or an array of objects in the form {buf: [, + * passphrase: ]}. The object form can only occur in an array. + * object.passphrase is optional. Encrypted PFX will be decrypted with + * object.passphrase if provided, or options.passphrase if it is not. + */ + pfx?: string | Buffer | Array | undefined; + /** + * Optionally affect the OpenSSL protocol behavior, which is not + * usually necessary. This should be used carefully if at all! Value is + * a numeric bitmask of the SSL_OP_* options from OpenSSL Options + */ + secureOptions?: number | undefined; // Value is a numeric bitmask of the `SSL_OP_*` options + /** + * Legacy mechanism to select the TLS protocol version to use, it does + * not support independent control of the minimum and maximum version, + * and does not support limiting the protocol to TLSv1.3. Use + * minVersion and maxVersion instead. The possible values are listed as + * SSL_METHODS, use the function names as strings. For example, use + * 'TLSv1_1_method' to force TLS version 1.1, or 'TLS_method' to allow + * any TLS protocol version up to TLSv1.3. It is not recommended to use + * TLS versions less than 1.2, but it may be required for + * interoperability. Default: none, see minVersion. + */ + secureProtocol?: string | undefined; + /** + * Opaque identifier used by servers to ensure session state is not + * shared between applications. Unused by clients. + */ + sessionIdContext?: string | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + * See Session Resumption for more information. + */ + ticketKeys?: Buffer | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. + */ + sessionTimeout?: number | undefined; + } + interface SecureContext { + context: any; + } + /** + * Verifies the certificate `cert` is issued to `hostname`. + * + * Returns [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object, populating it with `reason`, `host`, and `cert` on + * failure. On success, returns [undefined](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Undefined_type). + * + * This function is intended to be used in combination with the`checkServerIdentity` option that can be passed to {@link connect} and as + * such operates on a `certificate object`. For other purposes, consider using `x509.checkHost()` instead. + * + * This function can be overwritten by providing an alternative function as the`options.checkServerIdentity` option that is passed to `tls.connect()`. The + * overwriting function can call `tls.checkServerIdentity()` of course, to augment + * the checks done with additional verification. 
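Editor's note: as described above, a custom `checkServerIdentity` can delegate to `tls.checkServerIdentity()` and then add its own checks. A sketch that pins the peer's `fingerprint256`; the pinned value and host are placeholders:

```js
const tls = require('node:tls');

// Placeholder pin; in practice this is the known SHA-256 fingerprint of the server certificate.
const PINNED_FINGERPRINT256 = 'AA:BB:CC:...';

const options = {
  // Placeholder host; SNI is enabled explicitly via `servername`.
  servername: 'example.com',
  checkServerIdentity(hostname, cert) {
    // Keep the standard host name verification first.
    const err = tls.checkServerIdentity(hostname, cert);
    if (err) return err;
    // Then require the expected certificate fingerprint.
    if (cert.fingerprint256 !== PINNED_FINGERPRINT256) {
      return new Error(`certificate fingerprint mismatch for ${hostname}`);
    }
    return undefined;
  },
};

const socket = tls.connect(443, 'example.com', options, () => {
  console.log('pinned connection established');
  socket.end();
});
```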
+ * + * This function is only called if the certificate passed all other checks, such as + * being issued by trusted CA (`options.ca`). + * + * Earlier versions of Node.js incorrectly accepted certificates for a given`hostname` if a matching `uniformResourceIdentifier` subject alternative name + * was present (see [CVE-2021-44531](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44531)). Applications that wish to accept`uniformResourceIdentifier` subject alternative names can use + * a custom`options.checkServerIdentity` function that implements the desired behavior. + * @since v0.8.4 + * @param hostname The host name or IP address to verify the certificate against. + * @param cert A `certificate object` representing the peer's certificate. + */ + function checkServerIdentity(hostname: string, cert: PeerCertificate): Error | undefined; + /** + * Creates a new {@link Server}. The `secureConnectionListener`, if provided, is + * automatically set as a listener for the `'secureConnection'` event. + * + * The `ticketKeys` options is automatically shared between `node:cluster` module + * workers. + * + * The following illustrates a simple echo server: + * + * ```js + * const tls = require('node:tls'); + * const fs = require('node:fs'); + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem'), + * + * // This is necessary only if using client certificate authentication. + * requestCert: true, + * + * // This is necessary only if the client uses a self-signed certificate. + * ca: [ fs.readFileSync('client-cert.pem') ], + * }; + * + * const server = tls.createServer(options, (socket) => { + * console.log('server connected', + * socket.authorized ? 'authorized' : 'unauthorized'); + * socket.write('welcome!\n'); + * socket.setEncoding('utf8'); + * socket.pipe(socket); + * }); + * server.listen(8000, () => { + * console.log('server bound'); + * }); + * ``` + * + * The server can be tested by connecting to it using the example client from {@link connect}. + * @since v0.3.2 + */ + function createServer(secureConnectionListener?: (socket: TLSSocket) => void): Server; + function createServer(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void): Server; + /** + * The `callback` function, if specified, will be added as a listener for the `'secureConnect'` event. + * + * `tls.connect()` returns a {@link TLSSocket} object. + * + * Unlike the `https` API, `tls.connect()` does not enable the + * SNI (Server Name Indication) extension by default, which may cause some + * servers to return an incorrect certificate or reject the connection + * altogether. To enable SNI, set the `servername` option in addition + * to `host`. + * + * The following illustrates a client for the echo server example from {@link createServer}: + * + * ```js + * // Assumes an echo server that is listening on port 8000. + * const tls = require('node:tls'); + * const fs = require('node:fs'); + * + * const options = { + * // Necessary only if the server requires client certificate authentication. + * key: fs.readFileSync('client-key.pem'), + * cert: fs.readFileSync('client-cert.pem'), + * + * // Necessary only if the server uses a self-signed certificate. + * ca: [ fs.readFileSync('server-cert.pem') ], + * + * // Necessary only if the server's cert isn't for "localhost". + * checkServerIdentity: () => { return null; }, + * }; + * + * const socket = tls.connect(8000, options, () => { + * console.log('client connected', + * socket.authorized ? 
'authorized' : 'unauthorized'); + * process.stdin.pipe(socket); + * process.stdin.resume(); + * }); + * socket.setEncoding('utf8'); + * socket.on('data', (data) => { + * console.log(data); + * }); + * socket.on('end', () => { + * console.log('server ends connection'); + * }); + * ``` + * @since v0.11.3 + */ + function connect(options: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + function connect( + port: number, + host?: string, + options?: ConnectionOptions, + secureConnectListener?: () => void, + ): TLSSocket; + function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + /** + * Creates a new secure pair object with two streams, one of which reads and writes + * the encrypted data and the other of which reads and writes the cleartext data. + * Generally, the encrypted stream is piped to/from an incoming encrypted data + * stream and the cleartext one is used as a replacement for the initial encrypted + * stream. + * + * `tls.createSecurePair()` returns a `tls.SecurePair` object with `cleartext` and`encrypted` stream properties. + * + * Using `cleartext` has the same API as {@link TLSSocket}. + * + * The `tls.createSecurePair()` method is now deprecated in favor of`tls.TLSSocket()`. For example, the code: + * + * ```js + * pair = tls.createSecurePair(// ... ); + * pair.encrypted.pipe(socket); + * socket.pipe(pair.encrypted); + * ``` + * + * can be replaced by: + * + * ```js + * secureSocket = tls.TLSSocket(socket, options); + * ``` + * + * where `secureSocket` has the same API as `pair.cleartext`. + * @since v0.3.2 + * @deprecated Since v0.11.3 - Use {@link TLSSocket} instead. + * @param context A secure context object as returned by `tls.createSecureContext()` + * @param isServer `true` to specify that this TLS connection should be opened as a server. + * @param requestCert `true` to specify whether a server should request a certificate from a connecting client. Only applies when `isServer` is `true`. + * @param rejectUnauthorized If not `false` a server automatically reject clients with invalid certificates. Only applies when `isServer` is `true`. + */ + function createSecurePair( + context?: SecureContext, + isServer?: boolean, + requestCert?: boolean, + rejectUnauthorized?: boolean, + ): SecurePair; + /** + * {@link createServer} sets the default value of the `honorCipherOrder` option + * to `true`, other APIs that create secure contexts leave it unset. + * + * {@link createServer} uses a 128 bit truncated SHA1 hash value generated + * from `process.argv` as the default value of the `sessionIdContext` option, other + * APIs that create secure contexts have no default value. + * + * The `tls.createSecureContext()` method creates a `SecureContext` object. It is + * usable as an argument to several `tls` APIs, such as `server.addContext()`, + * but has no public methods. The {@link Server} constructor and the {@link createServer} method do not support the `secureContext` option. + * + * A key is _required_ for ciphers that use certificates. Either `key` or`pfx` can be used to provide it. + * + * If the `ca` option is not given, then Node.js will default to using [Mozilla's publicly trusted list of + * CAs](https://hg.mozilla.org/mozilla-central/raw-file/tip/security/nss/lib/ckfw/builtins/certdata.txt). + * + * Custom DHE parameters are discouraged in favor of the new `dhparam: 'auto'`option. When set to `'auto'`, well-known DHE parameters of sufficient strength + * will be selected automatically. 
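Editor's note: the options shape accepted by `createSecureContext()` here is the same one `server.setSecureContext()` (documented earlier for `tls.Server`) takes, which allows reloading certificates without dropping existing connections. A sketch with placeholder file names, reloading on `SIGHUP`:

```js
const tls = require('node:tls');
const fs = require('node:fs');

// File names are placeholders.
function loadContextOptions() {
  return {
    key: fs.readFileSync('server-key.pem'),
    cert: fs.readFileSync('server-cert.pem'),
    minVersion: 'TLSv1.2',
  };
}

const server = tls.createServer(loadContextOptions(), (socket) => {
  socket.end('ok\n');
});
server.listen(8443);

// Re-read the key pair (e.g. after renewal) without restarting the server.
process.on('SIGHUP', () => {
  server.setSecureContext(loadContextOptions());
  console.log('TLS credentials reloaded');
});
```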
Otherwise, if necessary, `openssl dhparam` can + * be used to create custom parameters. The key length must be greater than or + * equal to 1024 bits or else an error will be thrown. Although 1024 bits is + * permissible, use 2048 bits or larger for stronger security. + * @since v0.11.13 + */ + function createSecureContext(options?: SecureContextOptions): SecureContext; + /** + * Returns an array with the names of the supported TLS ciphers. The names are + * lower-case for historical reasons, but must be uppercased to be used in + * the `ciphers` option of {@link createSecureContext}. + * + * Not all supported ciphers are enabled by default. See `Modifying the default TLS cipher suite`. + * + * Cipher names that start with `'tls_'` are for TLSv1.3, all the others are for + * TLSv1.2 and below. + * + * ```js + * console.log(tls.getCiphers()); // ['aes128-gcm-sha256', 'aes128-sha', ...] + * ``` + * @since v0.10.2 + */ + function getCiphers(): string[]; + /** + * The default curve name to use for ECDH key agreement in a tls server. + * The default value is 'auto'. See tls.createSecureContext() for further + * information. + */ + let DEFAULT_ECDH_CURVE: string; + /** + * The default value of the maxVersion option of + * tls.createSecureContext(). It can be assigned any of the supported TLS + * protocol versions, 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1'. Default: + * 'TLSv1.3', unless changed using CLI options. Using --tls-max-v1.2 sets + * the default to 'TLSv1.2'. Using --tls-max-v1.3 sets the default to + * 'TLSv1.3'. If multiple of the options are provided, the highest maximum + * is used. + */ + let DEFAULT_MAX_VERSION: SecureVersion; + /** + * The default value of the minVersion option of tls.createSecureContext(). + * It can be assigned any of the supported TLS protocol versions, + * 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1'. Default: 'TLSv1.2', unless + * changed using CLI options. Using --tls-min-v1.0 sets the default to + * 'TLSv1'. Using --tls-min-v1.1 sets the default to 'TLSv1.1'. Using + * --tls-min-v1.3 sets the default to 'TLSv1.3'. If multiple of the options + * are provided, the lowest minimum is used. + */ + let DEFAULT_MIN_VERSION: SecureVersion; + /** + * The default value of the ciphers option of tls.createSecureContext(). + * It can be assigned any of the supported OpenSSL ciphers. + * Defaults to the content of crypto.constants.defaultCoreCipherList, unless + * changed using CLI options using --tls-default-ciphers. + */ + let DEFAULT_CIPHERS: string; + /** + * An immutable array of strings representing the root certificates (in PEM + * format) used for verifying peer certificates. This is the default value + * of the ca option to tls.createSecureContext(). + */ + const rootCertificates: readonly string[]; +} +declare module "node:tls" { + export * from "tls"; +} diff --git a/node_modules/@types/node/trace_events.d.ts b/node_modules/@types/node/trace_events.d.ts new file mode 100644 index 0000000..3361359 --- /dev/null +++ b/node_modules/@types/node/trace_events.d.ts @@ -0,0 +1,182 @@ +/** + * The `node:trace_events` module provides a mechanism to centralize tracing + * information generated by V8, Node.js core, and userspace code. + * + * Tracing can be enabled with the `--trace-event-categories` command-line flag + * or by using the `node:trace_events` module. The `--trace-event-categories` flag + * accepts a list of comma-separated category names. + * + * The available categories are: + * + * * `node`: An empty placeholder. 
+ * * `node.async_hooks`: Enables capture of detailed `async_hooks` trace data. + * The `async_hooks` events have a unique `asyncId` and a special `triggerId` `triggerAsyncId` property. + * * `node.bootstrap`: Enables capture of Node.js bootstrap milestones. + * * `node.console`: Enables capture of `console.time()` and `console.count()`output. + * * `node.threadpoolwork.sync`: Enables capture of trace data for threadpool + * synchronous operations, such as `blob`, `zlib`, `crypto` and `node_api`. + * * `node.threadpoolwork.async`: Enables capture of trace data for threadpool + * asynchronous operations, such as `blob`, `zlib`, `crypto` and `node_api`. + * * `node.dns.native`: Enables capture of trace data for DNS queries. + * * `node.net.native`: Enables capture of trace data for network. + * * `node.environment`: Enables capture of Node.js Environment milestones. + * * `node.fs.sync`: Enables capture of trace data for file system sync methods. + * * `node.fs_dir.sync`: Enables capture of trace data for file system sync + * directory methods. + * * `node.fs.async`: Enables capture of trace data for file system async methods. + * * `node.fs_dir.async`: Enables capture of trace data for file system async + * directory methods. + * * `node.perf`: Enables capture of `Performance API` measurements. + * * `node.perf.usertiming`: Enables capture of only Performance API User Timing + * measures and marks. + * * `node.perf.timerify`: Enables capture of only Performance API timerify + * measurements. + * * `node.promises.rejections`: Enables capture of trace data tracking the number + * of unhandled Promise rejections and handled-after-rejections. + * * `node.vm.script`: Enables capture of trace data for the `node:vm` module's`runInNewContext()`, `runInContext()`, and `runInThisContext()` methods. + * * `v8`: The `V8` events are GC, compiling, and execution related. + * * `node.http`: Enables capture of trace data for http request / response. + * + * By default the `node`, `node.async_hooks`, and `v8` categories are enabled. + * + * ```bash + * node --trace-event-categories v8,node,node.async_hooks server.js + * ``` + * + * Prior versions of Node.js required the use of the `--trace-events-enabled`flag to enable trace events. This requirement has been removed. However, the`--trace-events-enabled` flag _may_ still be + * used and will enable the`node`, `node.async_hooks`, and `v8` trace event categories by default. + * + * ```bash + * node --trace-events-enabled + * + * # is equivalent to + * + * node --trace-event-categories v8,node,node.async_hooks + * ``` + * + * Alternatively, trace events may be enabled using the `node:trace_events` module: + * + * ```js + * const trace_events = require('node:trace_events'); + * const tracing = trace_events.createTracing({ categories: ['node.perf'] }); + * tracing.enable(); // Enable trace event capture for the 'node.perf' category + * + * // do work + * + * tracing.disable(); // Disable trace event capture for the 'node.perf' category + * ``` + * + * Running Node.js with tracing enabled will produce log files that can be opened + * in the [`chrome://tracing`](https://www.chromium.org/developers/how-tos/trace-event-profiling-tool) tab of Chrome. + * + * The logging file is by default called `node_trace.${rotation}.log`, where`${rotation}` is an incrementing log-rotation id. 
The filepath pattern can + * be specified with `--trace-event-file-pattern` that accepts a template + * string that supports `${rotation}` and `${pid}`: + * + * ```bash + * node --trace-event-categories v8 --trace-event-file-pattern '${pid}-${rotation}.log' server.js + * ``` + * + * To guarantee that the log file is properly generated after signal events like`SIGINT`, `SIGTERM`, or `SIGBREAK`, make sure to have the appropriate handlers + * in your code, such as: + * + * ```js + * process.on('SIGINT', function onSigint() { + * console.info('Received SIGINT.'); + * process.exit(130); // Or applicable exit code depending on OS and signal + * }); + * ``` + * + * The tracing system uses the same time source + * as the one used by `process.hrtime()`. + * However the trace-event timestamps are expressed in microseconds, + * unlike `process.hrtime()` which returns nanoseconds. + * + * The features from this module are not available in `Worker` threads. + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/trace_events.js) + */ +declare module "trace_events" { + /** + * The `Tracing` object is used to enable or disable tracing for sets of + * categories. Instances are created using the + * `trace_events.createTracing()` method. + * + * When created, the `Tracing` object is disabled. Calling the + * `tracing.enable()` method adds the categories to the set of enabled trace + * event categories. Calling `tracing.disable()` will remove the categories + * from the set of enabled trace event categories. + */ + interface Tracing { + /** + * A comma-separated list of the trace event categories covered by this + * `Tracing` object. + */ + readonly categories: string; + /** + * Disables this `Tracing` object. + * + * Only trace event categories _not_ covered by other enabled `Tracing` + * objects and _not_ specified by the `--trace-event-categories` flag + * will be disabled. + */ + disable(): void; + /** + * Enables this `Tracing` object for the set of categories covered by + * the `Tracing` object. + */ + enable(): void; + /** + * `true` only if the `Tracing` object has been enabled. + */ + readonly enabled: boolean; + } + interface CreateTracingOptions { + /** + * An array of trace category names. Values included in the array are + * coerced to a string when possible. An error will be thrown if the + * value cannot be coerced. + */ + categories: string[]; + } + /** + * Creates and returns a `Tracing` object for the given set of `categories`. + * + * ```js + * const trace_events = require('node:trace_events'); + * const categories = ['node.perf', 'node.async_hooks']; + * const tracing = trace_events.createTracing({ categories }); + * tracing.enable(); + * // do stuff + * tracing.disable(); + * ``` + * @since v10.0.0 + * @return . + */ + function createTracing(options: CreateTracingOptions): Tracing; + /** + * Returns a comma-separated list of all currently-enabled trace event + * categories. The current set of enabled trace event categories is determined + * by the _union_ of all currently-enabled `Tracing` objects and any categories + * enabled using the `--trace-event-categories` flag. + * + * Given the file `test.js` below, the command`node --trace-event-categories node.perf test.js` will print`'node.async_hooks,node.perf'` to the console. 
+ * + * ```js + * const trace_events = require('node:trace_events'); + * const t1 = trace_events.createTracing({ categories: ['node.async_hooks'] }); + * const t2 = trace_events.createTracing({ categories: ['node.perf'] }); + * const t3 = trace_events.createTracing({ categories: ['v8'] }); + * + * t1.enable(); + * t2.enable(); + * + * console.log(trace_events.getEnabledCategories()); + * ``` + * @since v10.0.0 + */ + function getEnabledCategories(): string | undefined; +} +declare module "node:trace_events" { + export * from "trace_events"; +} diff --git a/node_modules/@types/node/tty.d.ts b/node_modules/@types/node/tty.d.ts new file mode 100644 index 0000000..1c0dafd --- /dev/null +++ b/node_modules/@types/node/tty.d.ts @@ -0,0 +1,208 @@ +/** + * The `node:tty` module provides the `tty.ReadStream` and `tty.WriteStream`classes. In most cases, it will not be necessary or possible to use this module + * directly. However, it can be accessed using: + * + * ```js + * const tty = require('node:tty'); + * ``` + * + * When Node.js detects that it is being run with a text terminal ("TTY") + * attached, `process.stdin` will, by default, be initialized as an instance of`tty.ReadStream` and both `process.stdout` and `process.stderr` will, by + * default, be instances of `tty.WriteStream`. The preferred method of determining + * whether Node.js is being run within a TTY context is to check that the value of + * the `process.stdout.isTTY` property is `true`: + * + * ```console + * $ node -p -e "Boolean(process.stdout.isTTY)" + * true + * $ node -p -e "Boolean(process.stdout.isTTY)" | cat + * false + * ``` + * + * In most cases, there should be little to no reason for an application to + * manually create instances of the `tty.ReadStream` and `tty.WriteStream`classes. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/tty.js) + */ +declare module "tty" { + import * as net from "node:net"; + /** + * The `tty.isatty()` method returns `true` if the given `fd` is associated with + * a TTY and `false` if it is not, including whenever `fd` is not a non-negative + * integer. + * @since v0.5.8 + * @param fd A numeric file descriptor + */ + function isatty(fd: number): boolean; + /** + * Represents the readable side of a TTY. In normal circumstances `process.stdin` will be the only `tty.ReadStream` instance in a Node.js + * process and there should be no reason to create additional instances. + * @since v0.5.8 + */ + class ReadStream extends net.Socket { + constructor(fd: number, options?: net.SocketConstructorOpts); + /** + * A `boolean` that is `true` if the TTY is currently configured to operate as a + * raw device. + * + * This flag is always `false` when a process starts, even if the terminal is + * operating in raw mode. Its value will change with subsequent calls to`setRawMode`. + * @since v0.7.7 + */ + isRaw: boolean; + /** + * Allows configuration of `tty.ReadStream` so that it operates as a raw device. + * + * When in raw mode, input is always available character-by-character, not + * including modifiers. Additionally, all special processing of characters by the + * terminal is disabled, including echoing input + * characters. Ctrl+C will no longer cause a `SIGINT` when + * in this mode. + * @since v0.7.7 + * @param mode If `true`, configures the `tty.ReadStream` to operate as a raw device. If `false`, configures the `tty.ReadStream` to operate in its default mode. The `readStream.isRaw` + * property will be set to the resulting mode. + * @return The read stream instance. 
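Editor's note: `setRawMode()` as described above delivers input character-by-character. A minimal sketch that reads single key presses from `process.stdin` and restores normal handling on exit:

```js
// Read single key presses; assumes the process is attached to a TTY.
if (process.stdin.isTTY) {
  process.stdin.setRawMode(true);
  process.stdin.resume();

  process.stdin.on('data', (chunk) => {
    const key = chunk.toString('utf8');
    // Raw mode disables the usual Ctrl+C handling, so exit manually on 0x03.
    if (key === '\u0003' || key === 'q') {
      process.stdin.setRawMode(false);
      process.exit(0);
    }
    console.log('pressed:', JSON.stringify(key));
  });
} else {
  console.log('stdin is not a TTY');
}
```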
+ */ + setRawMode(mode: boolean): this; + /** + * A `boolean` that is always `true` for `tty.ReadStream` instances. + * @since v0.5.8 + */ + isTTY: boolean; + } + /** + * -1 - to the left from cursor + * 0 - the entire line + * 1 - to the right from cursor + */ + type Direction = -1 | 0 | 1; + /** + * Represents the writable side of a TTY. In normal circumstances,`process.stdout` and `process.stderr` will be the only`tty.WriteStream` instances created for a Node.js process and there + * should be no reason to create additional instances. + * @since v0.5.8 + */ + class WriteStream extends net.Socket { + constructor(fd: number); + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "resize", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "resize"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "resize", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "resize", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "resize", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "resize", listener: () => void): this; + /** + * `writeStream.clearLine()` clears the current line of this `WriteStream` in a + * direction identified by `dir`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + clearLine(dir: Direction, callback?: () => void): boolean; + /** + * `writeStream.clearScreenDown()` clears this `WriteStream` from the current + * cursor down. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + clearScreenDown(callback?: () => void): boolean; + /** + * `writeStream.cursorTo()` moves this `WriteStream`'s cursor to the specified + * position. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + cursorTo(x: number, y?: number, callback?: () => void): boolean; + cursorTo(x: number, callback: () => void): boolean; + /** + * `writeStream.moveCursor()` moves this `WriteStream`'s cursor _relative_ to its + * current position. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + moveCursor(dx: number, dy: number, callback?: () => void): boolean; + /** + * Returns: + * + * * `1` for 2, + * * `4` for 16, + * * `8` for 256, + * * `24` for 16,777,216 colors supported. + * + * Use this to determine what colors the terminal supports. Due to the nature of + * colors in terminals it is possible to either have false positives or false + * negatives. It depends on process information and the environment variables that + * may lie about what terminal is used. 
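The `WriteStream` cursor helpers declared above (`cursorTo()`, `clearLine()`) are enough for a single-line status display; a rough sketch, with an `isTTY` guard so output degrades when piped:

```js
'use strict';
// Sketch: redraw one status line in place using tty.WriteStream helpers.
const out = process.stdout;
let n = 0;

const timer = setInterval(() => {
  n += 20;
  if (out.isTTY) {
    out.cursorTo(0);                // move to column 0
    out.clearLine(0);               // clear the entire line
    out.write(`progress: ${n}%`);
  } else {
    out.write(`progress: ${n}%\n`); // plain lines when piped to a file
  }
  if (n >= 100) {
    clearInterval(timer);
    out.write('\n');
  }
}, 200);
```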
+ * It is possible to pass in an `env` object to simulate the usage of a specific + * terminal. This can be useful to check how specific environment settings behave. + * + * To enforce a specific color support, use one of the below environment settings. + * + * * 2 colors: `FORCE_COLOR = 0` (Disables colors) + * * 16 colors: `FORCE_COLOR = 1` + * * 256 colors: `FORCE_COLOR = 2` + * * 16,777,216 colors: `FORCE_COLOR = 3` + * + * Disabling color support is also possible by using the `NO_COLOR` and`NODE_DISABLE_COLORS` environment variables. + * @since v9.9.0 + * @param [env=process.env] An object containing the environment variables to check. This enables simulating the usage of a specific terminal. + */ + getColorDepth(env?: object): number; + /** + * Returns `true` if the `writeStream` supports at least as many colors as provided + * in `count`. Minimum support is 2 (black and white). + * + * This has the same false positives and negatives as described in `writeStream.getColorDepth()`. + * + * ```js + * process.stdout.hasColors(); + * // Returns true or false depending on if `stdout` supports at least 16 colors. + * process.stdout.hasColors(256); + * // Returns true or false depending on if `stdout` supports at least 256 colors. + * process.stdout.hasColors({ TMUX: '1' }); + * // Returns true. + * process.stdout.hasColors(2 ** 24, { TMUX: '1' }); + * // Returns false (the environment setting pretends to support 2 ** 8 colors). + * ``` + * @since v11.13.0, v10.16.0 + * @param [count=16] The number of colors that are requested (minimum 2). + * @param [env=process.env] An object containing the environment variables to check. This enables simulating the usage of a specific terminal. + */ + hasColors(count?: number): boolean; + hasColors(env?: object): boolean; + hasColors(count: number, env?: object): boolean; + /** + * `writeStream.getWindowSize()` returns the size of the TTY + * corresponding to this `WriteStream`. The array is of the type`[numColumns, numRows]` where `numColumns` and `numRows` represent the number + * of columns and rows in the corresponding TTY. + * @since v0.7.7 + */ + getWindowSize(): [number, number]; + /** + * A `number` specifying the number of columns the TTY currently has. This property + * is updated whenever the `'resize'` event is emitted. + * @since v0.7.7 + */ + columns: number; + /** + * A `number` specifying the number of rows the TTY currently has. This property + * is updated whenever the `'resize'` event is emitted. + * @since v0.7.7 + */ + rows: number; + /** + * A `boolean` that is always `true`. + * @since v0.5.8 + */ + isTTY: boolean; + } +} +declare module "node:tty" { + export * from "tty"; +} diff --git a/node_modules/@types/node/url.d.ts b/node_modules/@types/node/url.d.ts new file mode 100644 index 0000000..dc2d0a0 --- /dev/null +++ b/node_modules/@types/node/url.d.ts @@ -0,0 +1,944 @@ +/** + * The `node:url` module provides utilities for URL resolution and parsing. 
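A small sketch of the color-detection members documented above (`getColorDepth()`, `hasColors()`); the ANSI escape codes and the 16-color threshold are just one possible policy:

```js
'use strict';
// Sketch: decide whether to emit ANSI colors based on the attached terminal.
const out = process.stdout;

const depth = out.isTTY ? out.getColorDepth() : 1; // 1, 4, 8 or 24
const useColor = out.isTTY && out.hasColors(16);

const green = (text) => (useColor ? `\u001b[32m${text}\u001b[39m` : text);

console.log(`color depth: ${depth}`);
console.log(green('ready'));
```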
It can + * be accessed using: + * + * ```js + * import url from 'node:url'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.11.0/lib/url.js) + */ +declare module "url" { + import { Blob as NodeBlob } from "node:buffer"; + import { ClientRequestArgs } from "node:http"; + import { ParsedUrlQuery, ParsedUrlQueryInput } from "node:querystring"; + // Input to `url.format` + interface UrlObject { + auth?: string | null | undefined; + hash?: string | null | undefined; + host?: string | null | undefined; + hostname?: string | null | undefined; + href?: string | null | undefined; + pathname?: string | null | undefined; + protocol?: string | null | undefined; + search?: string | null | undefined; + slashes?: boolean | null | undefined; + port?: string | number | null | undefined; + query?: string | null | ParsedUrlQueryInput | undefined; + } + // Output of `url.parse` + interface Url { + auth: string | null; + hash: string | null; + host: string | null; + hostname: string | null; + href: string; + path: string | null; + pathname: string | null; + protocol: string | null; + search: string | null; + slashes: boolean | null; + port: string | null; + query: string | null | ParsedUrlQuery; + } + interface UrlWithParsedQuery extends Url { + query: ParsedUrlQuery; + } + interface UrlWithStringQuery extends Url { + query: string | null; + } + /** + * The `url.parse()` method takes a URL string, parses it, and returns a URL + * object. + * + * A `TypeError` is thrown if `urlString` is not a string. + * + * A `URIError` is thrown if the `auth` property is present but cannot be decoded. + * + * `url.parse()` uses a lenient, non-standard algorithm for parsing URL + * strings. It is prone to security issues such as [host name spoofing](https://hackerone.com/reports/678487) and incorrect handling of usernames and passwords. Do not use with untrusted + * input. CVEs are not issued for `url.parse()` vulnerabilities. Use the `WHATWG URL` API instead. + * @since v0.1.25 + * @deprecated Use the WHATWG URL API instead. + * @param urlString The URL string to parse. + * @param [parseQueryString=false] If `true`, the `query` property will always be set to an object returned by the {@link querystring} module's `parse()` method. If `false`, the `query` property + * on the returned URL object will be an unparsed, undecoded string. + * @param [slashesDenoteHost=false] If `true`, the first token after the literal string `//` and preceding the next `/` will be interpreted as the `host`. For instance, given `//foo/bar`, the + * result would be `{host: 'foo', pathname: '/bar'}` rather than `{pathname: '//foo/bar'}`. + */ + function parse(urlString: string): UrlWithStringQuery; + function parse( + urlString: string, + parseQueryString: false | undefined, + slashesDenoteHost?: boolean, + ): UrlWithStringQuery; + function parse(urlString: string, parseQueryString: true, slashesDenoteHost?: boolean): UrlWithParsedQuery; + function parse(urlString: string, parseQueryString: boolean, slashesDenoteHost?: boolean): Url; + /** + * The `url.format()` method returns a formatted URL string derived from `urlObject`. + * + * ```js + * const url = require('node:url'); + * url.format({ + * protocol: 'https', + * hostname: 'example.com', + * pathname: '/some/path', + * query: { + * page: 1, + * format: 'json', + * }, + * }); + * + * // => 'https://example.com/some/path?page=1&format=json' + * ``` + * + * If `urlObject` is not an object or a string, `url.format()` will throw a `TypeError`. 
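To make the deprecation note above concrete, a side-by-side sketch of the legacy `url.parse()` and the recommended WHATWG `URL` parser (the example URL is arbitrary):

```js
'use strict';
const url = require('node:url');

// Legacy parser: lenient, returns a plain Url object, query left as a string.
const legacy = url.parse('https://user:pass@example.org:8080/a/b?query=string#hash');
console.log(legacy.hostname, legacy.port, legacy.query);
// example.org 8080 query=string

// WHATWG parser: strict, throws on invalid input, query exposed as URLSearchParams.
const modern = new URL('https://user:pass@example.org:8080/a/b?query=string#hash');
console.log(modern.hostname, modern.port, modern.searchParams.get('query'));
// example.org 8080 string
```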
+ * + * The formatting process operates as follows: + * + * * A new empty string `result` is created. + * * If `urlObject.protocol` is a string, it is appended as-is to `result`. + * * Otherwise, if `urlObject.protocol` is not `undefined` and is not a string, an `Error` is thrown. + * * For all string values of `urlObject.protocol` that _do not end_ with an ASCII + * colon (`:`) character, the literal string `:` will be appended to `result`. + * * If either of the following conditions is true, then the literal string `//` will be appended to `result`: + * * `urlObject.slashes` property is true; + * * `urlObject.protocol` begins with `http`, `https`, `ftp`, `gopher`, or `file`; + * * If the value of the `urlObject.auth` property is truthy, and either `urlObject.host` or `urlObject.hostname` are not `undefined`, the value of `urlObject.auth` will be coerced into a string + * and appended to `result` followed by the literal string `@`. + * * If the `urlObject.host` property is `undefined` then: + * * If the `urlObject.hostname` is a string, it is appended to `result`. + * * Otherwise, if `urlObject.hostname` is not `undefined` and is not a string, + * an `Error` is thrown. + * * If the `urlObject.port` property value is truthy, and `urlObject.hostname` is not `undefined`: + * * The literal string `:` is appended to `result`, and + * * The value of `urlObject.port` is coerced to a string and appended to `result`. + * * Otherwise, if the `urlObject.host` property value is truthy, the value of `urlObject.host` is coerced to a string and appended to `result`. + * * If the `urlObject.pathname` property is a string that is not an empty string: + * * If the `urlObject.pathname` _does not start_ with an ASCII forward slash + * (`/`), then the literal string `'/'` is appended to `result`. + * * The value of `urlObject.pathname` is appended to `result`. + * * Otherwise, if `urlObject.pathname` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.search` property is `undefined` and if the `urlObject.query`property is an `Object`, the literal string `?` is appended to `result` followed by the output of calling the + * `querystring` module's `stringify()` method passing the value of `urlObject.query`. + * * Otherwise, if `urlObject.search` is a string: + * * If the value of `urlObject.search` _does not start_ with the ASCII question + * mark (`?`) character, the literal string `?` is appended to `result`. + * * The value of `urlObject.search` is appended to `result`. + * * Otherwise, if `urlObject.search` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.hash` property is a string: + * * If the value of `urlObject.hash` _does not start_ with the ASCII hash (`#`) + * character, the literal string `#` is appended to `result`. + * * The value of `urlObject.hash` is appended to `result`. + * * Otherwise, if the `urlObject.hash` property is not `undefined` and is not a + * string, an `Error` is thrown. + * * `result` is returned. + * @since v0.1.25 + * @legacy Use the WHATWG URL API instead. + * @param urlObject A URL object (as returned by `url.parse()` or constructed otherwise). If a string, it is converted to an object by passing it to `url.parse()`. + */ + function format(urlObject: URL, options?: URLFormatOptions): string; + /** + * The `url.format()` method returns a formatted URL string derived from `urlObject`. 
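A short sketch of the `url.format(URL[, options])` overload declared just above, using the `URLFormatOptions` flags to drop selected components (outputs assume the defaults for the remaining options):

```js
'use strict';
const url = require('node:url');

const myURL = new URL('https://user:pass@example.org/path?q=1#frag');

console.log(url.format(myURL, { auth: false, fragment: false }));
// https://example.org/path?q=1

console.log(url.format(myURL, { search: false }));
// https://user:pass@example.org/path#frag
```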
+ * + * ```js + * const url = require('node:url'); + * url.format({ + * protocol: 'https', + * hostname: 'example.com', + * pathname: '/some/path', + * query: { + * page: 1, + * format: 'json', + * }, + * }); + * + * // => 'https://example.com/some/path?page=1&format=json' + * ``` + * + * If `urlObject` is not an object or a string, `url.format()` will throw a `TypeError`. + * + * The formatting process operates as follows: + * + * * A new empty string `result` is created. + * * If `urlObject.protocol` is a string, it is appended as-is to `result`. + * * Otherwise, if `urlObject.protocol` is not `undefined` and is not a string, an `Error` is thrown. + * * For all string values of `urlObject.protocol` that _do not end_ with an ASCII + * colon (`:`) character, the literal string `:` will be appended to `result`. + * * If either of the following conditions is true, then the literal string `//` will be appended to `result`: + * * `urlObject.slashes` property is true; + * * `urlObject.protocol` begins with `http`, `https`, `ftp`, `gopher`, or `file`; + * * If the value of the `urlObject.auth` property is truthy, and either `urlObject.host` or `urlObject.hostname` are not `undefined`, the value of `urlObject.auth` will be coerced into a string + * and appended to `result` followed by the literal string `@`. + * * If the `urlObject.host` property is `undefined` then: + * * If the `urlObject.hostname` is a string, it is appended to `result`. + * * Otherwise, if `urlObject.hostname` is not `undefined` and is not a string, + * an `Error` is thrown. + * * If the `urlObject.port` property value is truthy, and `urlObject.hostname` is not `undefined`: + * * The literal string `:` is appended to `result`, and + * * The value of `urlObject.port` is coerced to a string and appended to `result`. + * * Otherwise, if the `urlObject.host` property value is truthy, the value of `urlObject.host` is coerced to a string and appended to `result`. + * * If the `urlObject.pathname` property is a string that is not an empty string: + * * If the `urlObject.pathname` _does not start_ with an ASCII forward slash + * (`/`), then the literal string `'/'` is appended to `result`. + * * The value of `urlObject.pathname` is appended to `result`. + * * Otherwise, if `urlObject.pathname` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.search` property is `undefined` and if the `urlObject.query`property is an `Object`, the literal string `?` is appended to `result` followed by the output of calling the + * `querystring` module's `stringify()` method passing the value of `urlObject.query`. + * * Otherwise, if `urlObject.search` is a string: + * * If the value of `urlObject.search` _does not start_ with the ASCII question + * mark (`?`) character, the literal string `?` is appended to `result`. + * * The value of `urlObject.search` is appended to `result`. + * * Otherwise, if `urlObject.search` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.hash` property is a string: + * * If the value of `urlObject.hash` _does not start_ with the ASCII hash (`#`) + * character, the literal string `#` is appended to `result`. + * * The value of `urlObject.hash` is appended to `result`. + * * Otherwise, if the `urlObject.hash` property is not `undefined` and is not a + * string, an `Error` is thrown. + * * `result` is returned. + * @since v0.1.25 + * @legacy Use the WHATWG URL API instead. 
+ * @param urlObject A URL object (as returned by `url.parse()` or constructed otherwise). If a string, it is converted to an object by passing it to `url.parse()`. + */ + function format(urlObject: UrlObject | string): string; + /** + * The `url.resolve()` method resolves a target URL relative to a base URL in a + * manner similar to that of a web browser resolving an anchor tag. + * + * ```js + * const url = require('node:url'); + * url.resolve('/one/two/three', 'four'); // '/one/two/four' + * url.resolve('http://example.com/', '/one'); // 'http://example.com/one' + * url.resolve('http://example.com/one', '/two'); // 'http://example.com/two' + * ``` + * + * To achieve the same result using the WHATWG URL API: + * + * ```js + * function resolve(from, to) { + * const resolvedUrl = new URL(to, new URL(from, 'resolve://')); + * if (resolvedUrl.protocol === 'resolve:') { + * // `from` is a relative URL. + * const { pathname, search, hash } = resolvedUrl; + * return pathname + search + hash; + * } + * return resolvedUrl.toString(); + * } + * + * resolve('/one/two/three', 'four'); // '/one/two/four' + * resolve('http://example.com/', '/one'); // 'http://example.com/one' + * resolve('http://example.com/one', '/two'); // 'http://example.com/two' + * ``` + * @since v0.1.25 + * @legacy Use the WHATWG URL API instead. + * @param from The base URL to use if `to` is a relative URL. + * @param to The target URL to resolve. + */ + function resolve(from: string, to: string): string; + /** + * Returns the [Punycode](https://tools.ietf.org/html/rfc5891#section-4.4) ASCII serialization of the `domain`. If `domain` is an + * invalid domain, the empty string is returned. + * + * It performs the inverse operation to {@link domainToUnicode}. + * + * ```js + * import url from 'node:url'; + * + * console.log(url.domainToASCII('español.com')); + * // Prints xn--espaol-zwa.com + * console.log(url.domainToASCII('中文.com')); + * // Prints xn--fiq228c.com + * console.log(url.domainToASCII('xn--iñvalid.com')); + * // Prints an empty string + * ``` + * @since v7.4.0, v6.13.0 + */ + function domainToASCII(domain: string): string; + /** + * Returns the Unicode serialization of the `domain`. If `domain` is an invalid + * domain, the empty string is returned. + * + * It performs the inverse operation to {@link domainToASCII}. + * + * ```js + * import url from 'node:url'; + * + * console.log(url.domainToUnicode('xn--espaol-zwa.com')); + * // Prints español.com + * console.log(url.domainToUnicode('xn--fiq228c.com')); + * // Prints 中文.com + * console.log(url.domainToUnicode('xn--iñvalid.com')); + * // Prints an empty string + * ``` + * @since v7.4.0, v6.13.0 + */ + function domainToUnicode(domain: string): string; + /** + * This function ensures the correct decodings of percent-encoded characters as + * well as ensuring a cross-platform valid absolute path string. 
+ * + * ```js + * import { fileURLToPath } from 'node:url'; + * + * const __filename = fileURLToPath(import.meta.url); + * + * new URL('file:///C:/path/').pathname; // Incorrect: /C:/path/ + * fileURLToPath('file:///C:/path/'); // Correct: C:\path\ (Windows) + * + * new URL('file://nas/foo.txt').pathname; // Incorrect: /foo.txt + * fileURLToPath('file://nas/foo.txt'); // Correct: \\nas\foo.txt (Windows) + * + * new URL('file:///你好.txt').pathname; // Incorrect: /%E4%BD%A0%E5%A5%BD.txt + * fileURLToPath('file:///你好.txt'); // Correct: /你好.txt (POSIX) + * + * new URL('file:///hello world').pathname; // Incorrect: /hello%20world + * fileURLToPath('file:///hello world'); // Correct: /hello world (POSIX) + * ``` + * @since v10.12.0 + * @param url The file URL string or URL object to convert to a path. + * @return The fully-resolved platform-specific Node.js file path. + */ + function fileURLToPath(url: string | URL): string; + /** + * This function ensures that `path` is resolved absolutely, and that the URL + * control characters are correctly encoded when converting into a File URL. + * + * ```js + * import { pathToFileURL } from 'node:url'; + * + * new URL('/foo#1', 'file:'); // Incorrect: file:///foo#1 + * pathToFileURL('/foo#1'); // Correct: file:///foo%231 (POSIX) + * + * new URL('/some/path%.c', 'file:'); // Incorrect: file:///some/path%.c + * pathToFileURL('/some/path%.c'); // Correct: file:///some/path%25.c (POSIX) + * ``` + * @since v10.12.0 + * @param path The path to convert to a File URL. + * @return The file URL object. + */ + function pathToFileURL(path: string): URL; + /** + * This utility function converts a URL object into an ordinary options object as + * expected by the `http.request()` and `https.request()` APIs. + * + * ```js + * import { urlToHttpOptions } from 'node:url'; + * const myURL = new URL('https://a:b@測試?abc#foo'); + * + * console.log(urlToHttpOptions(myURL)); + * /* + * { + * protocol: 'https:', + * hostname: 'xn--g6w251d', + * hash: '#foo', + * search: '?abc', + * pathname: '/', + * path: '/?abc', + * href: 'https://a:b@xn--g6w251d/?abc#foo', + * auth: 'a:b' + * } + * + * ``` + * @since v15.7.0, v14.18.0 + * @param url The `WHATWG URL` object to convert to an options object. + * @return Options object + */ + function urlToHttpOptions(url: URL): ClientRequestArgs; + interface URLFormatOptions { + /** + * `true` if the serialized URL string should include the username and password, `false` otherwise. + * @default true + */ + auth?: boolean | undefined; + /** + * `true` if the serialized URL string should include the fragment, `false` otherwise. + * @default true + */ + fragment?: boolean | undefined; + /** + * `true` if the serialized URL string should include the search query, `false` otherwise. + * @default true + */ + search?: boolean | undefined; + /** + * `true` if Unicode characters appearing in the host component of the URL string should be encoded directly as opposed to + * being Punycode encoded. + * @default false + */ + unicode?: boolean | undefined; + } + /** + * Browser-compatible `URL` class, implemented by following the WHATWG URL + * Standard. [Examples of parsed URLs](https://url.spec.whatwg.org/#example-url-parsing) may be found in the Standard itself. + * The `URL` class is also available on the global object. + * + * In accordance with browser conventions, all properties of `URL` objects + * are implemented as getters and setters on the class prototype, rather than as + * data properties on the object itself. 
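A sketch of the path/URL helpers documented above; the `/tmp` path and the printed results assume a POSIX system:

```js
'use strict';
const { fileURLToPath, pathToFileURL, urlToHttpOptions } = require('node:url');

// Round-trip a path with spaces through a file: URL.
const fileUrl = pathToFileURL('/tmp/hello world.txt');
console.log(fileUrl.href);           // file:///tmp/hello%20world.txt
console.log(fileURLToPath(fileUrl)); // /tmp/hello world.txt

// Flatten a WHATWG URL into http.request()-style options.
const options = urlToHttpOptions(new URL('https://user:pw@example.org/api?x=1'));
console.log(options.hostname, options.path, options.auth);
// example.org /api?x=1 user:pw
```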
Thus, unlike `legacy urlObject`s, + * using the `delete` keyword on any properties of `URL` objects (e.g. `delete myURL.protocol`, `delete myURL.pathname`, etc) has no effect but will still + * return `true`. + * @since v7.0.0, v6.13.0 + */ + class URL { + /** + * Creates a `'blob:nodedata:...'` URL string that represents the given `Blob` object and can be used to retrieve the `Blob` later. + * + * ```js + * const { + * Blob, + * resolveObjectURL, + * } = require('node:buffer'); + * + * const blob = new Blob(['hello']); + * const id = URL.createObjectURL(blob); + * + * // later... + * + * const otherBlob = resolveObjectURL(id); + * console.log(otherBlob.size); + * ``` + * + * The data stored by the registered `Blob` will be retained in memory until `URL.revokeObjectURL()` is called to remove it. + * + * `Blob` objects are registered within the current thread. If using Worker + * Threads, `Blob` objects registered within one Worker will not be available + * to other workers or the main thread. + * @since v16.7.0 + * @experimental + */ + static createObjectURL(blob: NodeBlob): string; + /** + * Removes the stored `Blob` identified by the given ID. Attempting to revoke a + * ID that isn't registered will silently fail. + * @since v16.7.0 + * @experimental + * @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`. + */ + static revokeObjectURL(id: string): void; + /** + * Checks if an `input` relative to the `base` can be parsed to a `URL`. + * + * ```js + * const isValid = URL.canParse('/foo', 'https://example.org/'); // true + * + * const isNotValid = URL.canParse('/foo'); // false + * ``` + * @since v19.9.0 + * @param input The absolute or relative input URL to parse. If `input` is relative, then `base` is required. If `input` is absolute, the `base` is ignored. If `input` is not a string, it is + * `converted to a string` first. + * @param base The base URL to resolve against if the `input` is not absolute. If `base` is not a string, it is `converted to a string` first. + */ + static canParse(input: string, base?: string): boolean; + constructor(input: string | { toString: () => string }, base?: string | URL); + /** + * Gets and sets the fragment portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/foo#bar'); + * console.log(myURL.hash); + * // Prints #bar + * + * myURL.hash = 'baz'; + * console.log(myURL.href); + * // Prints https://example.org/foo#baz + * ``` + * + * Invalid URL characters included in the value assigned to the `hash` property + * are `percent-encoded`. The selection of which characters to + * percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + hash: string; + /** + * Gets and sets the host portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org:81/foo'); + * console.log(myURL.host); + * // Prints example.org:81 + * + * myURL.host = 'example.com:82'; + * console.log(myURL.href); + * // Prints https://example.com:82/foo + * ``` + * + * Invalid host values assigned to the `host` property are ignored. + */ + host: string; + /** + * Gets and sets the host name portion of the URL. The key difference between`url.host` and `url.hostname` is that `url.hostname` does _not_ include the + * port. 
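A sketch combining `URL.canParse()` (available from Node.js v19.9.0 per the declaration above) with the `hash` and `host` setters; the input strings are illustrative:

```js
'use strict';
const input = '/docs/intro';
const base = 'https://example.org/';

if (URL.canParse(input, base)) {
  const myURL = new URL(input, base);
  myURL.hash = 'install';          // sets the fragment
  myURL.host = 'example.com:8080'; // replaces hostname and port together
  console.log(myURL.href);
  // https://example.com:8080/docs/intro#install
} else {
  console.error('not a parsable URL');
}
```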
+ * + * ```js + * const myURL = new URL('https://example.org:81/foo'); + * console.log(myURL.hostname); + * // Prints example.org + * + * // Setting the hostname does not change the port + * myURL.hostname = 'example.com'; + * console.log(myURL.href); + * // Prints https://example.com:81/foo + * + * // Use myURL.host to change the hostname and port + * myURL.host = 'example.org:82'; + * console.log(myURL.href); + * // Prints https://example.org:82/foo + * ``` + * + * Invalid host name values assigned to the `hostname` property are ignored. + */ + hostname: string; + /** + * Gets and sets the serialized URL. + * + * ```js + * const myURL = new URL('https://example.org/foo'); + * console.log(myURL.href); + * // Prints https://example.org/foo + * + * myURL.href = 'https://example.com/bar'; + * console.log(myURL.href); + * // Prints https://example.com/bar + * ``` + * + * Getting the value of the `href` property is equivalent to calling {@link toString}. + * + * Setting the value of this property to a new value is equivalent to creating a + * new `URL` object using `new URL(value)`. Each of the `URL`object's properties will be modified. + * + * If the value assigned to the `href` property is not a valid URL, a `TypeError`will be thrown. + */ + href: string; + /** + * Gets the read-only serialization of the URL's origin. + * + * ```js + * const myURL = new URL('https://example.org/foo/bar?baz'); + * console.log(myURL.origin); + * // Prints https://example.org + * ``` + * + * ```js + * const idnURL = new URL('https://測試'); + * console.log(idnURL.origin); + * // Prints https://xn--g6w251d + * + * console.log(idnURL.hostname); + * // Prints xn--g6w251d + * ``` + */ + readonly origin: string; + /** + * Gets and sets the password portion of the URL. + * + * ```js + * const myURL = new URL('https://abc:xyz@example.com'); + * console.log(myURL.password); + * // Prints xyz + * + * myURL.password = '123'; + * console.log(myURL.href); + * // Prints https://abc:123@example.com/ + * ``` + * + * Invalid URL characters included in the value assigned to the `password` property + * are `percent-encoded`. The selection of which characters to + * percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + password: string; + /** + * Gets and sets the path portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/abc/xyz?123'); + * console.log(myURL.pathname); + * // Prints /abc/xyz + * + * myURL.pathname = '/abcdef'; + * console.log(myURL.href); + * // Prints https://example.org/abcdef?123 + * ``` + * + * Invalid URL characters included in the value assigned to the `pathname` property are `percent-encoded`. The selection of which characters + * to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + pathname: string; + /** + * Gets and sets the port portion of the URL. + * + * The port value may be a number or a string containing a number in the range `0` to `65535` (inclusive). Setting the value to the default port of the `URL` objects given `protocol` will + * result in the `port` value becoming + * the empty string (`''`). + * + * The port value can be an empty string in which case the port depends on + * the protocol/scheme: + * + * + * + * Upon assigning a value to the port, the value will first be converted to a + * string using `.toString()`. + * + * If that string is invalid but it begins with a number, the leading number is + * assigned to `port`. 
+ * If the number lies outside the range denoted above, it is ignored. + * + * ```js + * const myURL = new URL('https://example.org:8888'); + * console.log(myURL.port); + * // Prints 8888 + * + * // Default ports are automatically transformed to the empty string + * // (HTTPS protocol's default port is 443) + * myURL.port = '443'; + * console.log(myURL.port); + * // Prints the empty string + * console.log(myURL.href); + * // Prints https://example.org/ + * + * myURL.port = 1234; + * console.log(myURL.port); + * // Prints 1234 + * console.log(myURL.href); + * // Prints https://example.org:1234/ + * + * // Completely invalid port strings are ignored + * myURL.port = 'abcd'; + * console.log(myURL.port); + * // Prints 1234 + * + * // Leading numbers are treated as a port number + * myURL.port = '5678abcd'; + * console.log(myURL.port); + * // Prints 5678 + * + * // Non-integers are truncated + * myURL.port = 1234.5678; + * console.log(myURL.port); + * // Prints 1234 + * + * // Out-of-range numbers which are not represented in scientific notation + * // will be ignored. + * myURL.port = 1e10; // 10000000000, will be range-checked as described below + * console.log(myURL.port); + * // Prints 1234 + * ``` + * + * Numbers which contain a decimal point, + * such as floating-point numbers or numbers in scientific notation, + * are not an exception to this rule. + * Leading numbers up to the decimal point will be set as the URL's port, + * assuming they are valid: + * + * ```js + * myURL.port = 4.567e21; + * console.log(myURL.port); + * // Prints 4 (because it is the leading number in the string '4.567e21') + * ``` + */ + port: string; + /** + * Gets and sets the protocol portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org'); + * console.log(myURL.protocol); + * // Prints https: + * + * myURL.protocol = 'ftp'; + * console.log(myURL.href); + * // Prints ftp://example.org/ + * ``` + * + * Invalid URL protocol values assigned to the `protocol` property are ignored. + */ + protocol: string; + /** + * Gets and sets the serialized query portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/abc?123'); + * console.log(myURL.search); + * // Prints ?123 + * + * myURL.search = 'abc=xyz'; + * console.log(myURL.href); + * // Prints https://example.org/abc?abc=xyz + * ``` + * + * Any invalid URL characters appearing in the value assigned the `search`property will be `percent-encoded`. The selection of which + * characters to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + search: string; + /** + * Gets the `URLSearchParams` object representing the query parameters of the + * URL. This property is read-only but the `URLSearchParams` object it provides + * can be used to mutate the URL instance; to replace the entirety of query + * parameters of the URL, use the {@link search} setter. See `URLSearchParams` documentation for details. + * + * Use care when using `.searchParams` to modify the `URL` because, + * per the WHATWG specification, the `URLSearchParams` object uses + * different rules to determine which characters to percent-encode. For + * instance, the `URL` object will not percent encode the ASCII tilde (`~`) + * character, while `URLSearchParams` will always encode it: + * + * ```js + * const myURL = new URL('https://example.org/abc?foo=~bar'); + * + * console.log(myURL.search); // prints ?foo=~bar + * + * // Modify the URL via searchParams... 
+ * myURL.searchParams.sort(); + * + * console.log(myURL.search); // prints ?foo=%7Ebar + * ``` + */ + readonly searchParams: URLSearchParams; + /** + * Gets and sets the username portion of the URL. + * + * ```js + * const myURL = new URL('https://abc:xyz@example.com'); + * console.log(myURL.username); + * // Prints abc + * + * myURL.username = '123'; + * console.log(myURL.href); + * // Prints https://123:xyz@example.com/ + * ``` + * + * Any invalid URL characters appearing in the value assigned the `username` property will be `percent-encoded`. The selection of which + * characters to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + username: string; + /** + * The `toString()` method on the `URL` object returns the serialized URL. The + * value returned is equivalent to that of {@link href} and {@link toJSON}. + */ + toString(): string; + /** + * The `toJSON()` method on the `URL` object returns the serialized URL. The + * value returned is equivalent to that of {@link href} and {@link toString}. + * + * This method is automatically called when an `URL` object is serialized + * with [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). + * + * ```js + * const myURLs = [ + * new URL('https://www.example.com'), + * new URL('https://test.example.org'), + * ]; + * console.log(JSON.stringify(myURLs)); + * // Prints ["https://www.example.com/","https://test.example.org/"] + * ``` + */ + toJSON(): string; + } + /** + * The `URLSearchParams` API provides read and write access to the query of a `URL`. The `URLSearchParams` class can also be used standalone with one of the + * four following constructors. + * The `URLSearchParams` class is also available on the global object. + * + * The WHATWG `URLSearchParams` interface and the `querystring` module have + * similar purpose, but the purpose of the `querystring` module is more + * general, as it allows the customization of delimiter characters (`&` and `=`). + * On the other hand, this API is designed purely for URL query strings. + * + * ```js + * const myURL = new URL('https://example.org/?abc=123'); + * console.log(myURL.searchParams.get('abc')); + * // Prints 123 + * + * myURL.searchParams.append('abc', 'xyz'); + * console.log(myURL.href); + * // Prints https://example.org/?abc=123&abc=xyz + * + * myURL.searchParams.delete('abc'); + * myURL.searchParams.set('a', 'b'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b + * + * const newSearchParams = new URLSearchParams(myURL.searchParams); + * // The above is equivalent to + * // const newSearchParams = new URLSearchParams(myURL.search); + * + * newSearchParams.append('a', 'c'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b + * console.log(newSearchParams.toString()); + * // Prints a=b&a=c + * + * // newSearchParams.toString() is implicitly called + * myURL.search = newSearchParams; + * console.log(myURL.href); + * // Prints https://example.org/?a=b&a=c + * newSearchParams.delete('a'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b&a=c + * ``` + * @since v7.5.0, v6.13.0 + */ + class URLSearchParams implements Iterable<[string, string]> { + constructor( + init?: + | URLSearchParams + | string + | Record + | Iterable<[string, string]> + | ReadonlyArray<[string, string]>, + ); + /** + * Append a new name-value pair to the query string. 
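The constructor declared above accepts a query string, a plain object, an iterable of pairs, or another `URLSearchParams`; a quick sketch showing that all four forms serialize the same way:

```js
'use strict';
const fromString = new URLSearchParams('a=1&b=2');
const fromObject = new URLSearchParams({ a: '1', b: '2' });
const fromPairs  = new URLSearchParams([['a', '1'], ['b', '2']]);
const fromCopy   = new URLSearchParams(fromString);

for (const params of [fromString, fromObject, fromPairs, fromCopy]) {
  console.log(params.toString()); // a=1&b=2
}
```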
+ */ + append(name: string, value: string): void; + /** + * If `value` is provided, removes all name-value pairs + * where name is `name` and value is `value`. + * + * If `value` is not provided, removes all name-value pairs whose name is `name`. + */ + delete(name: string, value?: string): void; + /** + * Returns an ES6 `Iterator` over each of the name-value pairs in the query. + * Each item of the iterator is a JavaScript `Array`. The first item of the `Array` is the `name`, the second item of the `Array` is the `value`. + * + * Alias for `urlSearchParams[@@iterator]()`. + */ + entries(): IterableIterator<[string, string]>; + /** + * Iterates over each name-value pair in the query and invokes the given function. + * + * ```js + * const myURL = new URL('https://example.org/?a=b&c=d'); + * myURL.searchParams.forEach((value, name, searchParams) => { + * console.log(name, value, myURL.searchParams === searchParams); + * }); + * // Prints: + * // a b true + * // c d true + * ``` + * @param fn Invoked for each name-value pair in the query + * @param thisArg To be used as `this` value for when `fn` is called + */ + forEach( + fn: (this: TThis, value: string, name: string, searchParams: URLSearchParams) => void, + thisArg?: TThis, + ): void; + /** + * Returns the value of the first name-value pair whose name is `name`. If there + * are no such pairs, `null` is returned. + * @return or `null` if there is no name-value pair with the given `name`. + */ + get(name: string): string | null; + /** + * Returns the values of all name-value pairs whose name is `name`. If there are + * no such pairs, an empty array is returned. + */ + getAll(name: string): string[]; + /** + * Checks if the `URLSearchParams` object contains key-value pair(s) based on `name` and an optional `value` argument. + * + * If `value` is provided, returns `true` when name-value pair with + * same `name` and `value` exists. + * + * If `value` is not provided, returns `true` if there is at least one name-value + * pair whose name is `name`. + */ + has(name: string, value?: string): boolean; + /** + * Returns an ES6 `Iterator` over the names of each name-value pair. + * + * ```js + * const params = new URLSearchParams('foo=bar&foo=baz'); + * for (const name of params.keys()) { + * console.log(name); + * } + * // Prints: + * // foo + * // foo + * ``` + */ + keys(): IterableIterator; + /** + * Sets the value in the `URLSearchParams` object associated with `name` to`value`. If there are any pre-existing name-value pairs whose names are `name`, + * set the first such pair's value to `value` and remove all others. If not, + * append the name-value pair to the query string. + * + * ```js + * const params = new URLSearchParams(); + * params.append('foo', 'bar'); + * params.append('foo', 'baz'); + * params.append('abc', 'def'); + * console.log(params.toString()); + * // Prints foo=bar&foo=baz&abc=def + * + * params.set('foo', 'def'); + * params.set('xyz', 'opq'); + * console.log(params.toString()); + * // Prints foo=def&abc=def&xyz=opq + * ``` + */ + set(name: string, value: string): void; + /** + * The total number of parameter entries. + * @since v19.8.0 + */ + readonly size: number; + /** + * Sort all existing name-value pairs in-place by their names. Sorting is done + * with a [stable sorting algorithm](https://en.wikipedia.org/wiki/Sorting_algorithm#Stability), so relative order between name-value pairs + * with the same name is preserved. + * + * This method can be used, in particular, to increase cache hits. 
+ * + * ```js + * const params = new URLSearchParams('query[]=abc&type=search&query[]=123'); + * params.sort(); + * console.log(params.toString()); + * // Prints query%5B%5D=abc&query%5B%5D=123&type=search + * ``` + * @since v7.7.0, v6.13.0 + */ + sort(): void; + /** + * Returns the search parameters serialized as a string, with characters + * percent-encoded where necessary. + */ + toString(): string; + /** + * Returns an ES6 `Iterator` over the values of each name-value pair. + */ + values(): IterableIterator; + [Symbol.iterator](): IterableIterator<[string, string]>; + } + import { URL as _URL, URLSearchParams as _URLSearchParams } from "url"; + global { + interface URLSearchParams extends _URLSearchParams {} + interface URL extends _URL {} + interface Global { + URL: typeof _URL; + URLSearchParams: typeof _URLSearchParams; + } + /** + * `URL` class is a global reference for `require('url').URL` + * https://nodejs.org/api/url.html#the-whatwg-url-api + * @since v10.0.0 + */ + var URL: typeof globalThis extends { + onmessage: any; + URL: infer T; + } ? T + : typeof _URL; + /** + * `URLSearchParams` class is a global reference for `require('url').URLSearchParams` + * https://nodejs.org/api/url.html#class-urlsearchparams + * @since v10.0.0 + */ + var URLSearchParams: typeof globalThis extends { + onmessage: any; + URLSearchParams: infer T; + } ? T + : typeof _URLSearchParams; + } +} +declare module "node:url" { + export * from "url"; +} diff --git a/node_modules/@types/node/util.d.ts b/node_modules/@types/node/util.d.ts new file mode 100644 index 0000000..08712f5 --- /dev/null +++ b/node_modules/@types/node/util.d.ts @@ -0,0 +1,2183 @@ +/** + * The `node:util` module supports the needs of Node.js internal APIs. Many of the + * utilities are useful for application and module developers as well. To access + * it: + * + * ```js + * const util = require('node:util'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/util.js) + */ +declare module "util" { + import * as types from "node:util/types"; + export interface InspectOptions { + /** + * If `true`, object's non-enumerable symbols and properties are included in the formatted result. + * `WeakMap` and `WeakSet` entries are also included as well as user defined prototype properties (excluding method properties). + * @default false + */ + showHidden?: boolean | undefined; + /** + * Specifies the number of times to recurse while formatting object. + * This is useful for inspecting large objects. + * To recurse up to the maximum call stack size pass `Infinity` or `null`. + * @default 2 + */ + depth?: number | null | undefined; + /** + * If `true`, the output is styled with ANSI color codes. Colors are customizable. + */ + colors?: boolean | undefined; + /** + * If `false`, `[util.inspect.custom](depth, opts, inspect)` functions are not invoked. + * @default true + */ + customInspect?: boolean | undefined; + /** + * If `true`, `Proxy` inspection includes the target and handler objects. + * @default false + */ + showProxy?: boolean | undefined; + /** + * Specifies the maximum number of `Array`, `TypedArray`, `WeakMap`, and `WeakSet` elements + * to include when formatting. Set to `null` or `Infinity` to show all elements. + * Set to `0` or negative to show no elements. + * @default 100 + */ + maxArrayLength?: number | null | undefined; + /** + * Specifies the maximum number of characters to + * include when formatting. Set to `null` or `Infinity` to show all elements. + * Set to `0` or negative to show no characters. 
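Going back to the `URLSearchParams` members completed above, a sketch of the cache-key use case mentioned for `sort()`, plus pair iteration (the query strings are illustrative):

```js
'use strict';
// Sketch: normalize a query string so equivalent queries share one cache key.
function cacheKey(query) {
  const params = new URLSearchParams(query);
  params.sort(); // stable sort by name
  return params.toString();
}

console.log(cacheKey('b=2&a=1') === cacheKey('a=1&b=2')); // true

// Iterating yields [name, value] pairs in insertion order.
for (const [name, value] of new URLSearchParams('x=1&x=2&y=3')) {
  console.log(name, value); // x 1, x 2, y 3
}
```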
+ * @default 10000 + */ + maxStringLength?: number | null | undefined; + /** + * The length at which input values are split across multiple lines. + * Set to `Infinity` to format the input as a single line + * (in combination with `compact` set to `true` or any number >= `1`). + * @default 80 + */ + breakLength?: number | undefined; + /** + * Setting this to `false` causes each object key + * to be displayed on a new line. It will also add new lines to text that is + * longer than `breakLength`. If set to a number, the most `n` inner elements + * are united on a single line as long as all properties fit into + * `breakLength`. Short array elements are also grouped together. Note that no + * text will be reduced below 16 characters, no matter the `breakLength` size. + * For more information, see the example below. + * @default true + */ + compact?: boolean | number | undefined; + /** + * If set to `true` or a function, all properties of an object, and `Set` and `Map` + * entries are sorted in the resulting string. + * If set to `true` the default sort is used. + * If set to a function, it is used as a compare function. + */ + sorted?: boolean | ((a: string, b: string) => number) | undefined; + /** + * If set to `true`, getters are going to be + * inspected as well. If set to `'get'` only getters without setter are going + * to be inspected. If set to `'set'` only getters having a corresponding + * setter are going to be inspected. This might cause side effects depending on + * the getter function. + * @default false + */ + getters?: "get" | "set" | boolean | undefined; + /** + * If set to `true`, an underscore is used to separate every three digits in all bigints and numbers. + * @default false + */ + numericSeparator?: boolean | undefined; + } + export type Style = + | "special" + | "number" + | "bigint" + | "boolean" + | "undefined" + | "null" + | "string" + | "symbol" + | "date" + | "regexp" + | "module"; + export type CustomInspectFunction = (depth: number, options: InspectOptionsStylized) => any; // TODO: , inspect: inspect + export interface InspectOptionsStylized extends InspectOptions { + stylize(text: string, styleType: Style): string; + } + /** + * The `util.format()` method returns a formatted string using the first argument + * as a `printf`\-like format string which can contain zero or more format + * specifiers. Each specifier is replaced with the converted value from the + * corresponding argument. Supported specifiers are: + * + * If a specifier does not have a corresponding argument, it is not replaced: + * + * ```js + * util.format('%s:%s', 'foo'); + * // Returns: 'foo:%s' + * ``` + * + * Values that are not part of the format string are formatted using`util.inspect()` if their type is not `string`. 
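A brief sketch of a few of the `InspectOptions` declared above applied through `util.inspect()`; the sample object is arbitrary:

```js
'use strict';
const util = require('node:util');

const data = { b: [1, 2, 3], a: { nested: { deep: true } } };

console.log(util.inspect(data, { depth: 1, compact: false, sorted: true }));
// keys come out sorted; levels past `depth` are abbreviated as [Object]

console.log(util.inspect(123456789, { numericSeparator: true }));
// 123_456_789
```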
+ * + * If there are more arguments passed to the `util.format()` method than the + * number of specifiers, the extra arguments are concatenated to the returned + * string, separated by spaces: + * + * ```js + * util.format('%s:%s', 'foo', 'bar', 'baz'); + * // Returns: 'foo:bar baz' + * ``` + * + * If the first argument does not contain a valid format specifier, `util.format()`returns a string that is the concatenation of all arguments separated by spaces: + * + * ```js + * util.format(1, 2, 3); + * // Returns: '1 2 3' + * ``` + * + * If only one argument is passed to `util.format()`, it is returned as it is + * without any formatting: + * + * ```js + * util.format('%% %s'); + * // Returns: '%% %s' + * ``` + * + * `util.format()` is a synchronous method that is intended as a debugging tool. + * Some input values can have a significant performance overhead that can block the + * event loop. Use this function with care and never in a hot code path. + * @since v0.5.3 + * @param format A `printf`-like format string. + */ + export function format(format?: any, ...param: any[]): string; + /** + * This function is identical to {@link format}, except in that it takes + * an `inspectOptions` argument which specifies options that are passed along to {@link inspect}. + * + * ```js + * util.formatWithOptions({ colors: true }, 'See object %O', { foo: 42 }); + * // Returns 'See object { foo: 42 }', where `42` is colored as a number + * // when printed to a terminal. + * ``` + * @since v10.0.0 + */ + export function formatWithOptions(inspectOptions: InspectOptions, format?: any, ...param: any[]): string; + /** + * Returns the string name for a numeric error code that comes from a Node.js API. + * The mapping between error codes and error names is platform-dependent. + * See `Common System Errors` for the names of common errors. + * + * ```js + * fs.access('file/that/does/not/exist', (err) => { + * const name = util.getSystemErrorName(err.errno); + * console.error(name); // ENOENT + * }); + * ``` + * @since v9.7.0 + */ + export function getSystemErrorName(err: number): string; + /** + * Returns a Map of all system error codes available from the Node.js API. + * The mapping between error codes and error names is platform-dependent. + * See `Common System Errors` for the names of common errors. + * + * ```js + * fs.access('file/that/does/not/exist', (err) => { + * const errorMap = util.getSystemErrorMap(); + * const name = errorMap.get(err.errno); + * console.error(name); // ENOENT + * }); + * ``` + * @since v16.0.0, v14.17.0 + */ + export function getSystemErrorMap(): Map; + /** + * The `util.log()` method prints the given `string` to `stdout` with an included + * timestamp. + * + * ```js + * const util = require('node:util'); + * + * util.log('Timestamped message.'); + * ``` + * @since v0.3.0 + * @deprecated Since v6.0.0 - Use a third party module instead. + */ + export function log(string: string): void; + /** + * Returns the `string` after replacing any surrogate code points + * (or equivalently, any unpaired surrogate code units) with the + * Unicode "replacement character" U+FFFD. + * @since v16.8.0, v14.18.0 + */ + export function toUSVString(string: string): string; + /** + * Creates and returns an `AbortController` instance whose `AbortSignal` is marked + * as transferable and can be used with `structuredClone()` or `postMessage()`. 
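To illustrate the system-error helpers documented above, a sketch mapping an `errno` back to its symbolic name (output shown for a typical Linux `ENOENT`):

```js
'use strict';
const fs = require('node:fs');
const util = require('node:util');

fs.access('file/that/does/not/exist', (err) => {
  if (err) {
    console.error(util.getSystemErrorName(err.errno));      // ENOENT
    console.error(util.getSystemErrorMap().get(err.errno)); // [ 'ENOENT', 'no such file or directory' ]
  }
});
```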
+ * @since v18.11.0 + * @experimental + * @returns A transferable AbortController + */ + export function transferableAbortController(): AbortController; + /** + * Marks the given `AbortSignal` as transferable so that it can be used with`structuredClone()` and `postMessage()`. + * + * ```js + * const signal = transferableAbortSignal(AbortSignal.timeout(100)); + * const channel = new MessageChannel(); + * channel.port2.postMessage(signal, [signal]); + * ``` + * @since v18.11.0 + * @experimental + * @param signal The AbortSignal + * @returns The same AbortSignal + */ + export function transferableAbortSignal(signal: AbortSignal): AbortSignal; + /** + * Listens to abort event on the provided `signal` and + * returns a promise that is fulfilled when the `signal` is + * aborted. If the passed `resource` is garbage collected before the `signal` is + * aborted, the returned promise shall remain pending indefinitely. + * + * ```js + * import { aborted } from 'node:util'; + * + * const dependent = obtainSomethingAbortable(); + * + * aborted(dependent.signal, dependent).then(() => { + * // Do something when dependent is aborted. + * }); + * + * dependent.on('event', () => { + * dependent.abort(); + * }); + * ``` + * @since v19.7.0 + * @experimental + * @param resource Any non-null entity, reference to which is held weakly. + */ + export function aborted(signal: AbortSignal, resource: any): Promise; + /** + * The `util.inspect()` method returns a string representation of `object` that is + * intended for debugging. The output of `util.inspect` may change at any time + * and should not be depended upon programmatically. Additional `options` may be + * passed that alter the result.`util.inspect()` will use the constructor's name and/or `@@toStringTag` to make + * an identifiable tag for an inspected value. + * + * ```js + * class Foo { + * get [Symbol.toStringTag]() { + * return 'bar'; + * } + * } + * + * class Bar {} + * + * const baz = Object.create(null, { [Symbol.toStringTag]: { value: 'foo' } }); + * + * util.inspect(new Foo()); // 'Foo [bar] {}' + * util.inspect(new Bar()); // 'Bar {}' + * util.inspect(baz); // '[foo] {}' + * ``` + * + * Circular references point to their anchor by using a reference index: + * + * ```js + * const { inspect } = require('node:util'); + * + * const obj = {}; + * obj.a = [obj]; + * obj.b = {}; + * obj.b.inner = obj.b; + * obj.b.obj = obj; + * + * console.log(inspect(obj)); + * // { + * // a: [ [Circular *1] ], + * // b: { inner: [Circular *2], obj: [Circular *1] } + * // } + * ``` + * + * The following example inspects all properties of the `util` object: + * + * ```js + * const util = require('node:util'); + * + * console.log(util.inspect(util, { showHidden: true, depth: null })); + * ``` + * + * The following example highlights the effect of the `compact` option: + * + * ```js + * const util = require('node:util'); + * + * const o = { + * a: [1, 2, [[ + * 'Lorem ipsum dolor sit amet,\nconsectetur adipiscing elit, sed do ' + + * 'eiusmod \ntempor incididunt ut labore et dolore magna aliqua.', + * 'test', + * 'foo']], 4], + * b: new Map([['za', 1], ['zb', 'test']]), + * }; + * console.log(util.inspect(o, { compact: true, depth: 5, breakLength: 80 })); + * + * // { a: + * // [ 1, + * // 2, + * // [ [ 'Lorem ipsum dolor sit amet,\nconsectetur [...]', // A long line + * // 'test', + * // 'foo' ] ], + * // 4 ], + * // b: Map(2) { 'za' => 1, 'zb' => 'test' } } + * + * // Setting `compact` to false or an integer creates more reader friendly output. 
+ * console.log(util.inspect(o, { compact: false, depth: 5, breakLength: 80 })); + * + * // { + * // a: [ + * // 1, + * // 2, + * // [ + * // [ + * // 'Lorem ipsum dolor sit amet,\n' + + * // 'consectetur adipiscing elit, sed do eiusmod \n' + + * // 'tempor incididunt ut labore et dolore magna aliqua.', + * // 'test', + * // 'foo' + * // ] + * // ], + * // 4 + * // ], + * // b: Map(2) { + * // 'za' => 1, + * // 'zb' => 'test' + * // } + * // } + * + * // Setting `breakLength` to e.g. 150 will print the "Lorem ipsum" text in a + * // single line. + * ``` + * + * The `showHidden` option allows [`WeakMap`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakMap) and + * [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) entries to be + * inspected. If there are more entries than `maxArrayLength`, there is no + * guarantee which entries are displayed. That means retrieving the same [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) entries twice may + * result in different output. Furthermore, entries + * with no remaining strong references may be garbage collected at any time. + * + * ```js + * const { inspect } = require('node:util'); + * + * const obj = { a: 1 }; + * const obj2 = { b: 2 }; + * const weakSet = new WeakSet([obj, obj2]); + * + * console.log(inspect(weakSet, { showHidden: true })); + * // WeakSet { { a: 1 }, { b: 2 } } + * ``` + * + * The `sorted` option ensures that an object's property insertion order does not + * impact the result of `util.inspect()`. + * + * ```js + * const { inspect } = require('node:util'); + * const assert = require('node:assert'); + * + * const o1 = { + * b: [2, 3, 1], + * a: '`a` comes before `b`', + * c: new Set([2, 3, 1]), + * }; + * console.log(inspect(o1, { sorted: true })); + * // { a: '`a` comes before `b`', b: [ 2, 3, 1 ], c: Set(3) { 1, 2, 3 } } + * console.log(inspect(o1, { sorted: (a, b) => b.localeCompare(a) })); + * // { c: Set(3) { 3, 2, 1 }, b: [ 2, 3, 1 ], a: '`a` comes before `b`' } + * + * const o2 = { + * c: new Set([2, 1, 3]), + * a: '`a` comes before `b`', + * b: [2, 3, 1], + * }; + * assert.strict.equal( + * inspect(o1, { sorted: true }), + * inspect(o2, { sorted: true }), + * ); + * ``` + * + * The `numericSeparator` option adds an underscore every three digits to all + * numbers. + * + * ```js + * const { inspect } = require('node:util'); + * + * const thousand = 1_000; + * const million = 1_000_000; + * const bigNumber = 123_456_789n; + * const bigDecimal = 1_234.123_45; + * + * console.log(inspect(thousand, { numericSeparator: true })); + * // 1_000 + * console.log(inspect(million, { numericSeparator: true })); + * // 1_000_000 + * console.log(inspect(bigNumber, { numericSeparator: true })); + * // 123_456_789n + * console.log(inspect(bigDecimal, { numericSeparator: true })); + * // 1_234.123_45 + * ``` + * + * `util.inspect()` is a synchronous method intended for debugging. Its maximum + * output length is approximately 128 MiB. Inputs that result in longer output will + * be truncated. + * @since v0.3.0 + * @param object Any JavaScript primitive or `Object`. + * @return The representation of `object`. 
+ */ + export function inspect(object: any, showHidden?: boolean, depth?: number | null, color?: boolean): string; + export function inspect(object: any, options?: InspectOptions): string; + export namespace inspect { + let colors: NodeJS.Dict<[number, number]>; + let styles: { + [K in Style]: string; + }; + let defaultOptions: InspectOptions; + /** + * Allows changing inspect settings from the repl. + */ + let replDefaults: InspectOptions; + /** + * That can be used to declare custom inspect functions. + */ + const custom: unique symbol; + } + /** + * Alias for [`Array.isArray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/isArray). + * + * Returns `true` if the given `object` is an `Array`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isArray([]); + * // Returns: true + * util.isArray(new Array()); + * // Returns: true + * util.isArray({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use `isArray` instead. + */ + export function isArray(object: unknown): object is unknown[]; + /** + * Returns `true` if the given `object` is a `RegExp`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isRegExp(/some regexp/); + * // Returns: true + * util.isRegExp(new RegExp('another regexp')); + * // Returns: true + * util.isRegExp({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Deprecated + */ + export function isRegExp(object: unknown): object is RegExp; + /** + * Returns `true` if the given `object` is a `Date`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isDate(new Date()); + * // Returns: true + * util.isDate(Date()); + * // false (without 'new' returns a String) + * util.isDate({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use {@link types.isDate} instead. + */ + export function isDate(object: unknown): object is Date; + /** + * Returns `true` if the given `object` is an `Error`. Otherwise, returns`false`. + * + * ```js + * const util = require('node:util'); + * + * util.isError(new Error()); + * // Returns: true + * util.isError(new TypeError()); + * // Returns: true + * util.isError({ name: 'Error', message: 'an error occurred' }); + * // Returns: false + * ``` + * + * This method relies on `Object.prototype.toString()` behavior. It is + * possible to obtain an incorrect result when the `object` argument manipulates`@@toStringTag`. + * + * ```js + * const util = require('node:util'); + * const obj = { name: 'Error', message: 'an error occurred' }; + * + * util.isError(obj); + * // Returns: false + * obj[Symbol.toStringTag] = 'Error'; + * util.isError(obj); + * // Returns: true + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use {@link types.isNativeError} instead. + */ + export function isError(object: unknown): object is Error; + /** + * Usage of `util.inherits()` is discouraged. Please use the ES6 `class` and`extends` keywords to get language level inheritance support. Also note + * that the two styles are [semantically incompatible](https://github.com/nodejs/node/issues/4179). + * + * Inherit the prototype methods from one [constructor](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/constructor) into another. The + * prototype of `constructor` will be set to a new object created from`superConstructor`. 
+ * + * This mainly adds some input validation on top of`Object.setPrototypeOf(constructor.prototype, superConstructor.prototype)`. + * As an additional convenience, `superConstructor` will be accessible + * through the `constructor.super_` property. + * + * ```js + * const util = require('node:util'); + * const EventEmitter = require('node:events'); + * + * function MyStream() { + * EventEmitter.call(this); + * } + * + * util.inherits(MyStream, EventEmitter); + * + * MyStream.prototype.write = function(data) { + * this.emit('data', data); + * }; + * + * const stream = new MyStream(); + * + * console.log(stream instanceof EventEmitter); // true + * console.log(MyStream.super_ === EventEmitter); // true + * + * stream.on('data', (data) => { + * console.log(`Received data: "${data}"`); + * }); + * stream.write('It works!'); // Received data: "It works!" + * ``` + * + * ES6 example using `class` and `extends`: + * + * ```js + * const EventEmitter = require('node:events'); + * + * class MyStream extends EventEmitter { + * write(data) { + * this.emit('data', data); + * } + * } + * + * const stream = new MyStream(); + * + * stream.on('data', (data) => { + * console.log(`Received data: "${data}"`); + * }); + * stream.write('With ES6'); + * ``` + * @since v0.3.0 + * @legacy Use ES2015 class syntax and `extends` keyword instead. + */ + export function inherits(constructor: unknown, superConstructor: unknown): void; + export type DebugLoggerFunction = (msg: string, ...param: unknown[]) => void; + export interface DebugLogger extends DebugLoggerFunction { + enabled: boolean; + } + /** + * The `util.debuglog()` method is used to create a function that conditionally + * writes debug messages to `stderr` based on the existence of the `NODE_DEBUG`environment variable. If the `section` name appears within the value of that + * environment variable, then the returned function operates similar to `console.error()`. If not, then the returned function is a no-op. + * + * ```js + * const util = require('node:util'); + * const debuglog = util.debuglog('foo'); + * + * debuglog('hello from foo [%d]', 123); + * ``` + * + * If this program is run with `NODE_DEBUG=foo` in the environment, then + * it will output something like: + * + * ```console + * FOO 3245: hello from foo [123] + * ``` + * + * where `3245` is the process id. If it is not run with that + * environment variable set, then it will not print anything. + * + * The `section` supports wildcard also: + * + * ```js + * const util = require('node:util'); + * const debuglog = util.debuglog('foo-bar'); + * + * debuglog('hi there, it\'s foo-bar [%d]', 2333); + * ``` + * + * if it is run with `NODE_DEBUG=foo*` in the environment, then it will output + * something like: + * + * ```console + * FOO-BAR 3257: hi there, it's foo-bar [2333] + * ``` + * + * Multiple comma-separated `section` names may be specified in the `NODE_DEBUG`environment variable: `NODE_DEBUG=fs,net,tls`. + * + * The optional `callback` argument can be used to replace the logging function + * with a different function that doesn't have any initialization or + * unnecessary wrapping. + * + * ```js + * const util = require('node:util'); + * let debuglog = util.debuglog('internals', (debug) => { + * // Replace with a logging function that optimizes out + * // testing if the section is enabled + * debuglog = debug; + * }); + * ``` + * @since v0.11.3 + * @param section A string identifying the portion of the application for which the `debuglog` function is being created. 
+ * @param callback A callback invoked the first time the logging function is called with a function argument that is a more optimized logging function. + * @return The logging function + */ + export function debuglog(section: string, callback?: (fn: DebugLoggerFunction) => void): DebugLogger; + export const debug: typeof debuglog; + /** + * Returns `true` if the given `object` is a `Boolean`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isBoolean(1); + * // Returns: false + * util.isBoolean(0); + * // Returns: false + * util.isBoolean(false); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'boolean'` instead. + */ + export function isBoolean(object: unknown): object is boolean; + /** + * Returns `true` if the given `object` is a `Buffer`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isBuffer({ length: 0 }); + * // Returns: false + * util.isBuffer([]); + * // Returns: false + * util.isBuffer(Buffer.from('hello world')); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `isBuffer` instead. + */ + export function isBuffer(object: unknown): object is Buffer; + /** + * Returns `true` if the given `object` is a `Function`. Otherwise, returns`false`. + * + * ```js + * const util = require('node:util'); + * + * function Foo() {} + * const Bar = () => {}; + * + * util.isFunction({}); + * // Returns: false + * util.isFunction(Foo); + * // Returns: true + * util.isFunction(Bar); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'function'` instead. + */ + export function isFunction(object: unknown): boolean; + /** + * Returns `true` if the given `object` is strictly `null`. Otherwise, returns`false`. + * + * ```js + * const util = require('node:util'); + * + * util.isNull(0); + * // Returns: false + * util.isNull(undefined); + * // Returns: false + * util.isNull(null); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === null` instead. + */ + export function isNull(object: unknown): object is null; + /** + * Returns `true` if the given `object` is `null` or `undefined`. Otherwise, + * returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isNullOrUndefined(0); + * // Returns: false + * util.isNullOrUndefined(undefined); + * // Returns: true + * util.isNullOrUndefined(null); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === undefined || value === null` instead. + */ + export function isNullOrUndefined(object: unknown): object is null | undefined; + /** + * Returns `true` if the given `object` is a `Number`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isNumber(false); + * // Returns: false + * util.isNumber(Infinity); + * // Returns: true + * util.isNumber(0); + * // Returns: true + * util.isNumber(NaN); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'number'` instead. + */ + export function isNumber(object: unknown): object is number; + /** + * Returns `true` if the given `object` is strictly an `Object`**and** not a`Function` (even though functions are objects in JavaScript). + * Otherwise, returns `false`. 
+ * + * ```js + * const util = require('node:util'); + * + * util.isObject(5); + * // Returns: false + * util.isObject(null); + * // Returns: false + * util.isObject({}); + * // Returns: true + * util.isObject(() => {}); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value !== null && typeof value === 'object'` instead. + */ + export function isObject(object: unknown): boolean; + /** + * Returns `true` if the given `object` is a primitive type. Otherwise, returns`false`. + * + * ```js + * const util = require('node:util'); + * + * util.isPrimitive(5); + * // Returns: true + * util.isPrimitive('foo'); + * // Returns: true + * util.isPrimitive(false); + * // Returns: true + * util.isPrimitive(null); + * // Returns: true + * util.isPrimitive(undefined); + * // Returns: true + * util.isPrimitive({}); + * // Returns: false + * util.isPrimitive(() => {}); + * // Returns: false + * util.isPrimitive(/^$/); + * // Returns: false + * util.isPrimitive(new Date()); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `(typeof value !== 'object' && typeof value !== 'function') || value === null` instead. + */ + export function isPrimitive(object: unknown): boolean; + /** + * Returns `true` if the given `object` is a `string`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isString(''); + * // Returns: true + * util.isString('foo'); + * // Returns: true + * util.isString(String('foo')); + * // Returns: true + * util.isString(5); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'string'` instead. + */ + export function isString(object: unknown): object is string; + /** + * Returns `true` if the given `object` is a `Symbol`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isSymbol(5); + * // Returns: false + * util.isSymbol('foo'); + * // Returns: false + * util.isSymbol(Symbol('foo')); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'symbol'` instead. + */ + export function isSymbol(object: unknown): object is symbol; + /** + * Returns `true` if the given `object` is `undefined`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * const foo = undefined; + * util.isUndefined(5); + * // Returns: false + * util.isUndefined(foo); + * // Returns: true + * util.isUndefined(null); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === undefined` instead. + */ + export function isUndefined(object: unknown): object is undefined; + /** + * The `util.deprecate()` method wraps `fn` (which may be a function or class) in + * such a way that it is marked as deprecated. + * + * ```js + * const util = require('node:util'); + * + * exports.obsoleteFunction = util.deprecate(() => { + * // Do something here. + * }, 'obsoleteFunction() is deprecated. Use newShinyFunction() instead.'); + * ``` + * + * When called, `util.deprecate()` will return a function that will emit a`DeprecationWarning` using the `'warning'` event. The warning will + * be emitted and printed to `stderr` the first time the returned function is + * called. After the warning is emitted, the wrapped function is called without + * emitting a warning. + * + * If the same optional `code` is supplied in multiple calls to `util.deprecate()`, + * the warning will be emitted only once for that `code`. 
+ * + * ```js + * const util = require('node:util'); + * + * const fn1 = util.deprecate(someFunction, someMessage, 'DEP0001'); + * const fn2 = util.deprecate(someOtherFunction, someOtherMessage, 'DEP0001'); + * fn1(); // Emits a deprecation warning with code DEP0001 + * fn2(); // Does not emit a deprecation warning because it has the same code + * ``` + * + * If either the `--no-deprecation` or `--no-warnings` command-line flags are + * used, or if the `process.noDeprecation` property is set to `true`_prior_ to + * the first deprecation warning, the `util.deprecate()` method does nothing. + * + * If the `--trace-deprecation` or `--trace-warnings` command-line flags are set, + * or the `process.traceDeprecation` property is set to `true`, a warning and a + * stack trace are printed to `stderr` the first time the deprecated function is + * called. + * + * If the `--throw-deprecation` command-line flag is set, or the`process.throwDeprecation` property is set to `true`, then an exception will be + * thrown when the deprecated function is called. + * + * The `--throw-deprecation` command-line flag and `process.throwDeprecation`property take precedence over `--trace-deprecation` and`process.traceDeprecation`. + * @since v0.8.0 + * @param fn The function that is being deprecated. + * @param msg A warning message to display when the deprecated function is invoked. + * @param code A deprecation code. See the `list of deprecated APIs` for a list of codes. + * @return The deprecated function wrapped to emit a warning. + */ + export function deprecate(fn: T, msg: string, code?: string): T; + /** + * Returns `true` if there is deep strict equality between `val1` and `val2`. + * Otherwise, returns `false`. + * + * See `assert.deepStrictEqual()` for more information about deep strict + * equality. + * @since v9.0.0 + */ + export function isDeepStrictEqual(val1: unknown, val2: unknown): boolean; + /** + * Returns `str` with any ANSI escape codes removed. + * + * ```js + * console.log(util.stripVTControlCharacters('\u001B[4mvalue\u001B[0m')); + * // Prints "value" + * ``` + * @since v16.11.0 + */ + export function stripVTControlCharacters(str: string): string; + /** + * Takes an `async` function (or a function that returns a `Promise`) and returns a + * function following the error-first callback style, i.e. taking + * an `(err, value) => ...` callback as the last argument. In the callback, the + * first argument will be the rejection reason (or `null` if the `Promise`resolved), and the second argument will be the resolved value. + * + * ```js + * const util = require('node:util'); + * + * async function fn() { + * return 'hello world'; + * } + * const callbackFunction = util.callbackify(fn); + * + * callbackFunction((err, ret) => { + * if (err) throw err; + * console.log(ret); + * }); + * ``` + * + * Will print: + * + * ```text + * hello world + * ``` + * + * The callback is executed asynchronously, and will have a limited stack trace. + * If the callback throws, the process will emit an `'uncaughtException'` event, and if not handled will exit. + * + * Since `null` has a special meaning as the first argument to a callback, if a + * wrapped function rejects a `Promise` with a falsy value as a reason, the value + * is wrapped in an `Error` with the original value stored in a field named`reason`. 
+ * + * ```js + * function fn() { + * return Promise.reject(null); + * } + * const callbackFunction = util.callbackify(fn); + * + * callbackFunction((err, ret) => { + * // When the Promise was rejected with `null` it is wrapped with an Error and + * // the original value is stored in `reason`. + * err && Object.hasOwn(err, 'reason') && err.reason === null; // true + * }); + * ``` + * @since v8.2.0 + * @param fn An `async` function + * @return a callback style function + */ + export function callbackify(fn: () => Promise): (callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify( + fn: () => Promise, + ): (callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void; + export function callbackify( + fn: (arg1: T1) => Promise, + ): (arg1: T1, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify( + fn: (arg1: T1) => Promise, + ): (arg1: T1, callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2) => Promise, + ): (arg1: T1, arg2: T2, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2) => Promise, + ): (arg1: T1, arg2: T2, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3) => Promise, + ): (arg1: T1, arg2: T2, arg3: T3, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3) => Promise, + ): (arg1: T1, arg2: T2, arg3: T3, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise, + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise, + ): ( + arg1: T1, + arg2: T2, + arg3: T3, + arg4: T4, + callback: (err: NodeJS.ErrnoException | null, result: TResult) => void, + ) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise, + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise, + ): ( + arg1: T1, + arg2: T2, + arg3: T3, + arg4: T4, + arg5: T5, + callback: (err: NodeJS.ErrnoException | null, result: TResult) => void, + ) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6) => Promise, + ): ( + arg1: T1, + arg2: T2, + arg3: T3, + arg4: T4, + arg5: T5, + arg6: T6, + callback: (err: NodeJS.ErrnoException) => void, + ) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6) => Promise, + ): ( + arg1: T1, + arg2: T2, + arg3: T3, + arg4: T4, + arg5: T5, + arg6: T6, + callback: (err: NodeJS.ErrnoException | null, result: TResult) => void, + ) => void; + export interface CustomPromisifyLegacy extends Function { + __promisify__: TCustom; + } + export interface CustomPromisifySymbol extends Function { + [promisify.custom]: TCustom; + } + export type CustomPromisify = + | CustomPromisifySymbol + | CustomPromisifyLegacy; + /** + * Takes a function following the common error-first callback style, i.e. 
taking + * an `(err, value) => ...` callback as the last argument, and returns a version + * that returns promises. + * + * ```js + * const util = require('node:util'); + * const fs = require('node:fs'); + * + * const stat = util.promisify(fs.stat); + * stat('.').then((stats) => { + * // Do something with `stats` + * }).catch((error) => { + * // Handle the error. + * }); + * ``` + * + * Or, equivalently using `async function`s: + * + * ```js + * const util = require('node:util'); + * const fs = require('node:fs'); + * + * const stat = util.promisify(fs.stat); + * + * async function callStat() { + * const stats = await stat('.'); + * console.log(`This directory is owned by ${stats.uid}`); + * } + * + * callStat(); + * ``` + * + * If there is an `original[util.promisify.custom]` property present, `promisify`will return its value, see `Custom promisified functions`. + * + * `promisify()` assumes that `original` is a function taking a callback as its + * final argument in all cases. If `original` is not a function, `promisify()`will throw an error. If `original` is a function but its last argument is not + * an error-first callback, it will still be passed an error-first + * callback as its last argument. + * + * Using `promisify()` on class methods or other methods that use `this` may not + * work as expected unless handled specially: + * + * ```js + * const util = require('node:util'); + * + * class Foo { + * constructor() { + * this.a = 42; + * } + * + * bar(callback) { + * callback(null, this.a); + * } + * } + * + * const foo = new Foo(); + * + * const naiveBar = util.promisify(foo.bar); + * // TypeError: Cannot read property 'a' of undefined + * // naiveBar().then(a => console.log(a)); + * + * naiveBar.call(foo).then((a) => console.log(a)); // '42' + * + * const bindBar = naiveBar.bind(foo); + * bindBar().then((a) => console.log(a)); // '42' + * ``` + * @since v8.0.0 + */ + export function promisify(fn: CustomPromisify): TCustom; + export function promisify( + fn: (callback: (err: any, result: TResult) => void) => void, + ): () => Promise; + export function promisify(fn: (callback: (err?: any) => void) => void): () => Promise; + export function promisify( + fn: (arg1: T1, callback: (err: any, result: TResult) => void) => void, + ): (arg1: T1) => Promise; + export function promisify(fn: (arg1: T1, callback: (err?: any) => void) => void): (arg1: T1) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, callback: (err: any, result: TResult) => void) => void, + ): (arg1: T1, arg2: T2) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, callback: (err?: any) => void) => void, + ): (arg1: T1, arg2: T2) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, callback: (err: any, result: TResult) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, callback: (err?: any) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: any, result: TResult) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err?: any) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: any, result: TResult) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3, 
arg4: T4, arg5: T5) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err?: any) => void) => void, + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise; + export function promisify(fn: Function): Function; + export namespace promisify { + /** + * That can be used to declare custom promisified variants of functions. + */ + const custom: unique symbol; + } + /** + * An implementation of the [WHATWG Encoding Standard](https://encoding.spec.whatwg.org/) `TextDecoder` API. + * + * ```js + * const decoder = new TextDecoder(); + * const u8arr = new Uint8Array([72, 101, 108, 108, 111]); + * console.log(decoder.decode(u8arr)); // Hello + * ``` + * @since v8.3.0 + */ + export class TextDecoder { + /** + * The encoding supported by the `TextDecoder` instance. + */ + readonly encoding: string; + /** + * The value will be `true` if decoding errors result in a `TypeError` being + * thrown. + */ + readonly fatal: boolean; + /** + * The value will be `true` if the decoding result will include the byte order + * mark. + */ + readonly ignoreBOM: boolean; + constructor( + encoding?: string, + options?: { + fatal?: boolean | undefined; + ignoreBOM?: boolean | undefined; + }, + ); + /** + * Decodes the `input` and returns a string. If `options.stream` is `true`, any + * incomplete byte sequences occurring at the end of the `input` are buffered + * internally and emitted after the next call to `textDecoder.decode()`. + * + * If `textDecoder.fatal` is `true`, decoding errors that occur will result in a`TypeError` being thrown. + * @param input An `ArrayBuffer`, `DataView`, or `TypedArray` instance containing the encoded data. + */ + decode( + input?: NodeJS.ArrayBufferView | ArrayBuffer | null, + options?: { + stream?: boolean | undefined; + }, + ): string; + } + export interface EncodeIntoResult { + /** + * The read Unicode code units of input. + */ + read: number; + /** + * The written UTF-8 bytes of output. + */ + written: number; + } + export { types }; + + //// TextEncoder/Decoder + /** + * An implementation of the [WHATWG Encoding Standard](https://encoding.spec.whatwg.org/) `TextEncoder` API. All + * instances of `TextEncoder` only support UTF-8 encoding. + * + * ```js + * const encoder = new TextEncoder(); + * const uint8array = encoder.encode('this is some data'); + * ``` + * + * The `TextEncoder` class is also available on the global object. + * @since v8.3.0 + */ + export class TextEncoder { + /** + * The encoding supported by the `TextEncoder` instance. Always set to `'utf-8'`. + */ + readonly encoding: string; + /** + * UTF-8 encodes the `input` string and returns a `Uint8Array` containing the + * encoded bytes. + * @param [input='an empty string'] The text to encode. + */ + encode(input?: string): Uint8Array; + /** + * UTF-8 encodes the `src` string to the `dest` Uint8Array and returns an object + * containing the read Unicode code units and written UTF-8 bytes. + * + * ```js + * const encoder = new TextEncoder(); + * const src = 'this is some data'; + * const dest = new Uint8Array(10); + * const { read, written } = encoder.encodeInto(src, dest); + * ``` + * @param src The text to encode. + * @param dest The array to hold the encode result. 
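+ *
+ * An informal note (behavior summarized from the WHATWG Encoding Standard rather
+ * than quoted from the upstream text): if `dest` is too small to hold the whole
+ * string, only as much as fits is encoded, so `read` and `written` can be compared
+ * against the source length and the buffer size to detect a partial encode.
+ *
+ * ```js
+ * const encoder = new TextEncoder();
+ * const small = new Uint8Array(4);
+ * const { read, written } = encoder.encodeInto('hello world', small);
+ * // With a 4-byte destination and ASCII input, only 'hell' fits:
+ * console.log(read, written); // 4 4
+ * console.log(read < 'hello world'.length); // true, the encode was partial
+ * ```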
+ */ + encodeInto(src: string, dest: Uint8Array): EncodeIntoResult; + } + import { TextDecoder as _TextDecoder, TextEncoder as _TextEncoder } from "util"; + global { + /** + * `TextDecoder` class is a global reference for `require('util').TextDecoder` + * https://nodejs.org/api/globals.html#textdecoder + * @since v11.0.0 + */ + var TextDecoder: typeof globalThis extends { + onmessage: any; + TextDecoder: infer TextDecoder; + } ? TextDecoder + : typeof _TextDecoder; + /** + * `TextEncoder` class is a global reference for `require('util').TextEncoder` + * https://nodejs.org/api/globals.html#textencoder + * @since v11.0.0 + */ + var TextEncoder: typeof globalThis extends { + onmessage: any; + TextEncoder: infer TextEncoder; + } ? TextEncoder + : typeof _TextEncoder; + } + + //// parseArgs + /** + * Provides a higher level API for command-line argument parsing than interacting + * with `process.argv` directly. Takes a specification for the expected arguments + * and returns a structured object with the parsed options and positionals. + * + * ```js + * import { parseArgs } from 'node:util'; + * const args = ['-f', '--bar', 'b']; + * const options = { + * foo: { + * type: 'boolean', + * short: 'f', + * }, + * bar: { + * type: 'string', + * }, + * }; + * const { + * values, + * positionals, + * } = parseArgs({ args, options }); + * console.log(values, positionals); + * // Prints: [Object: null prototype] { foo: true, bar: 'b' } [] + * ``` + * @since v18.3.0, v16.17.0 + * @param config Used to provide arguments for parsing and to configure the parser. `config` supports the following properties: + * @return The parsed command line arguments: + */ + export function parseArgs(config?: T): ParsedResults; + interface ParseArgsOptionConfig { + /** + * Type of argument. + */ + type: "string" | "boolean"; + /** + * Whether this option can be provided multiple times. + * If `true`, all values will be collected in an array. + * If `false`, values for the option are last-wins. + * @default false. + */ + multiple?: boolean | undefined; + /** + * A single character alias for the option. + */ + short?: string | undefined; + /** + * The default option value when it is not set by args. + * It must be of the same type as the the `type` property. + * When `multiple` is `true`, it must be an array. + * @since v18.11.0 + */ + default?: string | boolean | string[] | boolean[] | undefined; + } + interface ParseArgsOptionsConfig { + [longOption: string]: ParseArgsOptionConfig; + } + export interface ParseArgsConfig { + /** + * Array of argument strings. + */ + args?: string[] | undefined; + /** + * Used to describe arguments known to the parser. + */ + options?: ParseArgsOptionsConfig | undefined; + /** + * Should an error be thrown when unknown arguments are encountered, + * or when arguments are passed that do not match the `type` configured in `options`. + * @default true + */ + strict?: boolean | undefined; + /** + * Whether this command accepts positional arguments. + */ + allowPositionals?: boolean | undefined; + /** + * Return the parsed tokens. This is useful for extending the built-in behavior, + * from adding additional checks through to reprocessing the tokens in different ways. + * @default false + */ + tokens?: boolean | undefined; + } + /* + IfDefaultsTrue and IfDefaultsFalse are helpers to handle default values for missing boolean properties. 
+ TypeScript does not have exact types for objects: https://github.com/microsoft/TypeScript/issues/12936 + This means it is impossible to distinguish between "field X is definitely not present" and "field X may or may not be present". + But we expect users to generally provide their config inline or `as const`, which means TS will always know whether a given field is present. + So this helper treats "not definitely present" (i.e., not `extends boolean`) as being "definitely not present", i.e. it should have its default value. + This is technically incorrect but is a much nicer UX for the common case. + The IfDefaultsTrue version is for things which default to true; the IfDefaultsFalse version is for things which default to false. + */ + type IfDefaultsTrue = T extends true ? IfTrue + : T extends false ? IfFalse + : IfTrue; + + // we put the `extends false` condition first here because `undefined` compares like `any` when `strictNullChecks: false` + type IfDefaultsFalse = T extends false ? IfFalse + : T extends true ? IfTrue + : IfFalse; + + type ExtractOptionValue = IfDefaultsTrue< + T["strict"], + O["type"] extends "string" ? string : O["type"] extends "boolean" ? boolean : string | boolean, + string | boolean + >; + + type ParsedValues = + & IfDefaultsTrue + & (T["options"] extends ParseArgsOptionsConfig ? { + -readonly [LongOption in keyof T["options"]]: IfDefaultsFalse< + T["options"][LongOption]["multiple"], + undefined | Array>, + undefined | ExtractOptionValue + >; + } + : {}); + + type ParsedPositionals = IfDefaultsTrue< + T["strict"], + IfDefaultsFalse, + IfDefaultsTrue + >; + + type PreciseTokenForOptions< + K extends string, + O extends ParseArgsOptionConfig, + > = O["type"] extends "string" ? { + kind: "option"; + index: number; + name: K; + rawName: string; + value: string; + inlineValue: boolean; + } + : O["type"] extends "boolean" ? { + kind: "option"; + index: number; + name: K; + rawName: string; + value: undefined; + inlineValue: undefined; + } + : OptionToken & { name: K }; + + type TokenForOptions< + T extends ParseArgsConfig, + K extends keyof T["options"] = keyof T["options"], + > = K extends unknown + ? T["options"] extends ParseArgsOptionsConfig ? PreciseTokenForOptions + : OptionToken + : never; + + type ParsedOptionToken = IfDefaultsTrue, OptionToken>; + + type ParsedPositionalToken = IfDefaultsTrue< + T["strict"], + IfDefaultsFalse, + IfDefaultsTrue + >; + + type ParsedTokens = Array< + ParsedOptionToken | ParsedPositionalToken | { kind: "option-terminator"; index: number } + >; + + type PreciseParsedResults = IfDefaultsFalse< + T["tokens"], + { + values: ParsedValues; + positionals: ParsedPositionals; + tokens: ParsedTokens; + }, + { + values: ParsedValues; + positionals: ParsedPositionals; + } + >; + + type OptionToken = + | { kind: "option"; index: number; name: string; rawName: string; value: string; inlineValue: boolean } + | { + kind: "option"; + index: number; + name: string; + rawName: string; + value: undefined; + inlineValue: undefined; + }; + + type Token = + | OptionToken + | { kind: "positional"; index: number; value: string } + | { kind: "option-terminator"; index: number }; + + // If ParseArgsConfig extends T, then the user passed config constructed elsewhere. + // So we can't rely on the `"not definitely present" implies "definitely not present"` assumption mentioned above. + type ParsedResults = ParseArgsConfig extends T ? 
{ + values: { + [longOption: string]: undefined | string | boolean | Array; + }; + positionals: string[]; + tokens?: Token[]; + } + : PreciseParsedResults; + + /** + * An implementation of [the MIMEType class](https://bmeck.github.io/node-proposal-mime-api/). + * + * In accordance with browser conventions, all properties of `MIMEType` objects + * are implemented as getters and setters on the class prototype, rather than as + * data properties on the object itself. + * + * A MIME string is a structured string containing multiple meaningful + * components. When parsed, a `MIMEType` object is returned containing + * properties for each of these components. + * @since v19.1.0, v18.13.0 + * @experimental + */ + export class MIMEType { + /** + * Creates a new MIMEType object by parsing the input. + * + * A `TypeError` will be thrown if the `input` is not a valid MIME. + * Note that an effort will be made to coerce the given values into strings. + * @param input The input MIME to parse. + */ + constructor(input: string | { toString: () => string }); + + /** + * Gets and sets the type portion of the MIME. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const myMIME = new MIMEType('text/javascript'); + * console.log(myMIME.type); + * // Prints: text + * myMIME.type = 'application'; + * console.log(myMIME.type); + * // Prints: application + * console.log(String(myMIME)); + * // Prints: application/javascript + * ``` + */ + type: string; + /** + * Gets and sets the subtype portion of the MIME. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const myMIME = new MIMEType('text/ecmascript'); + * console.log(myMIME.subtype); + * // Prints: ecmascript + * myMIME.subtype = 'javascript'; + * console.log(myMIME.subtype); + * // Prints: javascript + * console.log(String(myMIME)); + * // Prints: text/javascript + * ``` + */ + subtype: string; + /** + * Gets the essence of the MIME. This property is read only. + * Use `mime.type` or `mime.subtype` to alter the MIME. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const myMIME = new MIMEType('text/javascript;key=value'); + * console.log(myMIME.essence); + * // Prints: text/javascript + * myMIME.type = 'application'; + * console.log(myMIME.essence); + * // Prints: application/javascript + * console.log(String(myMIME)); + * // Prints: application/javascript;key=value + * ``` + */ + readonly essence: string; + /** + * Gets the `MIMEParams` object representing the + * parameters of the MIME. This property is read-only. See `MIMEParams` documentation for details. + */ + readonly params: MIMEParams; + /** + * The `toString()` method on the `MIMEType` object returns the serialized MIME. + * + * Because of the need for standard compliance, this method does not allow users + * to customize the serialization process of the MIME. + */ + toString(): string; + } + /** + * The `MIMEParams` API provides read and write access to the parameters of a`MIMEType`. + * @since v19.1.0, v18.13.0 + */ + export class MIMEParams { + /** + * Remove all name-value pairs whose name is `name`. + */ + delete(name: string): void; + /** + * Returns an iterator over each of the name-value pairs in the parameters. + * Each item of the iterator is a JavaScript `Array`. The first item of the array + * is the `name`, the second item of the array is the `value`. + */ + entries(): IterableIterator<[name: string, value: string]>; + /** + * Returns the value of the first name-value pair whose name is `name`. 
If there + * are no such pairs, `null` is returned. + * @return or `null` if there is no name-value pair with the given `name`. + */ + get(name: string): string | null; + /** + * Returns `true` if there is at least one name-value pair whose name is `name`. + */ + has(name: string): boolean; + /** + * Returns an iterator over the names of each name-value pair. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const { params } = new MIMEType('text/plain;foo=0;bar=1'); + * for (const name of params.keys()) { + * console.log(name); + * } + * // Prints: + * // foo + * // bar + * ``` + */ + keys(): IterableIterator; + /** + * Sets the value in the `MIMEParams` object associated with `name` to`value`. If there are any pre-existing name-value pairs whose names are `name`, + * set the first such pair's value to `value`. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const { params } = new MIMEType('text/plain;foo=0;bar=1'); + * params.set('foo', 'def'); + * params.set('baz', 'xyz'); + * console.log(params.toString()); + * // Prints: foo=def;bar=1;baz=xyz + * ``` + */ + set(name: string, value: string): void; + /** + * Returns an iterator over the values of each name-value pair. + */ + values(): IterableIterator; + /** + * Returns an iterator over each of the name-value pairs in the parameters. + */ + [Symbol.iterator]: typeof MIMEParams.prototype.entries; + } +} +declare module "util/types" { + import { KeyObject, webcrypto } from "node:crypto"; + /** + * Returns `true` if the value is a built-in [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) or + * [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instance. + * + * See also `util.types.isArrayBuffer()` and `util.types.isSharedArrayBuffer()`. + * + * ```js + * util.types.isAnyArrayBuffer(new ArrayBuffer()); // Returns true + * util.types.isAnyArrayBuffer(new SharedArrayBuffer()); // Returns true + * ``` + * @since v10.0.0 + */ + function isAnyArrayBuffer(object: unknown): object is ArrayBufferLike; + /** + * Returns `true` if the value is an `arguments` object. + * + * ```js + * function foo() { + * util.types.isArgumentsObject(arguments); // Returns true + * } + * ``` + * @since v10.0.0 + */ + function isArgumentsObject(object: unknown): object is IArguments; + /** + * Returns `true` if the value is a built-in [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) instance. + * This does _not_ include [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instances. Usually, it is + * desirable to test for both; See `util.types.isAnyArrayBuffer()` for that. + * + * ```js + * util.types.isArrayBuffer(new ArrayBuffer()); // Returns true + * util.types.isArrayBuffer(new SharedArrayBuffer()); // Returns false + * ``` + * @since v10.0.0 + */ + function isArrayBuffer(object: unknown): object is ArrayBuffer; + /** + * Returns `true` if the value is an instance of one of the [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) views, such as typed + * array objects or [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView). Equivalent to + * [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). 
+ * + * ```js + * util.types.isArrayBufferView(new Int8Array()); // true + * util.types.isArrayBufferView(Buffer.from('hello world')); // true + * util.types.isArrayBufferView(new DataView(new ArrayBuffer(16))); // true + * util.types.isArrayBufferView(new ArrayBuffer()); // false + * ``` + * @since v10.0.0 + */ + function isArrayBufferView(object: unknown): object is NodeJS.ArrayBufferView; + /** + * Returns `true` if the value is an [async function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function). + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. + * + * ```js + * util.types.isAsyncFunction(function foo() {}); // Returns false + * util.types.isAsyncFunction(async function foo() {}); // Returns true + * ``` + * @since v10.0.0 + */ + function isAsyncFunction(object: unknown): boolean; + /** + * Returns `true` if the value is a `BigInt64Array` instance. + * + * ```js + * util.types.isBigInt64Array(new BigInt64Array()); // Returns true + * util.types.isBigInt64Array(new BigUint64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isBigInt64Array(value: unknown): value is BigInt64Array; + /** + * Returns `true` if the value is a `BigUint64Array` instance. + * + * ```js + * util.types.isBigUint64Array(new BigInt64Array()); // Returns false + * util.types.isBigUint64Array(new BigUint64Array()); // Returns true + * ``` + * @since v10.0.0 + */ + function isBigUint64Array(value: unknown): value is BigUint64Array; + /** + * Returns `true` if the value is a boolean object, e.g. created + * by `new Boolean()`. + * + * ```js + * util.types.isBooleanObject(false); // Returns false + * util.types.isBooleanObject(true); // Returns false + * util.types.isBooleanObject(new Boolean(false)); // Returns true + * util.types.isBooleanObject(new Boolean(true)); // Returns true + * util.types.isBooleanObject(Boolean(false)); // Returns false + * util.types.isBooleanObject(Boolean(true)); // Returns false + * ``` + * @since v10.0.0 + */ + function isBooleanObject(object: unknown): object is Boolean; + /** + * Returns `true` if the value is any boxed primitive object, e.g. created + * by `new Boolean()`, `new String()` or `Object(Symbol())`. + * + * For example: + * + * ```js + * util.types.isBoxedPrimitive(false); // Returns false + * util.types.isBoxedPrimitive(new Boolean(false)); // Returns true + * util.types.isBoxedPrimitive(Symbol('foo')); // Returns false + * util.types.isBoxedPrimitive(Object(Symbol('foo'))); // Returns true + * util.types.isBoxedPrimitive(Object(BigInt(5))); // Returns true + * ``` + * @since v10.11.0 + */ + function isBoxedPrimitive(object: unknown): object is String | Number | BigInt | Boolean | Symbol; + /** + * Returns `true` if the value is a built-in [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView) instance. + * + * ```js + * const ab = new ArrayBuffer(20); + * util.types.isDataView(new DataView(ab)); // Returns true + * util.types.isDataView(new Float64Array()); // Returns false + * ``` + * + * See also [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). 
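+ *
+ * A small additional sketch (not part of the upstream example set): a `Buffer` is a
+ * `Uint8Array` view rather than a `DataView`, so the two checks disagree on it.
+ *
+ * ```js
+ * const buf = Buffer.from('hello');
+ * util.types.isArrayBufferView(buf); // Returns true
+ * util.types.isDataView(buf); // Returns false
+ * ```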
+ * @since v10.0.0 + */ + function isDataView(object: unknown): object is DataView; + /** + * Returns `true` if the value is a built-in [`Date`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date) instance. + * + * ```js + * util.types.isDate(new Date()); // Returns true + * ``` + * @since v10.0.0 + */ + function isDate(object: unknown): object is Date; + /** + * Returns `true` if the value is a native `External` value. + * + * A native `External` value is a special type of object that contains a + * raw C++ pointer (`void*`) for access from native code, and has no other + * properties. Such objects are created either by Node.js internals or native + * addons. In JavaScript, they are [frozen](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze) objects with a`null` prototype. + * + * ```c + * #include + * #include + * napi_value result; + * static napi_value MyNapi(napi_env env, napi_callback_info info) { + * int* raw = (int*) malloc(1024); + * napi_status status = napi_create_external(env, (void*) raw, NULL, NULL, &result); + * if (status != napi_ok) { + * napi_throw_error(env, NULL, "napi_create_external failed"); + * return NULL; + * } + * return result; + * } + * ... + * DECLARE_NAPI_PROPERTY("myNapi", MyNapi) + * ... + * ``` + * + * ```js + * const native = require('napi_addon.node'); + * const data = native.myNapi(); + * util.types.isExternal(data); // returns true + * util.types.isExternal(0); // returns false + * util.types.isExternal(new String('foo')); // returns false + * ``` + * + * For further information on `napi_create_external`, refer to `napi_create_external()`. + * @since v10.0.0 + */ + function isExternal(object: unknown): boolean; + /** + * Returns `true` if the value is a built-in [`Float32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float32Array) instance. + * + * ```js + * util.types.isFloat32Array(new ArrayBuffer()); // Returns false + * util.types.isFloat32Array(new Float32Array()); // Returns true + * util.types.isFloat32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isFloat32Array(object: unknown): object is Float32Array; + /** + * Returns `true` if the value is a built-in [`Float64Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float64Array) instance. + * + * ```js + * util.types.isFloat64Array(new ArrayBuffer()); // Returns false + * util.types.isFloat64Array(new Uint8Array()); // Returns false + * util.types.isFloat64Array(new Float64Array()); // Returns true + * ``` + * @since v10.0.0 + */ + function isFloat64Array(object: unknown): object is Float64Array; + /** + * Returns `true` if the value is a generator function. + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. + * + * ```js + * util.types.isGeneratorFunction(function foo() {}); // Returns false + * util.types.isGeneratorFunction(function* foo() {}); // Returns true + * ``` + * @since v10.0.0 + */ + function isGeneratorFunction(object: unknown): object is GeneratorFunction; + /** + * Returns `true` if the value is a generator object as returned from a + * built-in generator function. + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. 
+ * + * ```js + * function* foo() {} + * const generator = foo(); + * util.types.isGeneratorObject(generator); // Returns true + * ``` + * @since v10.0.0 + */ + function isGeneratorObject(object: unknown): object is Generator; + /** + * Returns `true` if the value is a built-in [`Int8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int8Array) instance. + * + * ```js + * util.types.isInt8Array(new ArrayBuffer()); // Returns false + * util.types.isInt8Array(new Int8Array()); // Returns true + * util.types.isInt8Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isInt8Array(object: unknown): object is Int8Array; + /** + * Returns `true` if the value is a built-in [`Int16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int16Array) instance. + * + * ```js + * util.types.isInt16Array(new ArrayBuffer()); // Returns false + * util.types.isInt16Array(new Int16Array()); // Returns true + * util.types.isInt16Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isInt16Array(object: unknown): object is Int16Array; + /** + * Returns `true` if the value is a built-in [`Int32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int32Array) instance. + * + * ```js + * util.types.isInt32Array(new ArrayBuffer()); // Returns false + * util.types.isInt32Array(new Int32Array()); // Returns true + * util.types.isInt32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isInt32Array(object: unknown): object is Int32Array; + /** + * Returns `true` if the value is a built-in [`Map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map) instance. + * + * ```js + * util.types.isMap(new Map()); // Returns true + * ``` + * @since v10.0.0 + */ + function isMap( + object: T | {}, + ): object is T extends ReadonlyMap ? (unknown extends T ? never : ReadonlyMap) + : Map; + /** + * Returns `true` if the value is an iterator returned for a built-in [`Map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map) instance. + * + * ```js + * const map = new Map(); + * util.types.isMapIterator(map.keys()); // Returns true + * util.types.isMapIterator(map.values()); // Returns true + * util.types.isMapIterator(map.entries()); // Returns true + * util.types.isMapIterator(map[Symbol.iterator]()); // Returns true + * ``` + * @since v10.0.0 + */ + function isMapIterator(object: unknown): boolean; + /** + * Returns `true` if the value is an instance of a [Module Namespace Object](https://tc39.github.io/ecma262/#sec-module-namespace-exotic-objects). + * + * ```js + * import * as ns from './a.js'; + * + * util.types.isModuleNamespaceObject(ns); // Returns true + * ``` + * @since v10.0.0 + */ + function isModuleNamespaceObject(value: unknown): boolean; + /** + * Returns `true` if the value was returned by the constructor of a [built-in `Error` type](https://tc39.es/ecma262/#sec-error-objects). 
+ * + * ```js + * console.log(util.types.isNativeError(new Error())); // true + * console.log(util.types.isNativeError(new TypeError())); // true + * console.log(util.types.isNativeError(new RangeError())); // true + * ``` + * + * Subclasses of the native error types are also native errors: + * + * ```js + * class MyError extends Error {} + * console.log(util.types.isNativeError(new MyError())); // true + * ``` + * + * A value being `instanceof` a native error class is not equivalent to `isNativeError()`returning `true` for that value. `isNativeError()` returns `true` for errors + * which come from a different [realm](https://tc39.es/ecma262/#realm) while `instanceof Error` returns `false`for these errors: + * + * ```js + * const vm = require('node:vm'); + * const context = vm.createContext({}); + * const myError = vm.runInContext('new Error()', context); + * console.log(util.types.isNativeError(myError)); // true + * console.log(myError instanceof Error); // false + * ``` + * + * Conversely, `isNativeError()` returns `false` for all objects which were not + * returned by the constructor of a native error. That includes values + * which are `instanceof` native errors: + * + * ```js + * const myError = { __proto__: Error.prototype }; + * console.log(util.types.isNativeError(myError)); // false + * console.log(myError instanceof Error); // true + * ``` + * @since v10.0.0 + */ + function isNativeError(object: unknown): object is Error; + /** + * Returns `true` if the value is a number object, e.g. created + * by `new Number()`. + * + * ```js + * util.types.isNumberObject(0); // Returns false + * util.types.isNumberObject(new Number(0)); // Returns true + * ``` + * @since v10.0.0 + */ + function isNumberObject(object: unknown): object is Number; + /** + * Returns `true` if the value is a built-in [`Promise`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise). + * + * ```js + * util.types.isPromise(Promise.resolve(42)); // Returns true + * ``` + * @since v10.0.0 + */ + function isPromise(object: unknown): object is Promise; + /** + * Returns `true` if the value is a [`Proxy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy) instance. + * + * ```js + * const target = {}; + * const proxy = new Proxy(target, {}); + * util.types.isProxy(target); // Returns false + * util.types.isProxy(proxy); // Returns true + * ``` + * @since v10.0.0 + */ + function isProxy(object: unknown): boolean; + /** + * Returns `true` if the value is a regular expression object. + * + * ```js + * util.types.isRegExp(/abc/); // Returns true + * util.types.isRegExp(new RegExp('abc')); // Returns true + * ``` + * @since v10.0.0 + */ + function isRegExp(object: unknown): object is RegExp; + /** + * Returns `true` if the value is a built-in [`Set`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set) instance. + * + * ```js + * util.types.isSet(new Set()); // Returns true + * ``` + * @since v10.0.0 + */ + function isSet( + object: T | {}, + ): object is T extends ReadonlySet ? (unknown extends T ? never : ReadonlySet) : Set; + /** + * Returns `true` if the value is an iterator returned for a built-in [`Set`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set) instance. 
+ * + * ```js + * const set = new Set(); + * util.types.isSetIterator(set.keys()); // Returns true + * util.types.isSetIterator(set.values()); // Returns true + * util.types.isSetIterator(set.entries()); // Returns true + * util.types.isSetIterator(set[Symbol.iterator]()); // Returns true + * ``` + * @since v10.0.0 + */ + function isSetIterator(object: unknown): boolean; + /** + * Returns `true` if the value is a built-in [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instance. + * This does _not_ include [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) instances. Usually, it is + * desirable to test for both; See `util.types.isAnyArrayBuffer()` for that. + * + * ```js + * util.types.isSharedArrayBuffer(new ArrayBuffer()); // Returns false + * util.types.isSharedArrayBuffer(new SharedArrayBuffer()); // Returns true + * ``` + * @since v10.0.0 + */ + function isSharedArrayBuffer(object: unknown): object is SharedArrayBuffer; + /** + * Returns `true` if the value is a string object, e.g. created + * by `new String()`. + * + * ```js + * util.types.isStringObject('foo'); // Returns false + * util.types.isStringObject(new String('foo')); // Returns true + * ``` + * @since v10.0.0 + */ + function isStringObject(object: unknown): object is String; + /** + * Returns `true` if the value is a symbol object, created + * by calling `Object()` on a `Symbol` primitive. + * + * ```js + * const symbol = Symbol('foo'); + * util.types.isSymbolObject(symbol); // Returns false + * util.types.isSymbolObject(Object(symbol)); // Returns true + * ``` + * @since v10.0.0 + */ + function isSymbolObject(object: unknown): object is Symbol; + /** + * Returns `true` if the value is a built-in [`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray) instance. + * + * ```js + * util.types.isTypedArray(new ArrayBuffer()); // Returns false + * util.types.isTypedArray(new Uint8Array()); // Returns true + * util.types.isTypedArray(new Float64Array()); // Returns true + * ``` + * + * See also [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). + * @since v10.0.0 + */ + function isTypedArray(object: unknown): object is NodeJS.TypedArray; + /** + * Returns `true` if the value is a built-in [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) instance. + * + * ```js + * util.types.isUint8Array(new ArrayBuffer()); // Returns false + * util.types.isUint8Array(new Uint8Array()); // Returns true + * util.types.isUint8Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint8Array(object: unknown): object is Uint8Array; + /** + * Returns `true` if the value is a built-in [`Uint8ClampedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8ClampedArray) instance. 
+ * + * ```js + * util.types.isUint8ClampedArray(new ArrayBuffer()); // Returns false + * util.types.isUint8ClampedArray(new Uint8ClampedArray()); // Returns true + * util.types.isUint8ClampedArray(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint8ClampedArray(object: unknown): object is Uint8ClampedArray; + /** + * Returns `true` if the value is a built-in [`Uint16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint16Array) instance. + * + * ```js + * util.types.isUint16Array(new ArrayBuffer()); // Returns false + * util.types.isUint16Array(new Uint16Array()); // Returns true + * util.types.isUint16Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint16Array(object: unknown): object is Uint16Array; + /** + * Returns `true` if the value is a built-in [`Uint32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint32Array) instance. + * + * ```js + * util.types.isUint32Array(new ArrayBuffer()); // Returns false + * util.types.isUint32Array(new Uint32Array()); // Returns true + * util.types.isUint32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint32Array(object: unknown): object is Uint32Array; + /** + * Returns `true` if the value is a built-in [`WeakMap`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakMap) instance. + * + * ```js + * util.types.isWeakMap(new WeakMap()); // Returns true + * ``` + * @since v10.0.0 + */ + function isWeakMap(object: unknown): object is WeakMap; + /** + * Returns `true` if the value is a built-in [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) instance. + * + * ```js + * util.types.isWeakSet(new WeakSet()); // Returns true + * ``` + * @since v10.0.0 + */ + function isWeakSet(object: unknown): object is WeakSet; + /** + * Returns `true` if `value` is a `KeyObject`, `false` otherwise. + * @since v16.2.0 + */ + function isKeyObject(object: unknown): object is KeyObject; + /** + * Returns `true` if `value` is a `CryptoKey`, `false` otherwise. + * @since v16.2.0 + */ + function isCryptoKey(object: unknown): object is webcrypto.CryptoKey; +} +declare module "node:util" { + export * from "util"; +} +declare module "node:util/types" { + export * from "util/types"; +} diff --git a/node_modules/@types/node/v8.d.ts b/node_modules/@types/node/v8.d.ts new file mode 100644 index 0000000..b2aeee2 --- /dev/null +++ b/node_modules/@types/node/v8.d.ts @@ -0,0 +1,764 @@ +/** + * The `node:v8` module exposes APIs that are specific to the version of [V8](https://developers.google.com/v8/) built into the Node.js binary. It can be accessed using: + * + * ```js + * const v8 = require('node:v8'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.11.0/lib/v8.js) + */ +declare module "v8" { + import { Readable } from "node:stream"; + interface HeapSpaceInfo { + space_name: string; + space_size: number; + space_used_size: number; + space_available_size: number; + physical_space_size: number; + } + // ** Signifies if the --zap_code_space option is enabled or not. 1 == enabled, 0 == disabled. 
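The `isKeyObject()`/`isCryptoKey()` declarations above ship without usage examples. A minimal sketch, assuming the `node:crypto` module is available (the `'aes'`/128-bit parameters are arbitrary):

```js
const crypto = require('node:crypto');
const util = require('node:util');

// A symmetric KeyObject created through the node:crypto API.
const keyObject = crypto.generateKeySync('aes', { length: 128 });
console.log(util.types.isKeyObject(keyObject)); // true
console.log(util.types.isCryptoKey(keyObject)); // false (not a WebCrypto CryptoKey)
```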
*/ + type DoesZapCodeSpaceFlag = 0 | 1; + interface HeapInfo { + total_heap_size: number; + total_heap_size_executable: number; + total_physical_size: number; + total_available_size: number; + used_heap_size: number; + heap_size_limit: number; + malloced_memory: number; + peak_malloced_memory: number; + does_zap_garbage: DoesZapCodeSpaceFlag; + number_of_native_contexts: number; + number_of_detached_contexts: number; + total_global_handles_size: number; + used_global_handles_size: number; + external_memory: number; + } + interface HeapCodeStatistics { + code_and_metadata_size: number; + bytecode_and_metadata_size: number; + external_script_source_size: number; + } + interface HeapSnapshotOptions { + /** + * If true, expose internals in the heap snapshot. + * @default false + */ + exposeInternals?: boolean; + /** + * If true, expose numeric values in artificial fields. + * @default false + */ + exposeNumericValues?: boolean; + } + /** + * Returns an integer representing a version tag derived from the V8 version, + * command-line flags, and detected CPU features. This is useful for determining + * whether a `vm.Script` `cachedData` buffer is compatible with this instance + * of V8. + * + * ```js + * console.log(v8.cachedDataVersionTag()); // 3947234607 + * // The value returned by v8.cachedDataVersionTag() is derived from the V8 + * // version, command-line flags, and detected CPU features. Test that the value + * // does indeed update when flags are toggled. + * v8.setFlagsFromString('--allow_natives_syntax'); + * console.log(v8.cachedDataVersionTag()); // 183726201 + * ``` + * @since v8.0.0 + */ + function cachedDataVersionTag(): number; + /** + * Returns an object with the following properties: + * + * `does_zap_garbage` is a 0/1 boolean, which signifies whether the`--zap_code_space` option is enabled or not. This makes V8 overwrite heap + * garbage with a bit pattern. The RSS footprint (resident set size) gets bigger + * because it continuously touches all heap pages and that makes them less likely + * to get swapped out by the operating system. + * + * `number_of_native_contexts` The value of native\_context is the number of the + * top-level contexts currently active. Increase of this number over time indicates + * a memory leak. + * + * `number_of_detached_contexts` The value of detached\_context is the number + * of contexts that were detached and not yet garbage collected. This number + * being non-zero indicates a potential memory leak. + * + * `total_global_handles_size` The value of total\_global\_handles\_size is the + * total memory size of V8 global handles. + * + * `used_global_handles_size` The value of used\_global\_handles\_size is the + * used memory size of V8 global handles. + * + * `external_memory` The value of external\_memory is the memory size of array + * buffers and external strings. + * + * ```js + * { + * total_heap_size: 7326976, + * total_heap_size_executable: 4194304, + * total_physical_size: 7326976, + * total_available_size: 1152656, + * used_heap_size: 3476208, + * heap_size_limit: 1535115264, + * malloced_memory: 16384, + * peak_malloced_memory: 1127496, + * does_zap_garbage: 0, + * number_of_native_contexts: 1, + * number_of_detached_contexts: 0, + * total_global_handles_size: 8192, + * used_global_handles_size: 3296, + * external_memory: 318824 + * } + * ``` + * @since v1.0.0 + */ + function getHeapStatistics(): HeapInfo; + /** + * Returns statistics about the V8 heap spaces, i.e. the segments which make up + * the V8 heap. 
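To make the `getHeapStatistics()` fields above concrete, a small illustrative sketch (not part of the vendored file) that reports how close the process is to `heap_size_limit`:

```js
const v8 = require('node:v8');

const { used_heap_size, heap_size_limit } = v8.getHeapStatistics();
const usedPercent = (used_heap_size / heap_size_limit) * 100;
console.log(`Heap usage: ${usedPercent.toFixed(1)}% of the configured limit`);
```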
Neither the ordering of heap spaces, nor the availability of a + * heap space can be guaranteed as the statistics are provided via the + * V8 [`GetHeapSpaceStatistics`](https://v8docs.nodesource.com/node-13.2/d5/dda/classv8_1_1_isolate.html#ac673576f24fdc7a33378f8f57e1d13a4) function and may change from one V8 version to the + * next. + * + * The value returned is an array of objects containing the following properties: + * + * ```json + * [ + * { + * "space_name": "new_space", + * "space_size": 2063872, + * "space_used_size": 951112, + * "space_available_size": 80824, + * "physical_space_size": 2063872 + * }, + * { + * "space_name": "old_space", + * "space_size": 3090560, + * "space_used_size": 2493792, + * "space_available_size": 0, + * "physical_space_size": 3090560 + * }, + * { + * "space_name": "code_space", + * "space_size": 1260160, + * "space_used_size": 644256, + * "space_available_size": 960, + * "physical_space_size": 1260160 + * }, + * { + * "space_name": "map_space", + * "space_size": 1094160, + * "space_used_size": 201608, + * "space_available_size": 0, + * "physical_space_size": 1094160 + * }, + * { + * "space_name": "large_object_space", + * "space_size": 0, + * "space_used_size": 0, + * "space_available_size": 1490980608, + * "physical_space_size": 0 + * } + * ] + * ``` + * @since v6.0.0 + */ + function getHeapSpaceStatistics(): HeapSpaceInfo[]; + /** + * The `v8.setFlagsFromString()` method can be used to programmatically set + * V8 command-line flags. This method should be used with care. Changing settings + * after the VM has started may result in unpredictable behavior, including + * crashes and data loss; or it may simply do nothing. + * + * The V8 options available for a version of Node.js may be determined by running`node --v8-options`. + * + * Usage: + * + * ```js + * // Print GC events to stdout for one minute. + * const v8 = require('node:v8'); + * v8.setFlagsFromString('--trace_gc'); + * setTimeout(() => { v8.setFlagsFromString('--notrace_gc'); }, 60e3); + * ``` + * @since v1.0.0 + */ + function setFlagsFromString(flags: string): void; + /** + * Generates a snapshot of the current V8 heap and returns a Readable + * Stream that may be used to read the JSON serialized representation. + * This JSON stream format is intended to be used with tools such as + * Chrome DevTools. The JSON schema is undocumented and specific to the + * V8 engine. Therefore, the schema may change from one version of V8 to the next. + * + * Creating a heap snapshot requires memory about twice the size of the heap at + * the time the snapshot is created. This results in the risk of OOM killers + * terminating the process. + * + * Generating a snapshot is a synchronous operation which blocks the event loop + * for a duration depending on the heap size. + * + * ```js + * // Print heap snapshot to the console + * const v8 = require('node:v8'); + * const stream = v8.getHeapSnapshot(); + * stream.pipe(process.stdout); + * ``` + * @since v11.13.0 + * @return A Readable containing the V8 heap snapshot. + */ + function getHeapSnapshot(options?: HeapSnapshotOptions): Readable; + /** + * Generates a snapshot of the current V8 heap and writes it to a JSON + * file. This file is intended to be used with tools such as Chrome + * DevTools. The JSON schema is undocumented and specific to the V8 + * engine, and may change from one version of V8 to the next. + * + * A heap snapshot is specific to a single V8 isolate. 
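For reference, the `getHeapSnapshot()` stream documented above can be piped to a file rather than stdout; a minimal sketch (the output filename is arbitrary):

```js
const v8 = require('node:v8');
const fs = require('node:fs');

// Stream the JSON-serialized heap snapshot to disk so it can be
// loaded later in Chrome DevTools (Memory tab, "Load profile").
v8.getHeapSnapshot().pipe(fs.createWriteStream('app.heapsnapshot'));
```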
When using `worker threads`, a heap snapshot generated from the main thread will + * not contain any information about the workers, and vice versa. + * + * Creating a heap snapshot requires memory about twice the size of the heap at + * the time the snapshot is created. This results in the risk of OOM killers + * terminating the process. + * + * Generating a snapshot is a synchronous operation which blocks the event loop + * for a duration depending on the heap size. + * + * ```js + * const { writeHeapSnapshot } = require('node:v8'); + * const { + * Worker, + * isMainThread, + * parentPort, + * } = require('node:worker_threads'); + * + * if (isMainThread) { + * const worker = new Worker(__filename); + * + * worker.once('message', (filename) => { + * console.log(`worker heapdump: ${filename}`); + * // Now get a heapdump for the main thread. + * console.log(`main thread heapdump: ${writeHeapSnapshot()}`); + * }); + * + * // Tell the worker to create a heapdump. + * worker.postMessage('heapdump'); + * } else { + * parentPort.once('message', (message) => { + * if (message === 'heapdump') { + * // Generate a heapdump for the worker + * // and return the filename to the parent. + * parentPort.postMessage(writeHeapSnapshot()); + * } + * }); + * } + * ``` + * @since v11.13.0 + * @param filename The file path where the V8 heap snapshot is to be saved. If not specified, a file name with the pattern `'Heap-${yyyymmdd}-${hhmmss}-${pid}-${thread_id}.heapsnapshot'` will be + * generated, where `{pid}` will be the PID of the Node.js process, `{thread_id}` will be `0` when `writeHeapSnapshot()` is called from the main Node.js thread or the id of a + * worker thread. + * @return The filename where the snapshot was saved. + */ + function writeHeapSnapshot(filename?: string, options?: HeapSnapshotOptions): string; + /** + * Get statistics about code and its metadata in the heap, see + * V8 [`GetHeapCodeAndMetadataStatistics`](https://v8docs.nodesource.com/node-13.2/d5/dda/classv8_1_1_isolate.html#a6079122af17612ef54ef3348ce170866) API. Returns an object with the + * following properties: + * + * ```js + * { + * code_and_metadata_size: 212208, + * bytecode_and_metadata_size: 161368, + * external_script_source_size: 1410794, + * cpu_profiler_metadata_size: 0, + * } + * ``` + * @since v12.8.0 + */ + function getHeapCodeStatistics(): HeapCodeStatistics; + /** + * @since v8.0.0 + */ + class Serializer { + /** + * Writes out a header, which includes the serialization format version. + */ + writeHeader(): void; + /** + * Serializes a JavaScript value and adds the serialized representation to the + * internal buffer. + * + * This throws an error if `value` cannot be serialized. + */ + writeValue(val: any): boolean; + /** + * Returns the stored internal buffer. This serializer should not be used once + * the buffer is released. Calling this method results in undefined behavior + * if a previous write has failed. + */ + releaseBuffer(): Buffer; + /** + * Marks an `ArrayBuffer` as having its contents transferred out of band. + * Pass the corresponding `ArrayBuffer` in the deserializing context to `deserializer.transferArrayBuffer()`. + * @param id A 32-bit unsigned integer. + * @param arrayBuffer An `ArrayBuffer` instance. + */ + transferArrayBuffer(id: number, arrayBuffer: ArrayBuffer): void; + /** + * Write a raw 32-bit unsigned integer. + * For use inside of a custom `serializer._writeHostObject()`. 
+ */ + writeUint32(value: number): void; + /** + * Write a raw 64-bit unsigned integer, split into high and low 32-bit parts. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeUint64(hi: number, lo: number): void; + /** + * Write a JS `number` value. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeDouble(value: number): void; + /** + * Write raw bytes into the serializer's internal buffer. The deserializer + * will require a way to compute the length of the buffer. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeRawBytes(buffer: NodeJS.TypedArray): void; + } + /** + * A subclass of `Serializer` that serializes `TypedArray`(in particular `Buffer`) and `DataView` objects as host objects, and only + * stores the part of their underlying `ArrayBuffer`s that they are referring to. + * @since v8.0.0 + */ + class DefaultSerializer extends Serializer {} + /** + * @since v8.0.0 + */ + class Deserializer { + constructor(data: NodeJS.TypedArray); + /** + * Reads and validates a header (including the format version). + * May, for example, reject an invalid or unsupported wire format. In that case, + * an `Error` is thrown. + */ + readHeader(): boolean; + /** + * Deserializes a JavaScript value from the buffer and returns it. + */ + readValue(): any; + /** + * Marks an `ArrayBuffer` as having its contents transferred out of band. + * Pass the corresponding `ArrayBuffer` in the serializing context to `serializer.transferArrayBuffer()` (or return the `id` from `serializer._getSharedArrayBufferId()` in the case of + * `SharedArrayBuffer`s). + * @param id A 32-bit unsigned integer. + * @param arrayBuffer An `ArrayBuffer` instance. + */ + transferArrayBuffer(id: number, arrayBuffer: ArrayBuffer): void; + /** + * Reads the underlying wire format version. Likely mostly to be useful to + * legacy code reading old wire format versions. May not be called before`.readHeader()`. + */ + getWireFormatVersion(): number; + /** + * Read a raw 32-bit unsigned integer and return it. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readUint32(): number; + /** + * Read a raw 64-bit unsigned integer and return it as an array `[hi, lo]`with two 32-bit unsigned integer entries. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readUint64(): [number, number]; + /** + * Read a JS `number` value. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readDouble(): number; + /** + * Read raw bytes from the deserializer's internal buffer. The `length` parameter + * must correspond to the length of the buffer that was passed to `serializer.writeRawBytes()`. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readRawBytes(length: number): Buffer; + } + /** + * A subclass of `Deserializer` corresponding to the format written by `DefaultSerializer`. + * @since v8.0.0 + */ + class DefaultDeserializer extends Deserializer {} + /** + * Uses a `DefaultSerializer` to serialize `value` into a buffer. + * + * `ERR_BUFFER_TOO_LARGE` will be thrown when trying to + * serialize a huge object which requires buffer + * larger than `buffer.constants.MAX_LENGTH`. + * @since v8.0.0 + */ + function serialize(value: any): Buffer; + /** + * Uses a `DefaultDeserializer` with default options to read a JS value + * from a buffer. + * @since v8.0.0 + * @param buffer A buffer returned by {@link serialize}. 
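To show the `serialize()`/`deserialize()` pair in use, a short round-trip sketch (the `Map` contents are arbitrary; any value supported by the structured clone algorithm works):

```js
const v8 = require('node:v8');

// Structured-clone-compatible values (Map, Set, Date, typed arrays, ...)
// survive a serialize/deserialize round trip.
const buf = v8.serialize(new Map([['answer', 42]]));
const copy = v8.deserialize(buf);
console.log(copy.get('answer')); // 42
```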
+ */ + function deserialize(buffer: NodeJS.TypedArray): any; + /** + * The `v8.takeCoverage()` method allows the user to write the coverage started by `NODE_V8_COVERAGE` to disk on demand. This method can be invoked multiple + * times during the lifetime of the process. Each time the execution counter will + * be reset and a new coverage report will be written to the directory specified + * by `NODE_V8_COVERAGE`. + * + * When the process is about to exit, one last coverage will still be written to + * disk unless {@link stopCoverage} is invoked before the process exits. + * @since v15.1.0, v14.18.0, v12.22.0 + */ + function takeCoverage(): void; + /** + * The `v8.stopCoverage()` method allows the user to stop the coverage collection + * started by `NODE_V8_COVERAGE`, so that V8 can release the execution count + * records and optimize code. This can be used in conjunction with {@link takeCoverage} if the user wants to collect the coverage on demand. + * @since v15.1.0, v14.18.0, v12.22.0 + */ + function stopCoverage(): void; + /** + * The API is a no-op if `--heapsnapshot-near-heap-limit` is already set from the command line or the API is called more than once. + * `limit` must be a positive integer. See [`--heapsnapshot-near-heap-limit`](https://nodejs.org/docs/latest-v20.x/api/cli.html#--heapsnapshot-near-heap-limitmax_count) for more information. + * @experimental + * @since v18.10.0, v16.18.0 + */ + function setHeapSnapshotNearHeapLimit(limit: number): void; + /** + * This API collects GC data in current thread. + * @since v19.6.0, v18.15.0 + */ + class GCProfiler { + /** + * Start collecting GC data. + * @since v19.6.0, v18.15.0 + */ + start(): void; + /** + * Stop collecting GC data and return an object.The content of object + * is as follows. + * + * ```json + * { + * "version": 1, + * "startTime": 1674059033862, + * "statistics": [ + * { + * "gcType": "Scavenge", + * "beforeGC": { + * "heapStatistics": { + * "totalHeapSize": 5005312, + * "totalHeapSizeExecutable": 524288, + * "totalPhysicalSize": 5226496, + * "totalAvailableSize": 4341325216, + * "totalGlobalHandlesSize": 8192, + * "usedGlobalHandlesSize": 2112, + * "usedHeapSize": 4883840, + * "heapSizeLimit": 4345298944, + * "mallocedMemory": 254128, + * "externalMemory": 225138, + * "peakMallocedMemory": 181760 + * }, + * "heapSpaceStatistics": [ + * { + * "spaceName": "read_only_space", + * "spaceSize": 0, + * "spaceUsedSize": 0, + * "spaceAvailableSize": 0, + * "physicalSpaceSize": 0 + * } + * ] + * }, + * "cost": 1574.14, + * "afterGC": { + * "heapStatistics": { + * "totalHeapSize": 6053888, + * "totalHeapSizeExecutable": 524288, + * "totalPhysicalSize": 5500928, + * "totalAvailableSize": 4341101384, + * "totalGlobalHandlesSize": 8192, + * "usedGlobalHandlesSize": 2112, + * "usedHeapSize": 4059096, + * "heapSizeLimit": 4345298944, + * "mallocedMemory": 254128, + * "externalMemory": 225138, + * "peakMallocedMemory": 181760 + * }, + * "heapSpaceStatistics": [ + * { + * "spaceName": "read_only_space", + * "spaceSize": 0, + * "spaceUsedSize": 0, + * "spaceAvailableSize": 0, + * "physicalSpaceSize": 0 + * } + * ] + * } + * } + * ], + * "endTime": 1674059036865 + * } + * ``` + * + * Here's an example. 
+ * + * ```js + * const { GCProfiler } = require('v8'); + * const profiler = new GCProfiler(); + * profiler.start(); + * setTimeout(() => { + * console.log(profiler.stop()); + * }, 1000); + * ``` + * @since v19.6.0, v18.15.0 + */ + stop(): GCProfilerResult; + } + interface GCProfilerResult { + version: number; + startTime: number; + endTime: number; + statistics: Array<{ + gcType: string; + cost: number; + beforeGC: { + heapStatistics: HeapStatistics; + heapSpaceStatistics: HeapSpaceStatistics[]; + }; + afterGC: { + heapStatistics: HeapStatistics; + heapSpaceStatistics: HeapSpaceStatistics[]; + }; + }>; + } + interface HeapStatistics { + totalHeapSize: number; + totalHeapSizeExecutable: number; + totalPhysicalSize: number; + totalAvailableSize: number; + totalGlobalHandlesSize: number; + usedGlobalHandlesSize: number; + usedHeapSize: number; + heapSizeLimit: number; + mallocedMemory: number; + externalMemory: number; + peakMallocedMemory: number; + } + interface HeapSpaceStatistics { + spaceName: string; + spaceSize: number; + spaceUsedSize: number; + spaceAvailableSize: number; + physicalSpaceSize: number; + } + /** + * Called when a promise is constructed. This does not mean that corresponding before/after events will occur, only that the possibility exists. This will + * happen if a promise is created without ever getting a continuation. + * @since v17.1.0, v16.14.0 + * @param promise The promise being created. + * @param parent The promise continued from, if applicable. + */ + interface Init { + (promise: Promise, parent: Promise): void; + } + /** + * Called before a promise continuation executes. This can be in the form of `then()`, `catch()`, or `finally()` handlers or an await resuming. + * + * The before callback will be called 0 to N times. The before callback will typically be called 0 times if no continuation was ever made for the promise. + * The before callback may be called many times in the case where many continuations have been made from the same promise. + * @since v17.1.0, v16.14.0 + */ + interface Before { + (promise: Promise): void; + } + /** + * Called immediately after a promise continuation executes. This may be after a `then()`, `catch()`, or `finally()` handler or before an await after another await. + * @since v17.1.0, v16.14.0 + */ + interface After { + (promise: Promise): void; + } + /** + * Called when the promise receives a resolution or rejection value. This may occur synchronously in the case of {@link Promise.resolve()} or + * {@link Promise.reject()}. + * @since v17.1.0, v16.14.0 + */ + interface Settled { + (promise: Promise): void; + } + /** + * Key events in the lifetime of a promise have been categorized into four areas: creation of a promise, before/after a continuation handler is called or + * around an await, and when the promise resolves or rejects. + * + * Because promises are asynchronous resources whose lifecycle is tracked via the promise hooks mechanism, the `init()`, `before()`, `after()`, and + * `settled()` callbacks must not be async functions as they create more promises which would produce an infinite loop. + * @since v17.1.0, v16.14.0 + */ + interface HookCallbacks { + init?: Init; + before?: Before; + after?: After; + settled?: Settled; + } + interface PromiseHooks { + /** + * The `init` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop. + * @since v17.1.0, v16.14.0 + * @param init The {@link Init | `init` callback} to call when a promise is created. 
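A minimal sketch of the `onInit` hook described here (Node.js 17.1+/16.14+; the callback must stay a plain, non-async function):

```js
const { promiseHooks } = require('node:v8');

// Register an init hook; the returned function unregisters it again.
const stop = promiseHooks.onInit((promise, parent) => {
  // Called synchronously each time a promise is created.
});

Promise.resolve(42).then(() => {});
stop(); // stop receiving init notifications
```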
+ * @return Call to stop the hook. + */ + onInit: (init: Init) => Function; + /** + * The `settled` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop. + * @since v17.1.0, v16.14.0 + * @param settled The {@link Settled | `settled` callback} to call when a promise is created. + * @return Call to stop the hook. + */ + onSettled: (settled: Settled) => Function; + /** + * The `before` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop. + * @since v17.1.0, v16.14.0 + * @param before The {@link Before | `before` callback} to call before a promise continuation executes. + * @return Call to stop the hook. + */ + onBefore: (before: Before) => Function; + /** + * The `after` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop. + * @since v17.1.0, v16.14.0 + * @param after The {@link After | `after` callback} to call after a promise continuation executes. + * @return Call to stop the hook. + */ + onAfter: (after: After) => Function; + /** + * Registers functions to be called for different lifetime events of each promise. + * The callbacks `init()`/`before()`/`after()`/`settled()` are called for the respective events during a promise's lifetime. + * All callbacks are optional. For example, if only promise creation needs to be tracked, then only the init callback needs to be passed. + * The hook callbacks must be plain functions. Providing async functions will throw as it would produce an infinite microtask loop. + * @since v17.1.0, v16.14.0 + * @param callbacks The {@link HookCallbacks | Hook Callbacks} to register + * @return Used for disabling hooks + */ + createHook: (callbacks: HookCallbacks) => Function; + } + /** + * The `promiseHooks` interface can be used to track promise lifecycle events. + * @since v17.1.0, v16.14.0 + */ + const promiseHooks: PromiseHooks; + type StartupSnapshotCallbackFn = (args: any) => any; + interface StartupSnapshot { + /** + * Add a callback that will be called when the Node.js instance is about to get serialized into a snapshot and exit. + * This can be used to release resources that should not or cannot be serialized or to convert user data into a form more suitable for serialization. + * @since v18.6.0, v16.17.0 + */ + addSerializeCallback(callback: StartupSnapshotCallbackFn, data?: any): void; + /** + * Add a callback that will be called when the Node.js instance is deserialized from a snapshot. + * The `callback` and the `data` (if provided) will be serialized into the snapshot, they can be used to re-initialize the state of the application or + * to re-acquire resources that the application needs when the application is restarted from the snapshot. + * @since v18.6.0, v16.17.0 + */ + addDeserializeCallback(callback: StartupSnapshotCallbackFn, data?: any): void; + /** + * This sets the entry point of the Node.js application when it is deserialized from a snapshot. This can be called only once in the snapshot building script. + * If called, the deserialized application no longer needs an additional entry point script to start up and will simply invoke the callback along with the deserialized + * data (if provided), otherwise an entry point script still needs to be provided to the deserialized application. 
+ * @since v18.6.0, v16.17.0 + */ + setDeserializeMainFunction(callback: StartupSnapshotCallbackFn, data?: any): void; + /** + * Returns true if the Node.js instance is run to build a snapshot. + * @since v18.6.0, v16.17.0 + */ + isBuildingSnapshot(): boolean; + } + /** + * The `v8.startupSnapshot` interface can be used to add serialization and deserialization hooks for custom startup snapshots. + * + * ```bash + * $ node --snapshot-blob snapshot.blob --build-snapshot entry.js + * # This launches a process with the snapshot + * $ node --snapshot-blob snapshot.blob + * ``` + * + * In the example above, `entry.js` can use methods from the `v8.startupSnapshot` interface to specify how to save information for custom objects + * in the snapshot during serialization and how the information can be used to synchronize these objects during deserialization of the snapshot. + * For example, if the `entry.js` contains the following script: + * + * ```js + * 'use strict'; + * + * const fs = require('node:fs'); + * const zlib = require('node:zlib'); + * const path = require('node:path'); + * const assert = require('node:assert'); + * + * const v8 = require('node:v8'); + * + * class BookShelf { + * storage = new Map(); + * + * // Reading a series of files from directory and store them into storage. + * constructor(directory, books) { + * for (const book of books) { + * this.storage.set(book, fs.readFileSync(path.join(directory, book))); + * } + * } + * + * static compressAll(shelf) { + * for (const [ book, content ] of shelf.storage) { + * shelf.storage.set(book, zlib.gzipSync(content)); + * } + * } + * + * static decompressAll(shelf) { + * for (const [ book, content ] of shelf.storage) { + * shelf.storage.set(book, zlib.gunzipSync(content)); + * } + * } + * } + * + * // __dirname here is where the snapshot script is placed + * // during snapshot building time. + * const shelf = new BookShelf(__dirname, [ + * 'book1.en_US.txt', + * 'book1.es_ES.txt', + * 'book2.zh_CN.txt', + * ]); + * + * assert(v8.startupSnapshot.isBuildingSnapshot()); + * // On snapshot serialization, compress the books to reduce size. + * v8.startupSnapshot.addSerializeCallback(BookShelf.compressAll, shelf); + * // On snapshot deserialization, decompress the books. + * v8.startupSnapshot.addDeserializeCallback(BookShelf.decompressAll, shelf); + * v8.startupSnapshot.setDeserializeMainFunction((shelf) => { + * // process.env and process.argv are refreshed during snapshot + * // deserialization. + * const lang = process.env.BOOK_LANG || 'en_US'; + * const book = process.argv[1]; + * const name = `${book}.${lang}.txt`; + * console.log(shelf.storage.get(name)); + * }, shelf); + * ``` + * + * The resulted binary will get print the data deserialized from the snapshot during start up, using the refreshed `process.env` and `process.argv` of the launched process: + * + * ```bash + * $ BOOK_LANG=es_ES node --snapshot-blob snapshot.blob book1 + * # Prints content of book1.es_ES.txt deserialized from the snapshot. + * ``` + * + * Currently the application deserialized from a user-land snapshot cannot be snapshotted again, so these APIs are only available to applications that are not deserialized from a user-land snapshot. 
+ * + * @experimental + * @since v18.6.0, v16.17.0 + */ + const startupSnapshot: StartupSnapshot; +} +declare module "node:v8" { + export * from "v8"; +} diff --git a/node_modules/@types/node/vm.d.ts b/node_modules/@types/node/vm.d.ts new file mode 100644 index 0000000..3a310cc --- /dev/null +++ b/node_modules/@types/node/vm.d.ts @@ -0,0 +1,903 @@ +/** + * The `node:vm` module enables compiling and running code within V8 Virtual + * Machine contexts. + * + * **The `node:vm` module is not a security** + * **mechanism. Do not use it to run untrusted code.** + * + * JavaScript code can be compiled and run immediately or + * compiled, saved, and run later. + * + * A common use case is to run the code in a different V8 Context. This means + * invoked code has a different global object than the invoking code. + * + * One can provide the context by `contextifying` an + * object. The invoked code treats any property in the context like a + * global variable. Any changes to global variables caused by the invoked + * code are reflected in the context object. + * + * ```js + * const vm = require('node:vm'); + * + * const x = 1; + * + * const context = { x: 2 }; + * vm.createContext(context); // Contextify the object. + * + * const code = 'x += 40; var y = 17;'; + * // `x` and `y` are global variables in the context. + * // Initially, x has the value 2 because that is the value of context.x. + * vm.runInContext(code, context); + * + * console.log(context.x); // 42 + * console.log(context.y); // 17 + * + * console.log(x); // 1; y is not defined. + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/vm.js) + */ +declare module "vm" { + import { ImportAttributes } from "node:module"; + interface Context extends NodeJS.Dict {} + interface BaseOptions { + /** + * Specifies the filename used in stack traces produced by this script. + * Default: `''`. + */ + filename?: string | undefined; + /** + * Specifies the line number offset that is displayed in stack traces produced by this script. + * Default: `0`. + */ + lineOffset?: number | undefined; + /** + * Specifies the column number offset that is displayed in stack traces produced by this script. + * @default 0 + */ + columnOffset?: number | undefined; + } + interface ScriptOptions extends BaseOptions { + /** + * V8's code cache data for the supplied source. + */ + cachedData?: Buffer | NodeJS.ArrayBufferView | undefined; + /** @deprecated in favor of `script.createCachedData()` */ + produceCachedData?: boolean | undefined; + /** + * Called during evaluation of this module when `import()` is called. + * If this option is not specified, calls to `import()` will reject with `ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING`. + */ + importModuleDynamically?: + | ((specifier: string, script: Script, importAttributes: ImportAttributes) => Module) + | undefined; + } + interface RunningScriptOptions extends BaseOptions { + /** + * When `true`, if an `Error` occurs while compiling the `code`, the line of code causing the error is attached to the stack trace. + * Default: `true`. + */ + displayErrors?: boolean | undefined; + /** + * Specifies the number of milliseconds to execute code before terminating execution. + * If execution is terminated, an `Error` will be thrown. This value must be a strictly positive integer. + */ + timeout?: number | undefined; + /** + * If `true`, the execution will be terminated when `SIGINT` (Ctrl+C) is received. 
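To illustrate the `timeout` option defined just above, a sketch (not part of the vendored typings; the 50 ms budget is arbitrary) in which runaway code is terminated with an error:

```js
const vm = require('node:vm');

try {
  // The infinite loop is forcibly terminated after roughly 50 ms.
  vm.runInNewContext('while (true) {}', {}, { timeout: 50 });
} catch (err) {
  console.log('Script terminated:', err.message);
}
```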
+ * Existing handlers for the event that have been attached via `process.on('SIGINT')` will be disabled during script execution, but will continue to work after that. + * If execution is terminated, an `Error` will be thrown. + * Default: `false`. + */ + breakOnSigint?: boolean | undefined; + } + interface RunningScriptInNewContextOptions extends RunningScriptOptions { + /** + * Human-readable name of the newly created context. + */ + contextName?: CreateContextOptions["name"]; + /** + * Origin corresponding to the newly created context for display purposes. The origin should be formatted like a URL, + * but with only the scheme, host, and port (if necessary), like the value of the `url.origin` property of a `URL` object. + * Most notably, this string should omit the trailing slash, as that denotes a path. + */ + contextOrigin?: CreateContextOptions["origin"]; + contextCodeGeneration?: CreateContextOptions["codeGeneration"]; + /** + * If set to `afterEvaluate`, microtasks will be run immediately after the script has run. + */ + microtaskMode?: CreateContextOptions["microtaskMode"]; + } + interface RunningCodeOptions extends RunningScriptOptions { + cachedData?: ScriptOptions["cachedData"]; + importModuleDynamically?: ScriptOptions["importModuleDynamically"]; + } + interface RunningCodeInNewContextOptions extends RunningScriptInNewContextOptions { + cachedData?: ScriptOptions["cachedData"]; + importModuleDynamically?: ScriptOptions["importModuleDynamically"]; + } + interface CompileFunctionOptions extends BaseOptions { + /** + * Provides an optional data with V8's code cache data for the supplied source. + */ + cachedData?: Buffer | undefined; + /** + * Specifies whether to produce new cache data. + * Default: `false`, + */ + produceCachedData?: boolean | undefined; + /** + * The sandbox/context in which the said function should be compiled in. + */ + parsingContext?: Context | undefined; + /** + * An array containing a collection of context extensions (objects wrapping the current scope) to be applied while compiling + */ + contextExtensions?: Object[] | undefined; + } + interface CreateContextOptions { + /** + * Human-readable name of the newly created context. + * @default 'VM Context i' Where i is an ascending numerical index of the created context. + */ + name?: string | undefined; + /** + * Corresponds to the newly created context for display purposes. + * The origin should be formatted like a `URL`, but with only the scheme, host, and port (if necessary), + * like the value of the `url.origin` property of a URL object. + * Most notably, this string should omit the trailing slash, as that denotes a path. + * @default '' + */ + origin?: string | undefined; + codeGeneration?: + | { + /** + * If set to false any calls to eval or function constructors (Function, GeneratorFunction, etc) + * will throw an EvalError. + * @default true + */ + strings?: boolean | undefined; + /** + * If set to false any attempt to compile a WebAssembly module will throw a WebAssembly.CompileError. + * @default true + */ + wasm?: boolean | undefined; + } + | undefined; + /** + * If set to `afterEvaluate`, microtasks will be run immediately after the script has run. 
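A sketch of the `codeGeneration.strings` switch described above (illustrative only; the exact error message depends on the V8 version):

```js
const vm = require('node:vm');

// Disallow runtime code generation from strings inside this context.
const context = vm.createContext({}, {
  codeGeneration: { strings: false },
});

try {
  vm.runInContext('eval("1 + 1")', context);
} catch (err) {
  console.log(err.name); // 'EvalError'
}
```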
+ */ + microtaskMode?: "afterEvaluate" | undefined; + } + type MeasureMemoryMode = "summary" | "detailed"; + interface MeasureMemoryOptions { + /** + * @default 'summary' + */ + mode?: MeasureMemoryMode | undefined; + /** + * @default 'default' + */ + execution?: "default" | "eager" | undefined; + } + interface MemoryMeasurement { + total: { + jsMemoryEstimate: number; + jsMemoryRange: [number, number]; + }; + } + /** + * Instances of the `vm.Script` class contain precompiled scripts that can be + * executed in specific contexts. + * @since v0.3.1 + */ + class Script { + constructor(code: string, options?: ScriptOptions | string); + /** + * Runs the compiled code contained by the `vm.Script` object within the given`contextifiedObject` and returns the result. Running code does not have access + * to local scope. + * + * The following example compiles code that increments a global variable, sets + * the value of another global variable, then execute the code multiple times. + * The globals are contained in the `context` object. + * + * ```js + * const vm = require('node:vm'); + * + * const context = { + * animal: 'cat', + * count: 2, + * }; + * + * const script = new vm.Script('count += 1; name = "kitty";'); + * + * vm.createContext(context); + * for (let i = 0; i < 10; ++i) { + * script.runInContext(context); + * } + * + * console.log(context); + * // Prints: { animal: 'cat', count: 12, name: 'kitty' } + * ``` + * + * Using the `timeout` or `breakOnSigint` options will result in new event loops + * and corresponding threads being started, which have a non-zero performance + * overhead. + * @since v0.3.1 + * @param contextifiedObject A `contextified` object as returned by the `vm.createContext()` method. + * @return the result of the very last statement executed in the script. + */ + runInContext(contextifiedObject: Context, options?: RunningScriptOptions): any; + /** + * First contextifies the given `contextObject`, runs the compiled code contained + * by the `vm.Script` object within the created context, and returns the result. + * Running code does not have access to local scope. + * + * The following example compiles code that sets a global variable, then executes + * the code multiple times in different contexts. The globals are set on and + * contained within each individual `context`. + * + * ```js + * const vm = require('node:vm'); + * + * const script = new vm.Script('globalVar = "set"'); + * + * const contexts = [{}, {}, {}]; + * contexts.forEach((context) => { + * script.runInNewContext(context); + * }); + * + * console.log(contexts); + * // Prints: [{ globalVar: 'set' }, { globalVar: 'set' }, { globalVar: 'set' }] + * ``` + * @since v0.3.1 + * @param contextObject An object that will be `contextified`. If `undefined`, a new object will be created. + * @return the result of the very last statement executed in the script. + */ + runInNewContext(contextObject?: Context, options?: RunningScriptInNewContextOptions): any; + /** + * Runs the compiled code contained by the `vm.Script` within the context of the + * current `global` object. Running code does not have access to local scope, but _does_ have access to the current `global` object. 
+ * + * The following example compiles code that increments a `global` variable then + * executes that code multiple times: + * + * ```js + * const vm = require('node:vm'); + * + * global.globalVar = 0; + * + * const script = new vm.Script('globalVar += 1', { filename: 'myfile.vm' }); + * + * for (let i = 0; i < 1000; ++i) { + * script.runInThisContext(); + * } + * + * console.log(globalVar); + * + * // 1000 + * ``` + * @since v0.3.1 + * @return the result of the very last statement executed in the script. + */ + runInThisContext(options?: RunningScriptOptions): any; + /** + * Creates a code cache that can be used with the `Script` constructor's`cachedData` option. Returns a `Buffer`. This method may be called at any + * time and any number of times. + * + * The code cache of the `Script` doesn't contain any JavaScript observable + * states. The code cache is safe to be saved along side the script source and + * used to construct new `Script` instances multiple times. + * + * Functions in the `Script` source can be marked as lazily compiled and they are + * not compiled at construction of the `Script`. These functions are going to be + * compiled when they are invoked the first time. The code cache serializes the + * metadata that V8 currently knows about the `Script` that it can use to speed up + * future compilations. + * + * ```js + * const script = new vm.Script(` + * function add(a, b) { + * return a + b; + * } + * + * const x = add(1, 2); + * `); + * + * const cacheWithoutAdd = script.createCachedData(); + * // In `cacheWithoutAdd` the function `add()` is marked for full compilation + * // upon invocation. + * + * script.runInThisContext(); + * + * const cacheWithAdd = script.createCachedData(); + * // `cacheWithAdd` contains fully compiled function `add()`. + * ``` + * @since v10.6.0 + */ + createCachedData(): Buffer; + /** @deprecated in favor of `script.createCachedData()` */ + cachedDataProduced?: boolean | undefined; + /** + * When `cachedData` is supplied to create the `vm.Script`, this value will be set + * to either `true` or `false` depending on acceptance of the data by V8\. + * Otherwise the value is `undefined`. + * @since v5.7.0 + */ + cachedDataRejected?: boolean | undefined; + cachedData?: Buffer | undefined; + /** + * When the script is compiled from a source that contains a source map magic + * comment, this property will be set to the URL of the source map. + * + * ```js + * import vm from 'node:vm'; + * + * const script = new vm.Script(` + * function myFunc() {} + * //# sourceMappingURL=sourcemap.json + * `); + * + * console.log(script.sourceMapURL); + * // Prints: sourcemap.json + * ``` + * @since v19.1.0, v18.13.0 + */ + sourceMapURL?: string | undefined; + } + /** + * If given a `contextObject`, the `vm.createContext()` method will `prepare + * that object` so that it can be used in calls to {@link runInContext} or `script.runInContext()`. Inside such scripts, + * the `contextObject` will be the global object, retaining all of its existing + * properties but also having the built-in objects and functions any standard [global object](https://es5.github.io/#x15.1) has. Outside of scripts run by the vm module, global variables + * will remain unchanged. 
+ * + * ```js + * const vm = require('node:vm'); + * + * global.globalVar = 3; + * + * const context = { globalVar: 1 }; + * vm.createContext(context); + * + * vm.runInContext('globalVar *= 2;', context); + * + * console.log(context); + * // Prints: { globalVar: 2 } + * + * console.log(global.globalVar); + * // Prints: 3 + * ``` + * + * If `contextObject` is omitted (or passed explicitly as `undefined`), a new, + * empty `contextified` object will be returned. + * + * The `vm.createContext()` method is primarily useful for creating a single + * context that can be used to run multiple scripts. For instance, if emulating a + * web browser, the method can be used to create a single context representing a + * window's global object, then run all `` + +[Get supported base64-js with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-base64-js?utm_source=npm-base64-js&utm_medium=referral&utm_campaign=readme) + +## methods + +`base64js` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, which both take a single argument. + +* `byteLength` - Takes a base64 string and returns length of byte array +* `toByteArray` - Takes a base64 string and returns a byte array +* `fromByteArray` - Takes a byte array and returns a base64 string + +## license + +MIT diff --git a/node_modules/base64-js/base64js.min.js b/node_modules/base64-js/base64js.min.js new file mode 100644 index 0000000..908ac83 --- /dev/null +++ b/node_modules/base64-js/base64js.min.js @@ -0,0 +1 @@ +(function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"==typeof window?"undefined"==typeof global?"undefined"==typeof self?this:self:global:window,b.base64js=a()}})(function(){return function(){function b(d,e,g){function a(j,i){if(!e[j]){if(!d[j]){var f="function"==typeof require&&require;if(!i&&f)return f(j,!0);if(h)return h(j,!0);var c=new Error("Cannot find module '"+j+"'");throw c.code="MODULE_NOT_FOUND",c}var k=e[j]={exports:{}};d[j][0].call(k.exports,function(b){var c=d[j][1][b];return a(c||b)},k,k.exports,b,d,e,g)}return e[j].exports}for(var h="function"==typeof require&&require,c=0;c>16,j[k++]=255&b>>8,j[k++]=255&b;return 2===h&&(b=l[a.charCodeAt(c)]<<2|l[a.charCodeAt(c+1)]>>4,j[k++]=255&b),1===h&&(b=l[a.charCodeAt(c)]<<10|l[a.charCodeAt(c+1)]<<4|l[a.charCodeAt(c+2)]>>2,j[k++]=255&b>>8,j[k++]=255&b),j}function g(a){return k[63&a>>18]+k[63&a>>12]+k[63&a>>6]+k[63&a]}function h(a,b,c){for(var d,e=[],f=b;fj?j:g+f));return 1===d?(b=a[c-1],e.push(k[b>>2]+k[63&b<<4]+"==")):2===d&&(b=(a[c-2]<<8)+a[c-1],e.push(k[b>>10]+k[63&b>>4]+k[63&b<<2]+"=")),e.join("")}c.byteLength=function(a){var b=d(a),c=b[0],e=b[1];return 3*(c+e)/4-e},c.toByteArray=f,c.fromByteArray=j;for(var k=[],l=[],m="undefined"==typeof Uint8Array?Array:Uint8Array,n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o=0,p=n.length;o 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 
0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} diff --git a/node_modules/base64-js/package.json b/node_modules/base64-js/package.json new file mode 100644 index 0000000..c3972e3 --- /dev/null +++ b/node_modules/base64-js/package.json @@ -0,0 +1,47 @@ +{ + "name": "base64-js", + "description": "Base64 encoding/decoding in pure JS", + "version": "1.5.1", + "author": "T. 
Jameson Little ", + "typings": "index.d.ts", + "bugs": { + "url": "https://github.com/beatgammit/base64-js/issues" + }, + "devDependencies": { + "babel-minify": "^0.5.1", + "benchmark": "^2.1.4", + "browserify": "^16.3.0", + "standard": "*", + "tape": "4.x" + }, + "homepage": "https://github.com/beatgammit/base64-js", + "keywords": [ + "base64" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/beatgammit/base64-js.git" + }, + "scripts": { + "build": "browserify -s base64js -r ./ | minify > base64js.min.js", + "lint": "standard", + "test": "npm run lint && npm run unit", + "unit": "tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/bignumber.js/CHANGELOG.md b/node_modules/bignumber.js/CHANGELOG.md new file mode 100644 index 0000000..9586720 --- /dev/null +++ b/node_modules/bignumber.js/CHANGELOG.md @@ -0,0 +1,295 @@ +#### 9.1.2 +* 28/08/23 +* #354 Amend `round` to avoid bug in v8 Maglev compiler. +* [BUGFIX] #344 `minumum(0, -0)` should be `-0`. + +#### 9.1.1 +* 04/12/22 +* #338 [BUGFIX] `exponentiatedBy`: ensure `0**-n === Infinity` for very large `n`. + +#### 9.1.0 +* 08/08/22 +* #329 Remove `import` example. +* #277 Resolve lint warnings and add number `toString` note. +* Correct `decimalPlaces()` return type in *bignumber.d.ts*. +* Add ES module global `crypto` example. +* #322 Add `exports` field to *package.json*. +* #251 (#308) Amend *bignumber.d.ts* to allow instantiating a BigNumber without `new`. + +#### 9.0.2 +* 12/12/21 +* #250 [BUGFIX] Allow use of user-defined alphabet for base 10. +* #295 Remove *bignumber.min.js* and amend *README.md*. +* Update *.travis.yml* and *LICENCE.md*. + +#### 9.0.1 +* 28/09/20 +* [BUGFIX] #276 Correct `sqrt` initial estimate. +* Update *.travis.yml*, *LICENCE.md* and *README.md*. + +#### 9.0.0 +* 27/05/2019 +* For compatibility with legacy browsers, remove `Symbol` references. + +#### 8.1.1 +* 24/02/2019 +* [BUGFIX] #222 Restore missing `var` to `export BigNumber`. +* Allow any key in BigNumber.Instance in *bignumber.d.ts*. + +#### 8.1.0 +* 23/02/2019 +* [NEW FEATURE] #220 Create a BigNumber using `{s, e, c}`. +* [NEW FEATURE] `isBigNumber`: if `BigNumber.DEBUG` is `true`, also check that the BigNumber instance is well-formed. +* Remove `instanceof` checks; just use `_isBigNumber` to identify a BigNumber instance. +* Add `_isBigNumber` to prototype in *bignumber.mjs*. +* Add tests for BigNumber creation from object. +* Update *API.html*. + +#### 8.0.2 +* 13/01/2019 +* #209 `toPrecision` without argument should follow `toString`. +* Improve *Use* section of *README*. +* Optimise `toString(10)`. +* Add verson number to API doc. + +#### 8.0.1 +* 01/11/2018 +* Rest parameter must be array type in *bignumber.d.ts*. + +#### 8.0.0 +* 01/11/2018 +* [NEW FEATURE] Add `BigNumber.sum` method. +* [NEW FEATURE]`toFormat`: add `prefix` and `suffix` options. +* [NEW FEATURE] #178 Pass custom formatting to `toFormat`. +* [BREAKING CHANGE] #184 `toFraction`: return array of BigNumbers not strings. +* [NEW FEATURE] #185 Enable overwrite of `valueOf` to prevent accidental addition to string. +* #183 Add Node.js `crypto` requirement to documentation. +* [BREAKING CHANGE] #198 Disallow signs and whitespace in custom alphabet. 
+* [NEW FEATURE] #188 Implement `util.inspect.custom` for Node.js REPL. +* #170 Make `isBigNumber` a type guard in *bignumber.d.ts*. +* [BREAKING CHANGE] `BigNumber.min` and `BigNumber.max`: don't accept an array. +* Update *.travis.yml*. +* Remove *bower.json*. + +#### 7.2.1 +* 24/05/2018 +* Add `browser` field to *package.json*. + +#### 7.2.0 +* 22/05/2018 +* #166 Correct *.mjs* file. Remove extension from `main` field in *package.json*. + +#### 7.1.0 +* 18/05/2018 +* Add `module` field to *package.json* for *bignumber.mjs*. + +#### 7.0.2 +* 17/05/2018 +* #165 Bugfix: upper-case letters for bases 11-36 in a custom alphabet. +* Add note to *README* regarding creating BigNumbers from Number values. + +#### 7.0.1 +* 26/04/2018 +* #158 Fix global object variable name typo. + +#### 7.0.0 +* 26/04/2018 +* #143 Remove global BigNumber from typings. +* #144 Enable compatibility with `Object.freeze(Object.prototype)`. +* #148 #123 #11 Only throw on a number primitive with more than 15 significant digits if `BigNumber.DEBUG` is `true`. +* Only throw on an invalid BigNumber value if `BigNumber.DEBUG` is `true`. Return BigNumber `NaN` instead. +* #154 `exponentiatedBy`: allow BigNumber exponent. +* #156 Prevent Content Security Policy *unsafe-eval* issue. +* `toFraction`: allow `Infinity` maximum denominator. +* Comment-out some excess tests to reduce test time. +* Amend indentation and other spacing. + +#### 6.0.0 +* 26/01/2018 +* #137 Implement `APLHABET` configuration option. +* Remove `ERRORS` configuration option. +* Remove `toDigits` method; extend `precision` method accordingly. +* Remove s`round` method; extend `decimalPlaces` method accordingly. +* Remove methods: `ceil`, `floor`, and `truncated`. +* Remove method aliases: `add`, `cmp`, `isInt`, `isNeg`, `trunc`, `mul`, `neg` and `sub`. +* Rename methods: `shift` to `shiftedBy`, `another` to `clone`, `toPower` to `exponentiatedBy`, and `equals` to `isEqualTo`. +* Rename methods: add `is` prefix to `greaterThan`, `greaterThanOrEqualTo`, `lessThan` and `lessThanOrEqualTo`. +* Add methods: `multipliedBy`, `isBigNumber`, `isPositive`, `integerValue`, `maximum` and `minimum`. +* Refactor test suite. +* Add *CHANGELOG.md*. +* Rewrite *bignumber.d.ts*. +* Redo API image. + +#### 5.0.0 +* 27/11/2017 +* #81 Don't throw on constructor call without `new`. + +#### 4.1.0 +* 26/09/2017 +* Remove node 0.6 from *.travis.yml*. +* Add *bignumber.mjs*. + +#### 4.0.4 +* 03/09/2017 +* Add missing aliases to *bignumber.d.ts*. + +#### 4.0.3 +* 30/08/2017 +* Add types: *bignumber.d.ts*. + +#### 4.0.2 +* 03/05/2017 +* #120 Workaround Safari/Webkit bug. + +#### 4.0.1 +* 05/04/2017 +* #121 BigNumber.default to BigNumber['default']. + +#### 4.0.0 +* 09/01/2017 +* Replace BigNumber.isBigNumber method with isBigNumber prototype property. + +#### 3.1.2 +* 08/01/2017 +* Minor documentation edit. + +#### 3.1.1 +* 08/01/2017 +* Uncomment `isBigNumber` tests. +* Ignore dot files. + +#### 3.1.0 +* 08/01/2017 +* Add `isBigNumber` method. + +#### 3.0.2 +* 08/01/2017 +* Bugfix: Possible incorrect value of `ERRORS` after a `BigNumber.another` call (due to `parseNumeric` declaration in outer scope). + +#### 3.0.1 +* 23/11/2016 +* Apply fix for old ipads with `%` issue, see #57 and #102. +* Correct error message. + +#### 3.0.0 +* 09/11/2016 +* Remove `require('crypto')` - leave it to the user. +* Add `BigNumber.set` as `BigNumber.config` alias. +* Default `POW_PRECISION` to `0`. + +#### 2.4.0 +* 14/07/2016 +* #97 Add exports to support ES6 imports. 
+ +#### 2.3.0 +* 07/03/2016 +* #86 Add modulus parameter to `toPower`. + +#### 2.2.0 +* 03/03/2016 +* #91 Permit larger JS integers. + +#### 2.1.4 +* 15/12/2015 +* Correct UMD. + +#### 2.1.3 +* 13/12/2015 +* Refactor re global object and crypto availability when bundling. + +#### 2.1.2 +* 10/12/2015 +* Bugfix: `window.crypto` not assigned to `crypto`. + +#### 2.1.1 +* 09/12/2015 +* Prevent code bundler from adding `crypto` shim. + +#### 2.1.0 +* 26/10/2015 +* For `valueOf` and `toJSON`, include the minus sign with negative zero. + +#### 2.0.8 +* 2/10/2015 +* Internal round function bugfix. + +#### 2.0.6 +* 31/03/2015 +* Add bower.json. Tweak division after in-depth review. + +#### 2.0.5 +* 25/03/2015 +* Amend README. Remove bitcoin address. + +#### 2.0.4 +* 25/03/2015 +* Critical bugfix #58: division. + +#### 2.0.3 +* 18/02/2015 +* Amend README. Add source map. + +#### 2.0.2 +* 18/02/2015 +* Correct links. + +#### 2.0.1 +* 18/02/2015 +* Add `max`, `min`, `precision`, `random`, `shiftedBy`, `toDigits` and `truncated` methods. +* Add the short-forms: `add`, `mul`, `sd`, `sub` and `trunc`. +* Add an `another` method to enable multiple independent constructors to be created. +* Add support for the base 2, 8 and 16 prefixes `0b`, `0o` and `0x`. +* Enable a rounding mode to be specified as a second parameter to `toExponential`, `toFixed`, `toFormat` and `toPrecision`. +* Add a `CRYPTO` configuration property so cryptographically-secure pseudo-random number generation can be specified. +* Add a `MODULO_MODE` configuration property to enable the rounding mode used by the `modulo` operation to be specified. +* Add a `POW_PRECISION` configuration property to enable the number of significant digits calculated by the power operation to be limited. +* Improve code quality. +* Improve documentation. + +#### 2.0.0 +* 29/12/2014 +* Add `dividedToIntegerBy`, `isInteger` and `toFormat` methods. +* Remove the following short-forms: `isF`, `isZ`, `toE`, `toF`, `toFr`, `toN`, `toP`, `toS`. +* Store a BigNumber's coefficient in base 1e14, rather than base 10. +* Add fast path for integers to BigNumber constructor. +* Incorporate the library into the online documentation. + +#### 1.5.0 +* 13/11/2014 +* Add `toJSON` and `decimalPlaces` methods. + +#### 1.4.1 +* 08/06/2014 +* Amend README. + +#### 1.4.0 +* 08/05/2014 +* Add `toNumber`. + +#### 1.3.0 +* 08/11/2013 +* Ensure correct rounding of `sqrt` in all, rather than almost all, cases. +* Maximum radix to 64. + +#### 1.2.1 +* 17/10/2013 +* Sign of zero when x < 0 and x + (-x) = 0. + +#### 1.2.0 +* 19/9/2013 +* Throw Error objects for stack. + +#### 1.1.1 +* 22/8/2013 +* Show original value in constructor error message. + +#### 1.1.0 +* 1/8/2013 +* Allow numbers with trailing radix point. 
+ +#### 1.0.1 +* Bugfix: error messages with incorrect method name + +#### 1.0.0 +* 8/11/2012 +* Initial release diff --git a/node_modules/bignumber.js/LICENCE.md b/node_modules/bignumber.js/LICENCE.md new file mode 100644 index 0000000..3c2f6a3 --- /dev/null +++ b/node_modules/bignumber.js/LICENCE.md @@ -0,0 +1,26 @@ +The MIT License (MIT) +===================== + +Copyright © `<2023>` `Michael Mclaughlin` + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the “Software”), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/bignumber.js/README.md b/node_modules/bignumber.js/README.md new file mode 100644 index 0000000..4785443 --- /dev/null +++ b/node_modules/bignumber.js/README.md @@ -0,0 +1,286 @@ +![bignumber.js](https://raw.githubusercontent.com/MikeMcl/bignumber.js/gh-pages/bignumberjs.png) + +A JavaScript library for arbitrary-precision decimal and non-decimal arithmetic. + +[![npm version](https://img.shields.io/npm/v/bignumber.js.svg)](https://www.npmjs.com/package/bignumber.js) +[![npm downloads](https://img.shields.io/npm/dw/bignumber.js)](https://www.npmjs.com/package/bignumber.js) + +
+ +## Features + +- Integers and decimals +- Simple API but full-featured +- Faster, smaller, and perhaps easier to use than JavaScript versions of Java's BigDecimal +- 8 KB minified and gzipped +- Replicates the `toExponential`, `toFixed`, `toPrecision` and `toString` methods of JavaScript's Number type +- Includes a `toFraction` and a correctly-rounded `squareRoot` method +- Supports cryptographically-secure pseudo-random number generation +- No dependencies +- Wide platform compatibility: uses JavaScript 1.5 (ECMAScript 3) features only +- Comprehensive [documentation](http://mikemcl.github.io/bignumber.js/) and test set + +![API](https://raw.githubusercontent.com/MikeMcl/bignumber.js/gh-pages/API.png) + +If a smaller and simpler library is required see [big.js](https://github.com/MikeMcl/big.js/). +It's less than half the size but only works with decimal numbers and only has half the methods. +It also has fewer configuration options than this library, and does not allow `NaN` or `Infinity`. + +See also [decimal.js](https://github.com/MikeMcl/decimal.js/), which among other things adds support for non-integer powers, and performs all operations to a specified number of significant digits. + +## Load + +The library is the single JavaScript file *bignumber.js* or ES module *bignumber.mjs*. + +### Browser + +```html + +``` + +> ES module + +```html + +``` + +### [Node.js](http://nodejs.org) + +```bash +npm install bignumber.js +``` + +```javascript +const BigNumber = require('bignumber.js'); +``` + +> ES module + +```javascript +import BigNumber from "bignumber.js"; +import { BigNumber } from "./node_modules/bignumber.js/bignumber.mjs"; +``` + +### [Deno](https://deno.land/) + +```javascript +import BigNumber from 'https://raw.githubusercontent.com/mikemcl/bignumber.js/v9.1.2/bignumber.mjs'; +import BigNumber from 'https://unpkg.com/bignumber.js@latest/bignumber.mjs'; +``` + +## Use + +The library exports a single constructor function, [`BigNumber`](http://mikemcl.github.io/bignumber.js/#bignumber), which accepts a value of type Number, String or BigNumber, + +```javascript +let x = new BigNumber(123.4567); +let y = BigNumber('123456.7e-3'); +let z = new BigNumber(x); +x.isEqualTo(y) && y.isEqualTo(z) && x.isEqualTo(z); // true +``` + +To get the string value of a BigNumber use [`toString()`](http://mikemcl.github.io/bignumber.js/#toS) or [`toFixed()`](http://mikemcl.github.io/bignumber.js/#toFix). Using `toFixed()` prevents exponential notation being returned, no matter how large or small the value. + +```javascript +let x = new BigNumber('1111222233334444555566'); +x.toString(); // "1.111222233334444555566e+21" +x.toFixed(); // "1111222233334444555566" +``` + +If the limited precision of Number values is not well understood, it is recommended to create BigNumbers from String values rather than Number values to avoid a potential loss of precision. + +*In all further examples below, `let`, semicolons and `toString` calls are not shown. If a commented-out value is in quotes it means `toString` has been called on the preceding expression.* + +```javascript +// Precision loss from using numeric literals with more than 15 significant digits. +new BigNumber(1.0000000000000001) // '1' +new BigNumber(88259496234518.57) // '88259496234518.56' +new BigNumber(99999999999999999999) // '100000000000000000000' + +// Precision loss from using numeric literals outside the range of Number values. 
+new BigNumber(2e+308) // 'Infinity' +new BigNumber(1e-324) // '0' + +// Precision loss from the unexpected result of arithmetic with Number values. +new BigNumber(0.7 + 0.1) // '0.7999999999999999' +``` + +When creating a BigNumber from a Number, note that a BigNumber is created from a Number's decimal `toString()` value not from its underlying binary value. If the latter is required, then pass the Number's `toString(2)` value and specify base 2. + +```javascript +new BigNumber(Number.MAX_VALUE.toString(2), 2) +``` + +BigNumbers can be created from values in bases from 2 to 36. See [`ALPHABET`](http://mikemcl.github.io/bignumber.js/#alphabet) to extend this range. + +```javascript +a = new BigNumber(1011, 2) // "11" +b = new BigNumber('zz.9', 36) // "1295.25" +c = a.plus(b) // "1306.25" +``` + +*Performance is better if base 10 is NOT specified for decimal values. Only specify base 10 when you want to limit the number of decimal places of the input value to the current [`DECIMAL_PLACES`](http://mikemcl.github.io/bignumber.js/#decimal-places) setting.* + +A BigNumber is immutable in the sense that it is not changed by its methods. + +```javascript +0.3 - 0.1 // 0.19999999999999998 +x = new BigNumber(0.3) +x.minus(0.1) // "0.2" +x // "0.3" +``` + +The methods that return a BigNumber can be chained. + +```javascript +x.dividedBy(y).plus(z).times(9) +x.times('1.23456780123456789e+9').plus(9876.5432321).dividedBy('4444562598.111772').integerValue() +``` + +Some of the longer method names have a shorter alias. + +```javascript +x.squareRoot().dividedBy(y).exponentiatedBy(3).isEqualTo(x.sqrt().div(y).pow(3)) // true +x.modulo(y).multipliedBy(z).eq(x.mod(y).times(z)) // true +``` + +As with JavaScript's Number type, there are [`toExponential`](http://mikemcl.github.io/bignumber.js/#toE), [`toFixed`](http://mikemcl.github.io/bignumber.js/#toFix) and [`toPrecision`](http://mikemcl.github.io/bignumber.js/#toP) methods. + +```javascript +x = new BigNumber(255.5) +x.toExponential(5) // "2.55500e+2" +x.toFixed(5) // "255.50000" +x.toPrecision(5) // "255.50" +x.toNumber() // 255.5 +``` + + A base can be specified for [`toString`](http://mikemcl.github.io/bignumber.js/#toS). + +*Performance is better if base 10 is NOT specified, i.e. use `toString()` not `toString(10)`. Only specify base 10 when you want to limit the number of decimal places of the string to the current [`DECIMAL_PLACES`](http://mikemcl.github.io/bignumber.js/#decimal-places) setting.* + + ```javascript + x.toString(16) // "ff.8" + ``` + +There is a [`toFormat`](http://mikemcl.github.io/bignumber.js/#toFor) method which may be useful for internationalisation. + +```javascript +y = new BigNumber('1234567.898765') +y.toFormat(2) // "1,234,567.90" +``` + +The maximum number of decimal places of the result of an operation involving division (i.e. a division, square root, base conversion or negative power operation) is set using the `set` or `config` method of the `BigNumber` constructor. + +The other arithmetic operations always give the exact result. 
+ +```javascript +BigNumber.set({ DECIMAL_PLACES: 10, ROUNDING_MODE: 4 }) + +x = new BigNumber(2) +y = new BigNumber(3) +z = x.dividedBy(y) // "0.6666666667" +z.squareRoot() // "0.8164965809" +z.exponentiatedBy(-3) // "3.3749999995" +z.toString(2) // "0.1010101011" +z.multipliedBy(z) // "0.44444444448888888889" +z.multipliedBy(z).decimalPlaces(10) // "0.4444444445" +``` + +There is a [`toFraction`](http://mikemcl.github.io/bignumber.js/#toFr) method with an optional *maximum denominator* argument + +```javascript +y = new BigNumber(355) +pi = y.dividedBy(113) // "3.1415929204" +pi.toFraction() // [ "7853982301", "2500000000" ] +pi.toFraction(1000) // [ "355", "113" ] +``` + +and [`isNaN`](http://mikemcl.github.io/bignumber.js/#isNaN) and [`isFinite`](http://mikemcl.github.io/bignumber.js/#isF) methods, as `NaN` and `Infinity` are valid `BigNumber` values. + +```javascript +x = new BigNumber(NaN) // "NaN" +y = new BigNumber(Infinity) // "Infinity" +x.isNaN() && !y.isNaN() && !x.isFinite() && !y.isFinite() // true +``` + +The value of a BigNumber is stored in a decimal floating point format in terms of a coefficient, exponent and sign. + +```javascript +x = new BigNumber(-123.456); +x.c // [ 123, 45600000000000 ] coefficient (i.e. significand) +x.e // 2 exponent +x.s // -1 sign +``` + +For advanced usage, multiple BigNumber constructors can be created, each with its own independent configuration. + +```javascript +// Set DECIMAL_PLACES for the original BigNumber constructor +BigNumber.set({ DECIMAL_PLACES: 10 }) + +// Create another BigNumber constructor, optionally passing in a configuration object +BN = BigNumber.clone({ DECIMAL_PLACES: 5 }) + +x = new BigNumber(1) +y = new BN(1) + +x.div(3) // '0.3333333333' +y.div(3) // '0.33333' +``` + +To avoid having to call `toString` or `valueOf` on a BigNumber to get its value in the Node.js REPL or when using `console.log` use + +```javascript +BigNumber.prototype[require('util').inspect.custom] = BigNumber.prototype.valueOf; +``` + +For further information see the [API](http://mikemcl.github.io/bignumber.js/) reference in the *doc* directory. + +## Test + +The *test/modules* directory contains the test scripts for each method. + +The tests can be run with Node.js or a browser. For Node.js use + +```bash +npm test +``` + +or + +```bash +node test/test +``` + +To test a single method, use, for example + +```bash +node test/methods/toFraction +``` + +For the browser, open *test/test.html*. + +## Minify + +To minify using, for example, [terser](https://github.com/terser/terser) + +```bash +npm install -g terser +``` + +```bash +terser big.js -c -m -o big.min.js +``` + +## Licence + +The MIT Licence. + +See [LICENCE](https://github.com/MikeMcl/bignumber.js/blob/master/LICENCE). 
diff --git a/node_modules/bignumber.js/bignumber.d.ts b/node_modules/bignumber.js/bignumber.d.ts new file mode 100644 index 0000000..c5a60a8 --- /dev/null +++ b/node_modules/bignumber.js/bignumber.d.ts @@ -0,0 +1,1831 @@ +// Type definitions for bignumber.js >=8.1.0 +// Project: https://github.com/MikeMcl/bignumber.js +// Definitions by: Michael Mclaughlin +// Definitions: https://github.com/MikeMcl/bignumber.js + +// Documentation: http://mikemcl.github.io/bignumber.js/ +// +// Exports: +// +// class BigNumber (default export) +// type BigNumber.Constructor +// type BigNumber.ModuloMode +// type BigNumber.RoundingMode +// type BigNumber.Value +// interface BigNumber.Config +// interface BigNumber.Format +// interface BigNumber.Instance +// +// Example: +// +// import {BigNumber} from "bignumber.js" +// //import BigNumber from "bignumber.js" +// +// let rm: BigNumber.RoundingMode = BigNumber.ROUND_UP; +// let f: BigNumber.Format = { decimalSeparator: ',' }; +// let c: BigNumber.Config = { DECIMAL_PLACES: 4, ROUNDING_MODE: rm, FORMAT: f }; +// BigNumber.config(c); +// +// let v: BigNumber.Value = '12345.6789'; +// let b: BigNumber = new BigNumber(v); +// +// The use of compiler option `--strictNullChecks` is recommended. + +export default BigNumber; + +export namespace BigNumber { + + /** See `BigNumber.config` (alias `BigNumber.set`) and `BigNumber.clone`. */ + interface Config { + + /** + * An integer, 0 to 1e+9. Default value: 20. + * + * The maximum number of decimal places of the result of operations involving division, i.e. + * division, square root and base conversion operations, and exponentiation when the exponent is + * negative. + * + * ```ts + * BigNumber.config({ DECIMAL_PLACES: 5 }) + * BigNumber.set({ DECIMAL_PLACES: 5 }) + * ``` + */ + DECIMAL_PLACES?: number; + + /** + * An integer, 0 to 8. Default value: `BigNumber.ROUND_HALF_UP` (4). + * + * The rounding mode used in operations that involve division (see `DECIMAL_PLACES`) and the + * default rounding mode of the `decimalPlaces`, `precision`, `toExponential`, `toFixed`, + * `toFormat` and `toPrecision` methods. + * + * The modes are available as enumerated properties of the BigNumber constructor. + * + * ```ts + * BigNumber.config({ ROUNDING_MODE: 0 }) + * BigNumber.set({ ROUNDING_MODE: BigNumber.ROUND_UP }) + * ``` + */ + ROUNDING_MODE?: BigNumber.RoundingMode; + + /** + * An integer, 0 to 1e+9, or an array, [-1e+9 to 0, 0 to 1e+9]. + * Default value: `[-7, 20]`. + * + * The exponent value(s) at which `toString` returns exponential notation. + * + * If a single number is assigned, the value is the exponent magnitude. + * + * If an array of two numbers is assigned then the first number is the negative exponent value at + * and beneath which exponential notation is used, and the second number is the positive exponent + * value at and above which exponential notation is used. + * + * For example, to emulate JavaScript numbers in terms of the exponent values at which they begin + * to use exponential notation, use `[-7, 20]`. 
+ * + * ```ts + * BigNumber.config({ EXPONENTIAL_AT: 2 }) + * new BigNumber(12.3) // '12.3' e is only 1 + * new BigNumber(123) // '1.23e+2' + * new BigNumber(0.123) // '0.123' e is only -1 + * new BigNumber(0.0123) // '1.23e-2' + * + * BigNumber.config({ EXPONENTIAL_AT: [-7, 20] }) + * new BigNumber(123456789) // '123456789' e is only 8 + * new BigNumber(0.000000123) // '1.23e-7' + * + * // Almost never return exponential notation: + * BigNumber.config({ EXPONENTIAL_AT: 1e+9 }) + * + * // Always return exponential notation: + * BigNumber.config({ EXPONENTIAL_AT: 0 }) + * ``` + * + * Regardless of the value of `EXPONENTIAL_AT`, the `toFixed` method will always return a value in + * normal notation and the `toExponential` method will always return a value in exponential form. + * Calling `toString` with a base argument, e.g. `toString(10)`, will also always return normal + * notation. + */ + EXPONENTIAL_AT?: number | [number, number]; + + /** + * An integer, magnitude 1 to 1e+9, or an array, [-1e+9 to -1, 1 to 1e+9]. + * Default value: `[-1e+9, 1e+9]`. + * + * The exponent value(s) beyond which overflow to Infinity and underflow to zero occurs. + * + * If a single number is assigned, it is the maximum exponent magnitude: values wth a positive + * exponent of greater magnitude become Infinity and those with a negative exponent of greater + * magnitude become zero. + * + * If an array of two numbers is assigned then the first number is the negative exponent limit and + * the second number is the positive exponent limit. + * + * For example, to emulate JavaScript numbers in terms of the exponent values at which they + * become zero and Infinity, use [-324, 308]. + * + * ```ts + * BigNumber.config({ RANGE: 500 }) + * BigNumber.config().RANGE // [ -500, 500 ] + * new BigNumber('9.999e499') // '9.999e+499' + * new BigNumber('1e500') // 'Infinity' + * new BigNumber('1e-499') // '1e-499' + * new BigNumber('1e-500') // '0' + * + * BigNumber.config({ RANGE: [-3, 4] }) + * new BigNumber(99999) // '99999' e is only 4 + * new BigNumber(100000) // 'Infinity' e is 5 + * new BigNumber(0.001) // '0.01' e is only -3 + * new BigNumber(0.0001) // '0' e is -4 + * ``` + * The largest possible magnitude of a finite BigNumber is 9.999...e+1000000000. + * The smallest possible magnitude of a non-zero BigNumber is 1e-1000000000. + */ + RANGE?: number | [number, number]; + + /** + * A boolean: `true` or `false`. Default value: `false`. + * + * The value that determines whether cryptographically-secure pseudo-random number generation is + * used. If `CRYPTO` is set to true then the random method will generate random digits using + * `crypto.getRandomValues` in browsers that support it, or `crypto.randomBytes` if using a + * version of Node.js that supports it. + * + * If neither function is supported by the host environment then attempting to set `CRYPTO` to + * `true` will fail and an exception will be thrown. + * + * If `CRYPTO` is `false` then the source of randomness used will be `Math.random` (which is + * assumed to generate at least 30 bits of randomness). + * + * See `BigNumber.random`. + * + * ```ts + * // Node.js + * global.crypto = require('crypto') + * + * BigNumber.config({ CRYPTO: true }) + * BigNumber.config().CRYPTO // true + * BigNumber.random() // 0.54340758610486147524 + * ``` + */ + CRYPTO?: boolean; + + /** + * An integer, 0, 1, 3, 6 or 9. Default value: `BigNumber.ROUND_DOWN` (1). + * + * The modulo mode used when calculating the modulus: `a mod n`. 
+ * The quotient, `q = a / n`, is calculated according to the `ROUNDING_MODE` that corresponds to + * the chosen `MODULO_MODE`. + * The remainder, `r`, is calculated as: `r = a - n * q`. + * + * The modes that are most commonly used for the modulus/remainder operation are shown in the + * following table. Although the other rounding modes can be used, they may not give useful + * results. + * + * Property | Value | Description + * :------------------|:------|:------------------------------------------------------------------ + * `ROUND_UP` | 0 | The remainder is positive if the dividend is negative. + * `ROUND_DOWN` | 1 | The remainder has the same sign as the dividend. + * | | Uses 'truncating division' and matches JavaScript's `%` operator . + * `ROUND_FLOOR` | 3 | The remainder has the same sign as the divisor. + * | | This matches Python's `%` operator. + * `ROUND_HALF_EVEN` | 6 | The IEEE 754 remainder function. + * `EUCLID` | 9 | The remainder is always positive. + * | | Euclidian division: `q = sign(n) * floor(a / abs(n))` + * + * The rounding/modulo modes are available as enumerated properties of the BigNumber constructor. + * + * See `modulo`. + * + * ```ts + * BigNumber.config({ MODULO_MODE: BigNumber.EUCLID }) + * BigNumber.set({ MODULO_MODE: 9 }) // equivalent + * ``` + */ + MODULO_MODE?: BigNumber.ModuloMode; + + /** + * An integer, 0 to 1e+9. Default value: 0. + * + * The maximum precision, i.e. number of significant digits, of the result of the power operation + * - unless a modulus is specified. + * + * If set to 0, the number of significant digits will not be limited. + * + * See `exponentiatedBy`. + * + * ```ts + * BigNumber.config({ POW_PRECISION: 100 }) + * ``` + */ + POW_PRECISION?: number; + + /** + * An object including any number of the properties shown below. + * + * The object configures the format of the string returned by the `toFormat` method. + * The example below shows the properties of the object that are recognised, and + * their default values. + * + * Unlike the other configuration properties, the values of the properties of the `FORMAT` object + * will not be checked for validity - the existing object will simply be replaced by the object + * that is passed in. + * + * See `toFormat`. + * + * ```ts + * BigNumber.config({ + * FORMAT: { + * // string to prepend + * prefix: '', + * // the decimal separator + * decimalSeparator: '.', + * // the grouping separator of the integer part + * groupSeparator: ',', + * // the primary grouping size of the integer part + * groupSize: 3, + * // the secondary grouping size of the integer part + * secondaryGroupSize: 0, + * // the grouping separator of the fraction part + * fractionGroupSeparator: ' ', + * // the grouping size of the fraction part + * fractionGroupSize: 0, + * // string to append + * suffix: '' + * } + * }) + * ``` + */ + FORMAT?: BigNumber.Format; + + /** + * The alphabet used for base conversion. The length of the alphabet corresponds to the maximum + * value of the base argument that can be passed to the BigNumber constructor or `toString`. + * + * Default value: `'0123456789abcdefghijklmnopqrstuvwxyz'`. + * + * There is no maximum length for the alphabet, but it must be at least 2 characters long, + * and it must not contain whitespace or a repeated character, or the sign indicators '+' and + * '-', or the decimal separator '.'. 
+ * + * ```ts + * // duodecimal (base 12) + * BigNumber.config({ ALPHABET: '0123456789TE' }) + * x = new BigNumber('T', 12) + * x.toString() // '10' + * x.toString(12) // 'T' + * ``` + */ + ALPHABET?: string; + } + + /** See `FORMAT` and `toFormat`. */ + interface Format { + + /** The string to prepend. */ + prefix?: string; + + /** The decimal separator. */ + decimalSeparator?: string; + + /** The grouping separator of the integer part. */ + groupSeparator?: string; + + /** The primary grouping size of the integer part. */ + groupSize?: number; + + /** The secondary grouping size of the integer part. */ + secondaryGroupSize?: number; + + /** The grouping separator of the fraction part. */ + fractionGroupSeparator?: string; + + /** The grouping size of the fraction part. */ + fractionGroupSize?: number; + + /** The string to append. */ + suffix?: string; + } + + interface Instance { + + /** The coefficient of the value of this BigNumber, an array of base 1e14 integer numbers, or null. */ + readonly c: number[] | null; + + /** The exponent of the value of this BigNumber, an integer number, -1000000000 to 1000000000, or null. */ + readonly e: number | null; + + /** The sign of the value of this BigNumber, -1, 1, or null. */ + readonly s: number | null; + + [key: string]: any; + } + + type Constructor = typeof BigNumber; + type ModuloMode = 0 | 1 | 3 | 6 | 9; + type RoundingMode = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8; + type Value = string | number | Instance; +} + +export declare class BigNumber implements BigNumber.Instance { + + /** Used internally to identify a BigNumber instance. */ + private readonly _isBigNumber: true; + + /** The coefficient of the value of this BigNumber, an array of base 1e14 integer numbers, or null. */ + readonly c: number[] | null; + + /** The exponent of the value of this BigNumber, an integer number, -1000000000 to 1000000000, or null. */ + readonly e: number | null; + + /** The sign of the value of this BigNumber, -1, 1, or null. */ + readonly s: number | null; + + /** + * Returns a new instance of a BigNumber object with value `n`, where `n` is a numeric value in + * the specified `base`, or base 10 if `base` is omitted or is `null` or `undefined`. + * + * ```ts + * x = new BigNumber(123.4567) // '123.4567' + * // 'new' is optional + * y = BigNumber(x) // '123.4567' + * ``` + * + * If `n` is a base 10 value it can be in normal (fixed-point) or exponential notation. + * Values in other bases must be in normal notation. Values in any base can have fraction digits, + * i.e. digits after the decimal point. + * + * ```ts + * new BigNumber(43210) // '43210' + * new BigNumber('4.321e+4') // '43210' + * new BigNumber('-735.0918e-430') // '-7.350918e-428' + * new BigNumber('123412421.234324', 5) // '607236.557696' + * ``` + * + * Signed `0`, signed `Infinity` and `NaN` are supported. + * + * ```ts + * new BigNumber('-Infinity') // '-Infinity' + * new BigNumber(NaN) // 'NaN' + * new BigNumber(-0) // '0' + * new BigNumber('.5') // '0.5' + * new BigNumber('+2') // '2' + * ``` + * + * String values in hexadecimal literal form, e.g. `'0xff'`, are valid, as are string values with + * the octal and binary prefixs `'0o'` and `'0b'`. String values in octal literal form without the + * prefix will be interpreted as decimals, e.g. `'011'` is interpreted as 11, not 9. 
+ * + * ```ts + * new BigNumber(-10110100.1, 2) // '-180.5' + * new BigNumber('-0b10110100.1') // '-180.5' + * new BigNumber('ff.8', 16) // '255.5' + * new BigNumber('0xff.8') // '255.5' + * ``` + * + * If a base is specified, `n` is rounded according to the current `DECIMAL_PLACES` and + * `ROUNDING_MODE` settings. This includes base 10, so don't include a `base` parameter for decimal + * values unless this behaviour is desired. + * + * ```ts + * BigNumber.config({ DECIMAL_PLACES: 5 }) + * new BigNumber(1.23456789) // '1.23456789' + * new BigNumber(1.23456789, 10) // '1.23457' + * ``` + * + * An error is thrown if `base` is invalid. + * + * There is no limit to the number of digits of a value of type string (other than that of + * JavaScript's maximum array size). See `RANGE` to set the maximum and minimum possible exponent + * value of a BigNumber. + * + * ```ts + * new BigNumber('5032485723458348569331745.33434346346912144534543') + * new BigNumber('4.321e10000000') + * ``` + * + * BigNumber `NaN` is returned if `n` is invalid (unless `BigNumber.DEBUG` is `true`, see below). + * + * ```ts + * new BigNumber('.1*') // 'NaN' + * new BigNumber('blurgh') // 'NaN' + * new BigNumber(9, 2) // 'NaN' + * ``` + * + * To aid in debugging, if `BigNumber.DEBUG` is `true` then an error will be thrown on an + * invalid `n`. An error will also be thrown if `n` is of type number with more than 15 + * significant digits, as calling `toString` or `valueOf` on these numbers may not result in the + * intended value. + * + * ```ts + * console.log(823456789123456.3) // 823456789123456.2 + * new BigNumber(823456789123456.3) // '823456789123456.2' + * BigNumber.DEBUG = true + * // 'Error: Number has more than 15 significant digits' + * new BigNumber(823456789123456.3) + * // 'Error: Not a base 2 number' + * new BigNumber(9, 2) + * ``` + * + * A BigNumber can also be created from an object literal. + * Use `isBigNumber` to check that it is well-formed. + * + * ```ts + * new BigNumber({ s: 1, e: 2, c: [ 777, 12300000000000 ], _isBigNumber: true }) // '777.123' + * ``` + * + * @param n A numeric value. + * @param base The base of `n`, integer, 2 to 36 (or `ALPHABET.length`, see `ALPHABET`). + */ + constructor(n: BigNumber.Value, base?: number); + + /** + * Returns a BigNumber whose value is the absolute value, i.e. the magnitude, of the value of this + * BigNumber. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber(-0.8) + * x.absoluteValue() // '0.8' + * ``` + */ + absoluteValue(): BigNumber; + + /** + * Returns a BigNumber whose value is the absolute value, i.e. the magnitude, of the value of this + * BigNumber. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber(-0.8) + * x.abs() // '0.8' + * ``` + */ + abs(): BigNumber; + + /** + * Returns | | + * :-------:|:--------------------------------------------------------------| + * 1 | If the value of this BigNumber is greater than the value of `n` + * -1 | If the value of this BigNumber is less than the value of `n` + * 0 | If this BigNumber and `n` have the same value + * `null` | If the value of either this BigNumber or `n` is `NaN` + * + * ```ts + * + * x = new BigNumber(Infinity) + * y = new BigNumber(5) + * x.comparedTo(y) // 1 + * x.comparedTo(x.minus(1)) // 0 + * y.comparedTo(NaN) // null + * y.comparedTo('110', 2) // -1 + * ``` + * @param n A numeric value. + * @param [base] The base of n. 
+ */ + comparedTo(n: BigNumber.Value, base?: number): number; + + /** + * Returns a BigNumber whose value is the value of this BigNumber rounded by rounding mode + * `roundingMode` to a maximum of `decimalPlaces` decimal places. + * + * If `decimalPlaces` is omitted, or is `null` or `undefined`, the return value is the number of + * decimal places of the value of this BigNumber, or `null` if the value of this BigNumber is + * ±`Infinity` or `NaN`. + * + * If `roundingMode` is omitted, or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * Throws if `decimalPlaces` or `roundingMode` is invalid. + * + * ```ts + * x = new BigNumber(1234.56) + * x.decimalPlaces() // 2 + * x.decimalPlaces(1) // '1234.6' + * x.decimalPlaces(2) // '1234.56' + * x.decimalPlaces(10) // '1234.56' + * x.decimalPlaces(0, 1) // '1234' + * x.decimalPlaces(0, 6) // '1235' + * x.decimalPlaces(1, 1) // '1234.5' + * x.decimalPlaces(1, BigNumber.ROUND_HALF_EVEN) // '1234.6' + * x // '1234.56' + * y = new BigNumber('9.9e-101') + * y.decimalPlaces() // 102 + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + */ + decimalPlaces(): number | null; + decimalPlaces(decimalPlaces: number, roundingMode?: BigNumber.RoundingMode): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber rounded by rounding mode + * `roundingMode` to a maximum of `decimalPlaces` decimal places. + * + * If `decimalPlaces` is omitted, or is `null` or `undefined`, the return value is the number of + * decimal places of the value of this BigNumber, or `null` if the value of this BigNumber is + * ±`Infinity` or `NaN`. + * + * If `roundingMode` is omitted, or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * Throws if `decimalPlaces` or `roundingMode` is invalid. + * + * ```ts + * x = new BigNumber(1234.56) + * x.dp() // 2 + * x.dp(1) // '1234.6' + * x.dp(2) // '1234.56' + * x.dp(10) // '1234.56' + * x.dp(0, 1) // '1234' + * x.dp(0, 6) // '1235' + * x.dp(1, 1) // '1234.5' + * x.dp(1, BigNumber.ROUND_HALF_EVEN) // '1234.6' + * x // '1234.56' + * y = new BigNumber('9.9e-101') + * y.dp() // 102 + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + */ + dp(): number | null; + dp(decimalPlaces: number, roundingMode?: BigNumber.RoundingMode): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber divided by `n`, rounded + * according to the current `DECIMAL_PLACES` and `ROUNDING_MODE` settings. + * + * ```ts + * x = new BigNumber(355) + * y = new BigNumber(113) + * x.dividedBy(y) // '3.14159292035398230088' + * x.dividedBy(5) // '71' + * x.dividedBy(47, 16) // '5' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + dividedBy(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber divided by `n`, rounded + * according to the current `DECIMAL_PLACES` and `ROUNDING_MODE` settings. + * + * ```ts + * x = new BigNumber(355) + * y = new BigNumber(113) + * x.div(y) // '3.14159292035398230088' + * x.div(5) // '71' + * x.div(47, 16) // '5' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + div(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the integer part of dividing the value of this BigNumber by + * `n`. 
+ * + * ```ts + * x = new BigNumber(5) + * y = new BigNumber(3) + * x.dividedToIntegerBy(y) // '1' + * x.dividedToIntegerBy(0.7) // '7' + * x.dividedToIntegerBy('0.f', 16) // '5' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + dividedToIntegerBy(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the integer part of dividing the value of this BigNumber by + * `n`. + * + * ```ts + * x = new BigNumber(5) + * y = new BigNumber(3) + * x.idiv(y) // '1' + * x.idiv(0.7) // '7' + * x.idiv('0.f', 16) // '5' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + idiv(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber exponentiated by `n`, i.e. + * raised to the power `n`, and optionally modulo a modulus `m`. + * + * If `n` is negative the result is rounded according to the current `DECIMAL_PLACES` and + * `ROUNDING_MODE` settings. + * + * As the number of digits of the result of the power operation can grow so large so quickly, + * e.g. 123.456**10000 has over 50000 digits, the number of significant digits calculated is + * limited to the value of the `POW_PRECISION` setting (unless a modulus `m` is specified). + * + * By default `POW_PRECISION` is set to 0. This means that an unlimited number of significant + * digits will be calculated, and that the method's performance will decrease dramatically for + * larger exponents. + * + * If `m` is specified and the value of `m`, `n` and this BigNumber are integers and `n` is + * positive, then a fast modular exponentiation algorithm is used, otherwise the operation will + * be performed as `x.exponentiatedBy(n).modulo(m)` with a `POW_PRECISION` of 0. + * + * Throws if `n` is not an integer. + * + * ```ts + * Math.pow(0.7, 2) // 0.48999999999999994 + * x = new BigNumber(0.7) + * x.exponentiatedBy(2) // '0.49' + * BigNumber(3).exponentiatedBy(-2) // '0.11111111111111111111' + * ``` + * + * @param n The exponent, an integer. + * @param [m] The modulus. + */ + exponentiatedBy(n: BigNumber.Value, m?: BigNumber.Value): BigNumber; + exponentiatedBy(n: number, m?: BigNumber.Value): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber exponentiated by `n`, i.e. + * raised to the power `n`, and optionally modulo a modulus `m`. + * + * If `n` is negative the result is rounded according to the current `DECIMAL_PLACES` and + * `ROUNDING_MODE` settings. + * + * As the number of digits of the result of the power operation can grow so large so quickly, + * e.g. 123.456**10000 has over 50000 digits, the number of significant digits calculated is + * limited to the value of the `POW_PRECISION` setting (unless a modulus `m` is specified). + * + * By default `POW_PRECISION` is set to 0. This means that an unlimited number of significant + * digits will be calculated, and that the method's performance will decrease dramatically for + * larger exponents. + * + * If `m` is specified and the value of `m`, `n` and this BigNumber are integers and `n` is + * positive, then a fast modular exponentiation algorithm is used, otherwise the operation will + * be performed as `x.pow(n).modulo(m)` with a `POW_PRECISION` of 0. + * + * Throws if `n` is not an integer. + * + * ```ts + * Math.pow(0.7, 2) // 0.48999999999999994 + * x = new BigNumber(0.7) + * x.pow(2) // '0.49' + * BigNumber(3).pow(-2) // '0.11111111111111111111' + * ``` + * + * @param n The exponent, an integer. 
+ * @param [m] The modulus. + */ + pow(n: BigNumber.Value, m?: BigNumber.Value): BigNumber; + pow(n: number, m?: BigNumber.Value): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber rounded to an integer using + * rounding mode `rm`. + * + * If `rm` is omitted, or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * Throws if `rm` is invalid. + * + * ```ts + * x = new BigNumber(123.456) + * x.integerValue() // '123' + * x.integerValue(BigNumber.ROUND_CEIL) // '124' + * y = new BigNumber(-12.7) + * y.integerValue() // '-13' + * x.integerValue(BigNumber.ROUND_DOWN) // '-12' + * ``` + * + * @param {BigNumber.RoundingMode} [rm] The roundng mode, an integer, 0 to 8. + */ + integerValue(rm?: BigNumber.RoundingMode): BigNumber; + + /** + * Returns `true` if the value of this BigNumber is equal to the value of `n`, otherwise returns + * `false`. + * + * As with JavaScript, `NaN` does not equal `NaN`. + * + * ```ts + * 0 === 1e-324 // true + * x = new BigNumber(0) + * x.isEqualTo('1e-324') // false + * BigNumber(-0).isEqualTo(x) // true ( -0 === 0 ) + * BigNumber(255).isEqualTo('ff', 16) // true + * + * y = new BigNumber(NaN) + * y.isEqualTo(NaN) // false + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + isEqualTo(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is equal to the value of `n`, otherwise returns + * `false`. + * + * As with JavaScript, `NaN` does not equal `NaN`. + * + * ```ts + * 0 === 1e-324 // true + * x = new BigNumber(0) + * x.eq('1e-324') // false + * BigNumber(-0).eq(x) // true ( -0 === 0 ) + * BigNumber(255).eq('ff', 16) // true + * + * y = new BigNumber(NaN) + * y.eq(NaN) // false + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + eq(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is a finite number, otherwise returns `false`. + * + * The only possible non-finite values of a BigNumber are `NaN`, `Infinity` and `-Infinity`. + * + * ```ts + * x = new BigNumber(1) + * x.isFinite() // true + * y = new BigNumber(Infinity) + * y.isFinite() // false + * ``` + */ + isFinite(): boolean; + + /** + * Returns `true` if the value of this BigNumber is greater than the value of `n`, otherwise + * returns `false`. + * + * ```ts + * 0.1 > (0.3 - 0.2) // true + * x = new BigNumber(0.1) + * x.isGreaterThan(BigNumber(0.3).minus(0.2)) // false + * BigNumber(0).isGreaterThan(x) // false + * BigNumber(11, 3).isGreaterThan(11.1, 2) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + isGreaterThan(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is greater than the value of `n`, otherwise + * returns `false`. + * + * ```ts + * 0.1 > (0.3 - 0 // true + * x = new BigNumber(0.1) + * x.gt(BigNumber(0.3).minus(0.2)) // false + * BigNumber(0).gt(x) // false + * BigNumber(11, 3).gt(11.1, 2) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + gt(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is greater than or equal to the value of `n`, + * otherwise returns `false`. 
+ * + * ```ts + * (0.3 - 0.2) >= 0.1 // false + * x = new BigNumber(0.3).minus(0.2) + * x.isGreaterThanOrEqualTo(0.1) // true + * BigNumber(1).isGreaterThanOrEqualTo(x) // true + * BigNumber(10, 18).isGreaterThanOrEqualTo('i', 36) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + isGreaterThanOrEqualTo(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is greater than or equal to the value of `n`, + * otherwise returns `false`. + * + * ```ts + * (0.3 - 0.2) >= 0.1 // false + * x = new BigNumber(0.3).minus(0.2) + * x.gte(0.1) // true + * BigNumber(1).gte(x) // true + * BigNumber(10, 18).gte('i', 36) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + gte(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is an integer, otherwise returns `false`. + * + * ```ts + * x = new BigNumber(1) + * x.isInteger() // true + * y = new BigNumber(123.456) + * y.isInteger() // false + * ``` + */ + isInteger(): boolean; + + /** + * Returns `true` if the value of this BigNumber is less than the value of `n`, otherwise returns + * `false`. + * + * ```ts + * (0.3 - 0.2) < 0.1 // true + * x = new BigNumber(0.3).minus(0.2) + * x.isLessThan(0.1) // false + * BigNumber(0).isLessThan(x) // true + * BigNumber(11.1, 2).isLessThan(11, 3) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + isLessThan(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is less than the value of `n`, otherwise returns + * `false`. + * + * ```ts + * (0.3 - 0.2) < 0.1 // true + * x = new BigNumber(0.3).minus(0.2) + * x.lt(0.1) // false + * BigNumber(0).lt(x) // true + * BigNumber(11.1, 2).lt(11, 3) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + lt(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is less than or equal to the value of `n`, + * otherwise returns `false`. + * + * ```ts + * 0.1 <= (0.3 - 0.2) // false + * x = new BigNumber(0.1) + * x.isLessThanOrEqualTo(BigNumber(0.3).minus(0.2)) // true + * BigNumber(-1).isLessThanOrEqualTo(x) // true + * BigNumber(10, 18).isLessThanOrEqualTo('i', 36) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + isLessThanOrEqualTo(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is less than or equal to the value of `n`, + * otherwise returns `false`. + * + * ```ts + * 0.1 <= (0.3 - 0.2) // false + * x = new BigNumber(0.1) + * x.lte(BigNumber(0.3).minus(0.2)) // true + * BigNumber(-1).lte(x) // true + * BigNumber(10, 18).lte('i', 36) // true + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + lte(n: BigNumber.Value, base?: number): boolean; + + /** + * Returns `true` if the value of this BigNumber is `NaN`, otherwise returns `false`. + * + * ```ts + * x = new BigNumber(NaN) + * x.isNaN() // true + * y = new BigNumber('Infinity') + * y.isNaN() // false + * ``` + */ + isNaN(): boolean; + + /** + * Returns `true` if the value of this BigNumber is negative, otherwise returns `false`. 
+ * + * ```ts + * x = new BigNumber(-0) + * x.isNegative() // true + * y = new BigNumber(2) + * y.isNegative() // false + * ``` + */ + isNegative(): boolean; + + /** + * Returns `true` if the value of this BigNumber is positive, otherwise returns `false`. + * + * ```ts + * x = new BigNumber(-0) + * x.isPositive() // false + * y = new BigNumber(2) + * y.isPositive() // true + * ``` + */ + isPositive(): boolean; + + /** + * Returns `true` if the value of this BigNumber is zero or minus zero, otherwise returns `false`. + * + * ```ts + * x = new BigNumber(-0) + * x.isZero() // true + * ``` + */ + isZero(): boolean; + + /** + * Returns a BigNumber whose value is the value of this BigNumber minus `n`. + * + * The return value is always exact and unrounded. + * + * ```ts + * 0.3 - 0.1 // 0.19999999999999998 + * x = new BigNumber(0.3) + * x.minus(0.1) // '0.2' + * x.minus(0.6, 20) // '0' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + minus(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber modulo `n`, i.e. the integer + * remainder of dividing this BigNumber by `n`. + * + * The value returned, and in particular its sign, is dependent on the value of the `MODULO_MODE` + * setting of this BigNumber constructor. If it is 1 (default value), the result will have the + * same sign as this BigNumber, and it will match that of Javascript's `%` operator (within the + * limits of double precision) and BigDecimal's `remainder` method. + * + * The return value is always exact and unrounded. + * + * See `MODULO_MODE` for a description of the other modulo modes. + * + * ```ts + * 1 % 0.9 // 0.09999999999999998 + * x = new BigNumber(1) + * x.modulo(0.9) // '0.1' + * y = new BigNumber(33) + * y.modulo('a', 33) // '3' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + modulo(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber modulo `n`, i.e. the integer + * remainder of dividing this BigNumber by `n`. + * + * The value returned, and in particular its sign, is dependent on the value of the `MODULO_MODE` + * setting of this BigNumber constructor. If it is 1 (default value), the result will have the + * same sign as this BigNumber, and it will match that of Javascript's `%` operator (within the + * limits of double precision) and BigDecimal's `remainder` method. + * + * The return value is always exact and unrounded. + * + * See `MODULO_MODE` for a description of the other modulo modes. + * + * ```ts + * 1 % 0.9 // 0.09999999999999998 + * x = new BigNumber(1) + * x.mod(0.9) // '0.1' + * y = new BigNumber(33) + * y.mod('a', 33) // '3' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + mod(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber multiplied by `n`. + * + * The return value is always exact and unrounded. + * + * ```ts + * 0.6 * 3 // 1.7999999999999998 + * x = new BigNumber(0.6) + * y = x.multipliedBy(3) // '1.8' + * BigNumber('7e+500').multipliedBy(y) // '1.26e+501' + * x.multipliedBy('-a', 16) // '-6' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + multipliedBy(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber multiplied by `n`. + * + * The return value is always exact and unrounded. 
+ * + * ```ts + * 0.6 * 3 // 1.7999999999999998 + * x = new BigNumber(0.6) + * y = x.times(3) // '1.8' + * BigNumber('7e+500').times(y) // '1.26e+501' + * x.times('-a', 16) // '-6' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + times(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber negated, i.e. multiplied by -1. + * + * ```ts + * x = new BigNumber(1.8) + * x.negated() // '-1.8' + * y = new BigNumber(-1.3) + * y.negated() // '1.3' + * ``` + */ + negated(): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber plus `n`. + * + * The return value is always exact and unrounded. + * + * ```ts + * 0.1 + 0.2 // 0.30000000000000004 + * x = new BigNumber(0.1) + * y = x.plus(0.2) // '0.3' + * BigNumber(0.7).plus(x).plus(y) // '1.1' + * x.plus('0.1', 8) // '0.225' + * ``` + * + * @param n A numeric value. + * @param [base] The base of n. + */ + plus(n: BigNumber.Value, base?: number): BigNumber; + + /** + * Returns the number of significant digits of the value of this BigNumber, or `null` if the value + * of this BigNumber is ±`Infinity` or `NaN`. + * + * If `includeZeros` is true then any trailing zeros of the integer part of the value of this + * BigNumber are counted as significant digits, otherwise they are not. + * + * Throws if `includeZeros` is invalid. + * + * ```ts + * x = new BigNumber(9876.54321) + * x.precision() // 9 + * y = new BigNumber(987000) + * y.precision(false) // 3 + * y.precision(true) // 6 + * ``` + * + * @param [includeZeros] Whether to include integer trailing zeros in the significant digit count. + */ + precision(includeZeros?: boolean): number; + + /** + * Returns a BigNumber whose value is the value of this BigNumber rounded to a precision of + * `significantDigits` significant digits using rounding mode `roundingMode`. + * + * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` will be used. + * + * Throws if `significantDigits` or `roundingMode` is invalid. + * + * ```ts + * x = new BigNumber(9876.54321) + * x.precision(6) // '9876.54' + * x.precision(6, BigNumber.ROUND_UP) // '9876.55' + * x.precision(2) // '9900' + * x.precision(2, 1) // '9800' + * x // '9876.54321' + * ``` + * + * @param significantDigits Significant digits, integer, 1 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + */ + precision(significantDigits: number, roundingMode?: BigNumber.RoundingMode): BigNumber; + + /** + * Returns the number of significant digits of the value of this BigNumber, + * or `null` if the value of this BigNumber is ±`Infinity` or `NaN`. + * + * If `includeZeros` is true then any trailing zeros of the integer part of + * the value of this BigNumber are counted as significant digits, otherwise + * they are not. + * + * Throws if `includeZeros` is invalid. + * + * ```ts + * x = new BigNumber(9876.54321) + * x.sd() // 9 + * y = new BigNumber(987000) + * y.sd(false) // 3 + * y.sd(true) // 6 + * ``` + * + * @param [includeZeros] Whether to include integer trailing zeros in the significant digit count. + */ + sd(includeZeros?: boolean): number; + + /** + * Returns a BigNumber whose value is the value of this BigNumber rounded to a precision of + * `significantDigits` significant digits using rounding mode `roundingMode`. + * + * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` will be used. + * + * Throws if `significantDigits` or `roundingMode` is invalid. 
+ * + * ```ts + * x = new BigNumber(9876.54321) + * x.sd(6) // '9876.54' + * x.sd(6, BigNumber.ROUND_UP) // '9876.55' + * x.sd(2) // '9900' + * x.sd(2, 1) // '9800' + * x // '9876.54321' + * ``` + * + * @param significantDigits Significant digits, integer, 1 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + */ + sd(significantDigits: number, roundingMode?: BigNumber.RoundingMode): BigNumber; + + /** + * Returns a BigNumber whose value is the value of this BigNumber shifted by `n` places. + * + * The shift is of the decimal point, i.e. of powers of ten, and is to the left if `n` is negative + * or to the right if `n` is positive. + * + * The return value is always exact and unrounded. + * + * Throws if `n` is invalid. + * + * ```ts + * x = new BigNumber(1.23) + * x.shiftedBy(3) // '1230' + * x.shiftedBy(-3) // '0.00123' + * ``` + * + * @param n The shift value, integer, -9007199254740991 to 9007199254740991. + */ + shiftedBy(n: number): BigNumber; + + /** + * Returns a BigNumber whose value is the square root of the value of this BigNumber, rounded + * according to the current `DECIMAL_PLACES` and `ROUNDING_MODE` settings. + * + * The return value will be correctly rounded, i.e. rounded as if the result was first calculated + * to an infinite number of correct digits before rounding. + * + * ```ts + * x = new BigNumber(16) + * x.squareRoot() // '4' + * y = new BigNumber(3) + * y.squareRoot() // '1.73205080756887729353' + * ``` + */ + squareRoot(): BigNumber; + + /** + * Returns a BigNumber whose value is the square root of the value of this BigNumber, rounded + * according to the current `DECIMAL_PLACES` and `ROUNDING_MODE` settings. + * + * The return value will be correctly rounded, i.e. rounded as if the result was first calculated + * to an infinite number of correct digits before rounding. + * + * ```ts + * x = new BigNumber(16) + * x.sqrt() // '4' + * y = new BigNumber(3) + * y.sqrt() // '1.73205080756887729353' + * ``` + */ + sqrt(): BigNumber; + + /** + * Returns a string representing the value of this BigNumber in exponential notation rounded using + * rounding mode `roundingMode` to `decimalPlaces` decimal places, i.e with one digit before the + * decimal point and `decimalPlaces` digits after it. + * + * If the value of this BigNumber in exponential notation has fewer than `decimalPlaces` fraction + * digits, the return value will be appended with zeros accordingly. + * + * If `decimalPlaces` is omitted, or is `null` or `undefined`, the number of digits after the + * decimal point defaults to the minimum number of digits necessary to represent the value + * exactly. + * + * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * Throws if `decimalPlaces` or `roundingMode` is invalid. + * + * ```ts + * x = 45.6 + * y = new BigNumber(x) + * x.toExponential() // '4.56e+1' + * y.toExponential() // '4.56e+1' + * x.toExponential(0) // '5e+1' + * y.toExponential(0) // '5e+1' + * x.toExponential(1) // '4.6e+1' + * y.toExponential(1) // '4.6e+1' + * y.toExponential(1, 1) // '4.5e+1' (ROUND_DOWN) + * x.toExponential(3) // '4.560e+1' + * y.toExponential(3) // '4.560e+1' + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. 
+ */ + toExponential(decimalPlaces: number, roundingMode?: BigNumber.RoundingMode): string; + toExponential(): string; + + /** + * Returns a string representing the value of this BigNumber in normal (fixed-point) notation + * rounded to `decimalPlaces` decimal places using rounding mode `roundingMode`. + * + * If the value of this BigNumber in normal notation has fewer than `decimalPlaces` fraction + * digits, the return value will be appended with zeros accordingly. + * + * Unlike `Number.prototype.toFixed`, which returns exponential notation if a number is greater or + * equal to 10**21, this method will always return normal notation. + * + * If `decimalPlaces` is omitted or is `null` or `undefined`, the return value will be unrounded + * and in normal notation. This is also unlike `Number.prototype.toFixed`, which returns the value + * to zero decimal places. It is useful when normal notation is required and the current + * `EXPONENTIAL_AT` setting causes `toString` to return exponential notation. + * + * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * Throws if `decimalPlaces` or `roundingMode` is invalid. + * + * ```ts + * x = 3.456 + * y = new BigNumber(x) + * x.toFixed() // '3' + * y.toFixed() // '3.456' + * y.toFixed(0) // '3' + * x.toFixed(2) // '3.46' + * y.toFixed(2) // '3.46' + * y.toFixed(2, 1) // '3.45' (ROUND_DOWN) + * x.toFixed(5) // '3.45600' + * y.toFixed(5) // '3.45600' + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + */ + toFixed(decimalPlaces: number, roundingMode?: BigNumber.RoundingMode): string; + toFixed(): string; + + /** + * Returns a string representing the value of this BigNumber in normal (fixed-point) notation + * rounded to `decimalPlaces` decimal places using rounding mode `roundingMode`, and formatted + * according to the properties of the `format` or `FORMAT` object. + * + * The formatting object may contain some or all of the properties shown in the examples below. + * + * If `decimalPlaces` is omitted or is `null` or `undefined`, then the return value is not + * rounded to a fixed number of decimal places. + * + * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` is used. + * + * If `format` is omitted or is `null` or `undefined`, `FORMAT` is used. + * + * Throws if `decimalPlaces`, `roundingMode`, or `format` is invalid. 
+ * + * ```ts + * fmt = { + * decimalSeparator: '.', + * groupSeparator: ',', + * groupSize: 3, + * secondaryGroupSize: 0, + * fractionGroupSeparator: ' ', + * fractionGroupSize: 0 + * } + * + * x = new BigNumber('123456789.123456789') + * + * // Set the global formatting options + * BigNumber.config({ FORMAT: fmt }) + * + * x.toFormat() // '123,456,789.123456789' + * x.toFormat(3) // '123,456,789.123' + * + * // If a reference to the object assigned to FORMAT has been retained, + * // the format properties can be changed directly + * fmt.groupSeparator = ' ' + * fmt.fractionGroupSize = 5 + * x.toFormat() // '123 456 789.12345 6789' + * + * // Alternatively, pass the formatting options as an argument + * fmt = { + * decimalSeparator: ',', + * groupSeparator: '.', + * groupSize: 3, + * secondaryGroupSize: 2 + * } + * + * x.toFormat() // '123 456 789.12345 6789' + * x.toFormat(fmt) // '12.34.56.789,123456789' + * x.toFormat(2, fmt) // '12.34.56.789,12' + * x.toFormat(3, BigNumber.ROUND_UP, fmt) // '12.34.56.789,124' + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + * @param [roundingMode] Rounding mode, integer, 0 to 8. + * @param [format] Formatting options object. See `BigNumber.Format`. + */ + toFormat(decimalPlaces: number, roundingMode: BigNumber.RoundingMode, format?: BigNumber.Format): string; + toFormat(decimalPlaces: number, roundingMode?: BigNumber.RoundingMode): string; + toFormat(decimalPlaces?: number): string; + toFormat(decimalPlaces: number, format: BigNumber.Format): string; + toFormat(format: BigNumber.Format): string; + + /** + * Returns an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to `max_denominator`. + * If a maximum denominator, `max_denominator`, is not specified, or is `null` or `undefined`, the + * denominator will be the lowest value necessary to represent the number exactly. + * + * Throws if `max_denominator` is invalid. + * + * ```ts + * x = new BigNumber(1.75) + * x.toFraction() // '7, 4' + * + * pi = new BigNumber('3.14159265358') + * pi.toFraction() // '157079632679,50000000000' + * pi.toFraction(100000) // '312689, 99532' + * pi.toFraction(10000) // '355, 113' + * pi.toFraction(100) // '311, 99' + * pi.toFraction(10) // '22, 7' + * pi.toFraction(1) // '3, 1' + * ``` + * + * @param [max_denominator] The maximum denominator, integer > 0, or Infinity. + */ + toFraction(max_denominator?: BigNumber.Value): [BigNumber, BigNumber]; + + /** As `valueOf`. */ + toJSON(): string; + + /** + * Returns the value of this BigNumber as a JavaScript primitive number. + * + * Using the unary plus operator gives the same result. + * + * ```ts + * x = new BigNumber(456.789) + * x.toNumber() // 456.789 + * +x // 456.789 + * + * y = new BigNumber('45987349857634085409857349856430985') + * y.toNumber() // 4.598734985763409e+34 + * + * z = new BigNumber(-0) + * 1 / z.toNumber() // -Infinity + * 1 / +z // -Infinity + * ``` + */ + toNumber(): number; + + /** + * Returns a string representing the value of this BigNumber rounded to `significantDigits` + * significant digits using rounding mode `roundingMode`. + * + * If `significantDigits` is less than the number of digits necessary to represent the integer + * part of the value in normal (fixed-point) notation, then exponential notation is used. 
+ *
+ * If `significantDigits` is omitted, or is `null` or `undefined`, then the return value is the
+ * same as `n.toString()`.
+ *
+ * If `roundingMode` is omitted or is `null` or `undefined`, `ROUNDING_MODE` is used.
+ *
+ * Throws if `significantDigits` or `roundingMode` is invalid.
+ *
+ * ```ts
+ * x = 45.6
+ * y = new BigNumber(x)
+ * x.toPrecision() // '45.6'
+ * y.toPrecision() // '45.6'
+ * x.toPrecision(1) // '5e+1'
+ * y.toPrecision(1) // '5e+1'
+ * y.toPrecision(2, 0) // '4.6e+1' (ROUND_UP)
+ * y.toPrecision(2, 1) // '4.5e+1' (ROUND_DOWN)
+ * x.toPrecision(5) // '45.600'
+ * y.toPrecision(5) // '45.600'
+ * ```
+ *
+ * @param [significantDigits] Significant digits, integer, 1 to 1e+9.
+ * @param [roundingMode] Rounding mode, integer 0 to 8.
+ */
+ toPrecision(significantDigits: number, roundingMode?: BigNumber.RoundingMode): string;
+ toPrecision(): string;
+
+ /**
+ * Returns a string representing the value of this BigNumber in base `base`, or base 10 if `base`
+ * is omitted or is `null` or `undefined`.
+ *
+ * For bases above 10, and using the default base conversion alphabet (see `ALPHABET`), values
+ * from 10 to 35 are represented by a-z (the same as `Number.prototype.toString`).
+ *
+ * If a base is specified the value is rounded according to the current `DECIMAL_PLACES` and
+ * `ROUNDING_MODE` settings, otherwise it is not.
+ *
+ * If a base is not specified, and this BigNumber has a positive exponent that is equal to or
+ * greater than the positive component of the current `EXPONENTIAL_AT` setting, or a negative
+ * exponent equal to or less than the negative component of the setting, then exponential notation
+ * is returned.
+ *
+ * If `base` is `null` or `undefined` it is ignored.
+ *
+ * Throws if `base` is invalid.
+ *
+ * ```ts
+ * x = new BigNumber(750000)
+ * x.toString() // '750000'
+ * BigNumber.config({ EXPONENTIAL_AT: 5 })
+ * x.toString() // '7.5e+5'
+ *
+ * y = new BigNumber(362.875)
+ * y.toString(2) // '101101010.111'
+ * y.toString(9) // '442.77777777777777777778'
+ * y.toString(32) // 'ba.s'
+ *
+ * BigNumber.config({ DECIMAL_PLACES: 4 });
+ * z = new BigNumber('1.23456789')
+ * z.toString() // '1.23456789'
+ * z.toString(10) // '1.2346'
+ * ```
+ *
+ * @param [base] The base, integer, 2 to 36 (or `ALPHABET.length`, see `ALPHABET`).
+ */
+ toString(base?: number): string;
+
+ /**
+ * As `toString`, but does not accept a base argument and includes the minus sign for negative
+ * zero.
+ *
+ * ```ts
+ * x = new BigNumber('-0')
+ * x.toString() // '0'
+ * x.valueOf() // '-0'
+ * y = new BigNumber('1.777e+457')
+ * y.valueOf() // '1.777e+457'
+ * ```
+ */
+ valueOf(): string;
+
+ /** Helps ES6 import. */
+ private static readonly default?: BigNumber.Constructor;
+
+ /** Helps ES6 import. */
+ private static readonly BigNumber?: BigNumber.Constructor;
+
+ /** Rounds away from zero. */
+ static readonly ROUND_UP: 0;
+
+ /** Rounds towards zero. */
+ static readonly ROUND_DOWN: 1;
+
+ /** Rounds towards Infinity. */
+ static readonly ROUND_CEIL: 2;
+
+ /** Rounds towards -Infinity. */
+ static readonly ROUND_FLOOR: 3;
+
+ /** Rounds towards nearest neighbour. If equidistant, rounds away from zero. */
+ static readonly ROUND_HALF_UP: 4;
+
+ /** Rounds towards nearest neighbour. If equidistant, rounds towards zero. */
+ static readonly ROUND_HALF_DOWN: 5;
+
+ /** Rounds towards nearest neighbour. If equidistant, rounds towards even neighbour. */
+ static readonly ROUND_HALF_EVEN: 6;
+
+ /** Rounds towards nearest neighbour.
If equidistant, rounds towards Infinity. */ + static readonly ROUND_HALF_CEIL: 7; + + /** Rounds towards nearest neighbour. If equidistant, rounds towards -Infinity. */ + static readonly ROUND_HALF_FLOOR: 8; + + /** See `MODULO_MODE`. */ + static readonly EUCLID: 9; + + /** + * To aid in debugging, if a `BigNumber.DEBUG` property is `true` then an error will be thrown + * if the BigNumber constructor receives an invalid `BigNumber.Value`, or if `BigNumber.isBigNumber` + * receives a BigNumber instance that is malformed. + * + * ```ts + * // No error, and BigNumber NaN is returned. + * new BigNumber('blurgh') // 'NaN' + * new BigNumber(9, 2) // 'NaN' + * BigNumber.DEBUG = true + * new BigNumber('blurgh') // '[BigNumber Error] Not a number' + * new BigNumber(9, 2) // '[BigNumber Error] Not a base 2 number' + * ``` + * + * An error will also be thrown if a `BigNumber.Value` is of type number with more than 15 + * significant digits, as calling `toString` or `valueOf` on such numbers may not result + * in the intended value. + * + * ```ts + * console.log(823456789123456.3) // 823456789123456.2 + * // No error, and the returned BigNumber does not have the same value as the number literal. + * new BigNumber(823456789123456.3) // '823456789123456.2' + * BigNumber.DEBUG = true + * new BigNumber(823456789123456.3) + * // '[BigNumber Error] Number primitive has more than 15 significant digits' + * ``` + * + * Check that a BigNumber instance is well-formed: + * + * ```ts + * x = new BigNumber(10) + * + * BigNumber.DEBUG = false + * // Change x.c to an illegitimate value. + * x.c = NaN + * // No error, as BigNumber.DEBUG is false. + * BigNumber.isBigNumber(x) // true + * + * BigNumber.DEBUG = true + * BigNumber.isBigNumber(x) // '[BigNumber Error] Invalid BigNumber' + * ``` + */ + static DEBUG?: boolean; + + /** + * Returns a new independent BigNumber constructor with configuration as described by `object`, or + * with the default configuration if object is `null` or `undefined`. + * + * Throws if `object` is not an object. + * + * ```ts + * BigNumber.config({ DECIMAL_PLACES: 5 }) + * BN = BigNumber.clone({ DECIMAL_PLACES: 9 }) + * + * x = new BigNumber(1) + * y = new BN(1) + * + * x.div(3) // 0.33333 + * y.div(3) // 0.333333333 + * + * // BN = BigNumber.clone({ DECIMAL_PLACES: 9 }) is equivalent to: + * BN = BigNumber.clone() + * BN.config({ DECIMAL_PLACES: 9 }) + * ``` + * + * @param [object] The configuration object. + */ + static clone(object?: BigNumber.Config): BigNumber.Constructor; + + /** + * Configures the settings that apply to this BigNumber constructor. + * + * The configuration object, `object`, contains any number of the properties shown in the example + * below. + * + * Returns an object with the above properties and their current values. + * + * Throws if `object` is not an object, or if an invalid value is assigned to one or more of the + * properties. + * + * ```ts + * BigNumber.config({ + * DECIMAL_PLACES: 40, + * ROUNDING_MODE: BigNumber.ROUND_HALF_CEIL, + * EXPONENTIAL_AT: [-10, 20], + * RANGE: [-500, 500], + * CRYPTO: true, + * MODULO_MODE: BigNumber.ROUND_FLOOR, + * POW_PRECISION: 80, + * FORMAT: { + * groupSize: 3, + * groupSeparator: ' ', + * decimalSeparator: ',' + * }, + * ALPHABET: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + * }); + * + * BigNumber.config().DECIMAL_PLACES // 40 + * ``` + * + * @param object The configuration object. 
+ */ + static config(object?: BigNumber.Config): BigNumber.Config; + + /** + * Returns `true` if `value` is a BigNumber instance, otherwise returns `false`. + * + * If `BigNumber.DEBUG` is `true`, throws if a BigNumber instance is not well-formed. + * + * ```ts + * x = 42 + * y = new BigNumber(x) + * + * BigNumber.isBigNumber(x) // false + * y instanceof BigNumber // true + * BigNumber.isBigNumber(y) // true + * + * BN = BigNumber.clone(); + * z = new BN(x) + * z instanceof BigNumber // false + * BigNumber.isBigNumber(z) // true + * ``` + * + * @param value The value to test. + */ + static isBigNumber(value: any): value is BigNumber; + + /** + * Returns a BigNumber whose value is the maximum of the arguments. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber('3257869345.0378653') + * BigNumber.maximum(4e9, x, '123456789.9') // '4000000000' + * + * arr = [12, '13', new BigNumber(14)] + * BigNumber.maximum.apply(null, arr) // '14' + * ``` + * + * @param n A numeric value. + */ + static maximum(...n: BigNumber.Value[]): BigNumber; + + /** + * Returns a BigNumber whose value is the maximum of the arguments. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber('3257869345.0378653') + * BigNumber.max(4e9, x, '123456789.9') // '4000000000' + * + * arr = [12, '13', new BigNumber(14)] + * BigNumber.max.apply(null, arr) // '14' + * ``` + * + * @param n A numeric value. + */ + static max(...n: BigNumber.Value[]): BigNumber; + + /** + * Returns a BigNumber whose value is the minimum of the arguments. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber('3257869345.0378653') + * BigNumber.minimum(4e9, x, '123456789.9') // '123456789.9' + * + * arr = [2, new BigNumber(-14), '-15.9999', -12] + * BigNumber.minimum.apply(null, arr) // '-15.9999' + * ``` + * + * @param n A numeric value. + */ + static minimum(...n: BigNumber.Value[]): BigNumber; + + /** + * Returns a BigNumber whose value is the minimum of the arguments. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber('3257869345.0378653') + * BigNumber.min(4e9, x, '123456789.9') // '123456789.9' + * + * arr = [2, new BigNumber(-14), '-15.9999', -12] + * BigNumber.min.apply(null, arr) // '-15.9999' + * ``` + * + * @param n A numeric value. + */ + static min(...n: BigNumber.Value[]): BigNumber; + + /** + * Returns a new BigNumber with a pseudo-random value equal to or greater than 0 and less than 1. + * + * The return value will have `decimalPlaces` decimal places, or less if trailing zeros are + * produced. If `decimalPlaces` is omitted, the current `DECIMAL_PLACES` setting will be used. + * + * Depending on the value of this BigNumber constructor's `CRYPTO` setting and the support for the + * `crypto` object in the host environment, the random digits of the return value are generated by + * either `Math.random` (fastest), `crypto.getRandomValues` (Web Cryptography API in recent + * browsers) or `crypto.randomBytes` (Node.js). + * + * To be able to set `CRYPTO` to true when using Node.js, the `crypto` object must be available + * globally: + * + * ```ts + * global.crypto = require('crypto') + * ``` + * + * If `CRYPTO` is true, i.e. one of the `crypto` methods is to be used, the value of a returned + * BigNumber should be cryptographically secure and statistically indistinguishable from a random + * value. + * + * Throws if `decimalPlaces` is invalid. 
+ * + * ```ts + * BigNumber.config({ DECIMAL_PLACES: 10 }) + * BigNumber.random() // '0.4117936847' + * BigNumber.random(20) // '0.78193327636914089009' + * ``` + * + * @param [decimalPlaces] Decimal places, integer, 0 to 1e+9. + */ + static random(decimalPlaces?: number): BigNumber; + + /** + * Returns a BigNumber whose value is the sum of the arguments. + * + * The return value is always exact and unrounded. + * + * ```ts + * x = new BigNumber('3257869345.0378653') + * BigNumber.sum(4e9, x, '123456789.9') // '7381326134.9378653' + * + * arr = [2, new BigNumber(14), '15.9999', 12] + * BigNumber.sum.apply(null, arr) // '43.9999' + * ``` + * + * @param n A numeric value. + */ + static sum(...n: BigNumber.Value[]): BigNumber; + + /** + * Configures the settings that apply to this BigNumber constructor. + * + * The configuration object, `object`, contains any number of the properties shown in the example + * below. + * + * Returns an object with the above properties and their current values. + * + * Throws if `object` is not an object, or if an invalid value is assigned to one or more of the + * properties. + * + * ```ts + * BigNumber.set({ + * DECIMAL_PLACES: 40, + * ROUNDING_MODE: BigNumber.ROUND_HALF_CEIL, + * EXPONENTIAL_AT: [-10, 20], + * RANGE: [-500, 500], + * CRYPTO: true, + * MODULO_MODE: BigNumber.ROUND_FLOOR, + * POW_PRECISION: 80, + * FORMAT: { + * groupSize: 3, + * groupSeparator: ' ', + * decimalSeparator: ',' + * }, + * ALPHABET: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + * }); + * + * BigNumber.set().DECIMAL_PLACES // 40 + * ``` + * + * @param object The configuration object. + */ + static set(object?: BigNumber.Config): BigNumber.Config; +} + +export function BigNumber(n: BigNumber.Value, base?: number): BigNumber; diff --git a/node_modules/bignumber.js/bignumber.js b/node_modules/bignumber.js/bignumber.js new file mode 100644 index 0000000..e5c66c8 --- /dev/null +++ b/node_modules/bignumber.js/bignumber.js @@ -0,0 +1,2922 @@ +;(function (globalObject) { + 'use strict'; + +/* + * bignumber.js v9.1.2 + * A JavaScript library for arbitrary-precision arithmetic. + * https://github.com/MikeMcl/bignumber.js + * Copyright (c) 2022 Michael Mclaughlin + * MIT Licensed. 
+ * + * BigNumber.prototype methods | BigNumber methods + * | + * absoluteValue abs | clone + * comparedTo | config set + * decimalPlaces dp | DECIMAL_PLACES + * dividedBy div | ROUNDING_MODE + * dividedToIntegerBy idiv | EXPONENTIAL_AT + * exponentiatedBy pow | RANGE + * integerValue | CRYPTO + * isEqualTo eq | MODULO_MODE + * isFinite | POW_PRECISION + * isGreaterThan gt | FORMAT + * isGreaterThanOrEqualTo gte | ALPHABET + * isInteger | isBigNumber + * isLessThan lt | maximum max + * isLessThanOrEqualTo lte | minimum min + * isNaN | random + * isNegative | sum + * isPositive | + * isZero | + * minus | + * modulo mod | + * multipliedBy times | + * negated | + * plus | + * precision sd | + * shiftedBy | + * squareRoot sqrt | + * toExponential | + * toFixed | + * toFormat | + * toFraction | + * toJSON | + * toNumber | + * toPrecision | + * toString | + * valueOf | + * + */ + + + var BigNumber, + isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, + mathceil = Math.ceil, + mathfloor = Math.floor, + + bignumberError = '[BigNumber Error] ', + tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', + + BASE = 1e14, + LOG_BASE = 14, + MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 + // MAX_INT32 = 0x7fffffff, // 2^31 - 1 + POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], + SQRT_BASE = 1e7, + + // EDITABLE + // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and + // the arguments to toExponential, toFixed, toFormat, and toPrecision. + MAX = 1E9; // 0 to MAX_INT32 + + + /* + * Create and return a BigNumber constructor. + */ + function clone(configObject) { + var div, convertBase, parseNumeric, + P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, + ONE = new BigNumber(1), + + + //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- + + + // The default values below must be integers within the inclusive ranges stated. + // The values can also be changed at run-time using BigNumber.set. + + // The maximum number of decimal places for operations involving division. + DECIMAL_PLACES = 20, // 0 to MAX + + // The rounding mode used when rounding to the above decimal places, and when using + // toExponential, toFixed, toFormat and toPrecision, and round (default value). + // UP 0 Away from zero. + // DOWN 1 Towards zero. + // CEIL 2 Towards +Infinity. + // FLOOR 3 Towards -Infinity. + // HALF_UP 4 Towards nearest neighbour. If equidistant, up. + // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. + // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. + // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. + // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. + ROUNDING_MODE = 4, // 0 to 8 + + // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] + + // The exponent value at and beneath which toString returns exponential notation. + // Number type: -7 + TO_EXP_NEG = -7, // 0 to -MAX + + // The exponent value at and above which toString returns exponential notation. + // Number type: 21 + TO_EXP_POS = 21, // 0 to MAX + + // RANGE : [MIN_EXP, MAX_EXP] + + // The minimum exponent value, beneath which underflow to zero occurs. + // Number type: -324 (5e-324) + MIN_EXP = -1e7, // -1 to -MAX + + // The maximum exponent value, above which overflow to Infinity occurs. + // Number type: 308 (1.7976931348623157e+308) + // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow. + MAX_EXP = 1e7, // 1 to MAX + + // Whether to use cryptographically-secure random number generation, if available. + CRYPTO = false, // true or false + + // The modulo mode used when calculating the modulus: a mod n. + // The quotient (q = a / n) is calculated according to the corresponding rounding mode. + // The remainder (r) is calculated as: r = a - n * q. + // + // UP 0 The remainder is positive if the dividend is negative, else is negative. + // DOWN 1 The remainder has the same sign as the dividend. + // This modulo mode is commonly known as 'truncated division' and is + // equivalent to (a % n) in JavaScript. + // FLOOR 3 The remainder has the same sign as the divisor (Python %). + // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. + // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). + // The remainder is always positive. + // + // The truncated division, floored division, Euclidian division and IEEE 754 remainder + // modes are commonly used for the modulus operation. + // Although the other rounding modes can also be used, they may not give useful results. + MODULO_MODE = 1, // 0 to 9 + + // The maximum number of significant digits of the result of the exponentiatedBy operation. + // If POW_PRECISION is 0, there will be unlimited significant digits. + POW_PRECISION = 0, // 0 to MAX + + // The format specification used by the BigNumber.prototype.toFormat method. + FORMAT = { + prefix: '', + groupSize: 3, + secondaryGroupSize: 0, + groupSeparator: ',', + decimalSeparator: '.', + fractionGroupSize: 0, + fractionGroupSeparator: '\xA0', // non-breaking space + suffix: '' + }, + + // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', + // '-', '.', whitespace, or repeated character. + // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', + alphabetHasNormalDecimalDigits = true; + + + //------------------------------------------------------------------------------------------ + + + // CONSTRUCTOR + + + /* + * The BigNumber constructor and exported function. + * Create and return a new instance of a BigNumber object. + * + * v {number|string|BigNumber} A numeric value. + * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. + */ + function BigNumber(v, b) { + var alphabet, c, caseChanged, e, i, isNum, len, str, + x = this; + + // Enable constructor call without `new`. + if (!(x instanceof BigNumber)) return new BigNumber(v, b); + + if (b == null) { + + if (v && v._isBigNumber === true) { + x.s = v.s; + + if (!v.c || v.e > MAX_EXP) { + x.c = x.e = null; + } else if (v.e < MIN_EXP) { + x.c = [x.e = 0]; + } else { + x.e = v.e; + x.c = v.c.slice(); + } + + return; + } + + if ((isNum = typeof v == 'number') && v * 0 == 0) { + + // Use `1 / n` to handle minus zero also. + x.s = 1 / v < 0 ? (v = -v, -1) : 1; + + // Fast path for integers, where n < 2147483648 (2**31). + if (v === ~~v) { + for (e = 0, i = v; i >= 10; i /= 10, e++); + + if (e > MAX_EXP) { + x.c = x.e = null; + } else { + x.e = e; + x.c = [v]; + } + + return; + } + + str = String(v); + } else { + + if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); + + x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; + } + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + + // Exponential form? + if ((i = str.search(/e/i)) > 0) { + + // Determine exponent. 
+ if (e < 0) e = i; + e += +str.slice(i + 1); + str = str.substring(0, i); + } else if (e < 0) { + + // Integer. + e = str.length; + } + + } else { + + // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + intCheck(b, 2, ALPHABET.length, 'Base'); + + // Allow exponential notation to be used with base 10 argument, while + // also rounding to DECIMAL_PLACES as with other bases. + if (b == 10 && alphabetHasNormalDecimalDigits) { + x = new BigNumber(v); + return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); + } + + str = String(v); + + if (isNum = typeof v == 'number') { + + // Avoid potential interpretation of Infinity and NaN as base 44+ values. + if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + + x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { + throw Error + (tooManyDigits + v); + } + } else { + x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; + } + + alphabet = ALPHABET.slice(0, b); + e = i = 0; + + // Check that str is a valid base b number. + // Don't use RegExp, so alphabet can contain special characters. + for (len = str.length; i < len; i++) { + if (alphabet.indexOf(c = str.charAt(i)) < 0) { + if (c == '.') { + + // If '.' is not the first character and it has not be found before. + if (i > e) { + e = len; + continue; + } + } else if (!caseChanged) { + + // Allow e.g. hexadecimal 'FF' as well as 'ff'. + if (str == str.toUpperCase() && (str = str.toLowerCase()) || + str == str.toLowerCase() && (str = str.toUpperCase())) { + caseChanged = true; + i = -1; + e = 0; + continue; + } + } + + return parseNumeric(x, String(v), isNum, b); + } + } + + // Prevent later check for length on converted number. + isNum = false; + str = convertBase(str, b, 10, x.s); + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + else e = str.length; + } + + // Determine leading zeros. + for (i = 0; str.charCodeAt(i) === 48; i++); + + // Determine trailing zeros. + for (len = str.length; str.charCodeAt(--len) === 48;); + + if (str = str.slice(i, ++len)) { + len -= i; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (isNum && BigNumber.DEBUG && + len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { + throw Error + (tooManyDigits + (x.s * v)); + } + + // Overflow? + if ((e = e - i - 1) > MAX_EXP) { + + // Infinity. + x.c = x.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + x.c = [x.e = 0]; + } else { + x.e = e; + x.c = []; + + // Transform base + + // e is the base 10 exponent. + // i is where to slice str to get the first element of the coefficient array. + i = (e + 1) % LOG_BASE; + if (e < 0) i += LOG_BASE; // i < 1 + + if (i < len) { + if (i) x.c.push(+str.slice(0, i)); + + for (len -= LOG_BASE; i < len;) { + x.c.push(+str.slice(i, i += LOG_BASE)); + } + + i = LOG_BASE - (str = str.slice(i)).length; + } else { + i -= len; + } + + for (; i--; str += '0'); + x.c.push(+str); + } + } else { + + // Zero. 
+ x.c = [x.e = 0]; + } + } + + + // CONSTRUCTOR PROPERTIES + + + BigNumber.clone = clone; + + BigNumber.ROUND_UP = 0; + BigNumber.ROUND_DOWN = 1; + BigNumber.ROUND_CEIL = 2; + BigNumber.ROUND_FLOOR = 3; + BigNumber.ROUND_HALF_UP = 4; + BigNumber.ROUND_HALF_DOWN = 5; + BigNumber.ROUND_HALF_EVEN = 6; + BigNumber.ROUND_HALF_CEIL = 7; + BigNumber.ROUND_HALF_FLOOR = 8; + BigNumber.EUCLID = 9; + + + /* + * Configure infrequently-changing library-wide settings. + * + * Accept an object with the following optional properties (if the value of a property is + * a number, it must be an integer within the inclusive range stated): + * + * DECIMAL_PLACES {number} 0 to MAX + * ROUNDING_MODE {number} 0 to 8 + * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] + * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] + * CRYPTO {boolean} true or false + * MODULO_MODE {number} 0 to 9 + * POW_PRECISION {number} 0 to MAX + * ALPHABET {string} A string of two or more unique characters which does + * not contain '.'. + * FORMAT {object} An object with some of the following properties: + * prefix {string} + * groupSize {number} + * secondaryGroupSize {number} + * groupSeparator {string} + * decimalSeparator {string} + * fractionGroupSize {number} + * fractionGroupSeparator {string} + * suffix {string} + * + * (The values assigned to the above FORMAT object properties are not checked for validity.) + * + * E.g. + * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) + * + * Ignore properties/parameters set to null or undefined, except for ALPHABET. + * + * Return an object with the properties current values. + */ + BigNumber.config = BigNumber.set = function (obj) { + var p, v; + + if (obj != null) { + + if (typeof obj == 'object') { + + // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + DECIMAL_PLACES = v; + } + + // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. + // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { + v = obj[p]; + intCheck(v, 0, 8, p); + ROUNDING_MODE = v; + } + + // EXPONENTIAL_AT {number|number[]} + // Integer, -MAX to MAX inclusive or + // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. + // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, 0, p); + intCheck(v[1], 0, MAX, p); + TO_EXP_NEG = v[0]; + TO_EXP_POS = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); + } + } + + // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or + // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. + // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' + if (obj.hasOwnProperty(p = 'RANGE')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, -1, p); + intCheck(v[1], 1, MAX, p); + MIN_EXP = v[0]; + MAX_EXP = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + if (v) { + MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); + } else { + throw Error + (bignumberError + p + ' cannot be zero: ' + v); + } + } + } + + // CRYPTO {boolean} true or false. 
+ // '[BigNumber Error] CRYPTO not true or false: {v}' + // '[BigNumber Error] crypto unavailable' + if (obj.hasOwnProperty(p = 'CRYPTO')) { + v = obj[p]; + if (v === !!v) { + if (v) { + if (typeof crypto != 'undefined' && crypto && + (crypto.getRandomValues || crypto.randomBytes)) { + CRYPTO = v; + } else { + CRYPTO = !v; + throw Error + (bignumberError + 'crypto unavailable'); + } + } else { + CRYPTO = v; + } + } else { + throw Error + (bignumberError + p + ' not true or false: ' + v); + } + } + + // MODULO_MODE {number} Integer, 0 to 9 inclusive. + // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'MODULO_MODE')) { + v = obj[p]; + intCheck(v, 0, 9, p); + MODULO_MODE = v; + } + + // POW_PRECISION {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'POW_PRECISION')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + POW_PRECISION = v; + } + + // FORMAT {object} + // '[BigNumber Error] FORMAT not an object: {v}' + if (obj.hasOwnProperty(p = 'FORMAT')) { + v = obj[p]; + if (typeof v == 'object') FORMAT = v; + else throw Error + (bignumberError + p + ' not an object: ' + v); + } + + // ALPHABET {string} + // '[BigNumber Error] ALPHABET invalid: {v}' + if (obj.hasOwnProperty(p = 'ALPHABET')) { + v = obj[p]; + + // Disallow if less than two characters, + // or if it contains '+', '-', '.', whitespace, or a repeated character. + if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { + alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; + ALPHABET = v; + } else { + throw Error + (bignumberError + p + ' invalid: ' + v); + } + } + + } else { + + // '[BigNumber Error] Object expected: {v}' + throw Error + (bignumberError + 'Object expected: ' + obj); + } + } + + return { + DECIMAL_PLACES: DECIMAL_PLACES, + ROUNDING_MODE: ROUNDING_MODE, + EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], + RANGE: [MIN_EXP, MAX_EXP], + CRYPTO: CRYPTO, + MODULO_MODE: MODULO_MODE, + POW_PRECISION: POW_PRECISION, + FORMAT: FORMAT, + ALPHABET: ALPHABET + }; + }; + + + /* + * Return true if v is a BigNumber instance, otherwise return false. + * + * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. + * + * v {any} + * + * '[BigNumber Error] Invalid BigNumber: {v}' + */ + BigNumber.isBigNumber = function (v) { + if (!v || v._isBigNumber !== true) return false; + if (!BigNumber.DEBUG) return true; + + var i, n, + c = v.c, + e = v.e, + s = v.s; + + out: if ({}.toString.call(c) == '[object Array]') { + + if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { + + // If the first element is zero, the BigNumber value must be zero. + if (c[0] === 0) { + if (e === 0 && c.length === 1) return true; + break out; + } + + // Calculate number of digits that c[0] should have, based on the exponent. + i = (e + 1) % LOG_BASE; + if (i < 1) i += LOG_BASE; + + // Calculate number of digits of c[0]. + //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { + if (String(c[0]).length == i) { + + for (i = 0; i < c.length; i++) { + n = c[i]; + if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; + } + + // Last element cannot be zero, unless it is the only element. 
+ if (n !== 0) return true; + } + } + + // Infinity/NaN + } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { + return true; + } + + throw Error + (bignumberError + 'Invalid BigNumber: ' + v); + }; + + + /* + * Return a new BigNumber whose value is the maximum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.maximum = BigNumber.max = function () { + return maxOrMin(arguments, -1); + }; + + + /* + * Return a new BigNumber whose value is the minimum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.minimum = BigNumber.min = function () { + return maxOrMin(arguments, 1); + }; + + + /* + * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, + * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing + * zeros are produced). + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' + * '[BigNumber Error] crypto unavailable' + */ + BigNumber.random = (function () { + var pow2_53 = 0x20000000000000; + + // Return a 53 bit integer n, where 0 <= n < 9007199254740992. + // Check if Math.random() produces more than 32 bits of randomness. + // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. + // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. + var random53bitInt = (Math.random() * pow2_53) & 0x1fffff + ? function () { return mathfloor(Math.random() * pow2_53); } + : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + + (Math.random() * 0x800000 | 0); }; + + return function (dp) { + var a, b, e, k, v, + i = 0, + c = [], + rand = new BigNumber(ONE); + + if (dp == null) dp = DECIMAL_PLACES; + else intCheck(dp, 0, MAX); + + k = mathceil(dp / LOG_BASE); + + if (CRYPTO) { + + // Browsers supporting crypto.getRandomValues. + if (crypto.getRandomValues) { + + a = crypto.getRandomValues(new Uint32Array(k *= 2)); + + for (; i < k;) { + + // 53 bits: + // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) + // 11111 11111111 11111111 11111111 11100000 00000000 00000000 + // ((Math.pow(2, 32) - 1) >>> 11).toString(2) + // 11111 11111111 11111111 + // 0x20000 is 2^21. + v = a[i] * 0x20000 + (a[i + 1] >>> 11); + + // Rejection sampling: + // 0 <= v < 9007199254740992 + // Probability that v >= 9e15, is + // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 + if (v >= 9e15) { + b = crypto.getRandomValues(new Uint32Array(2)); + a[i] = b[0]; + a[i + 1] = b[1]; + } else { + + // 0 <= v <= 8999999999999999 + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 2; + } + } + i = k / 2; + + // Node.js supporting crypto.randomBytes. + } else if (crypto.randomBytes) { + + // buffer + a = crypto.randomBytes(k *= 7); + + for (; i < k;) { + + // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 + // 0x100000000 is 2^32, 0x1000000 is 2^24 + // 11111 11111111 11111111 11111111 11111111 11111111 11111111 + // 0 <= v < 9007199254740992 + v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + + if (v >= 9e15) { + crypto.randomBytes(7).copy(a, i); + } else { + + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 7; + } + } + i = k / 7; + } else { + CRYPTO = false; + throw Error + (bignumberError + 'crypto unavailable'); + } + } + + // Use Math.random. 
+ if (!CRYPTO) { + + for (; i < k;) { + v = random53bitInt(); + if (v < 9e15) c[i++] = v % 1e14; + } + } + + k = c[--i]; + dp %= LOG_BASE; + + // Convert trailing digits to zeros according to dp. + if (k && dp) { + v = POWS_TEN[LOG_BASE - dp]; + c[i] = mathfloor(k / v) * v; + } + + // Remove trailing elements which are zero. + for (; c[i] === 0; c.pop(), i--); + + // Zero? + if (i < 0) { + c = [e = 0]; + } else { + + // Remove leading elements which are zero and adjust exponent accordingly. + for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); + + // Count the digits of the first element of c to determine leading zeros, and... + for (i = 1, v = c[0]; v >= 10; v /= 10, i++); + + // adjust the exponent accordingly. + if (i < LOG_BASE) e -= LOG_BASE - i; + } + + rand.e = e; + rand.c = c; + return rand; + }; + })(); + + + /* + * Return a BigNumber whose value is the sum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.sum = function () { + var i = 1, + args = arguments, + sum = new BigNumber(args[0]); + for (; i < args.length;) sum = sum.plus(args[i++]); + return sum; + }; + + + // PRIVATE FUNCTIONS + + + // Called by BigNumber and BigNumber.prototype.toString. + convertBase = (function () { + var decimal = '0123456789'; + + /* + * Convert string of baseIn to an array of numbers of baseOut. + * Eg. toBaseOut('255', 10, 16) returns [15, 15]. + * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. + */ + function toBaseOut(str, baseIn, baseOut, alphabet) { + var j, + arr = [0], + arrL, + i = 0, + len = str.length; + + for (; i < len;) { + for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); + + arr[0] += alphabet.indexOf(str.charAt(i++)); + + for (j = 0; j < arr.length; j++) { + + if (arr[j] > baseOut - 1) { + if (arr[j + 1] == null) arr[j + 1] = 0; + arr[j + 1] += arr[j] / baseOut | 0; + arr[j] %= baseOut; + } + } + } + + return arr.reverse(); + } + + // Convert a numeric string of baseIn to a numeric string of baseOut. + // If the caller is toString, we are converting from base 10 to baseOut. + // If the caller is BigNumber, we are converting from baseIn to base 10. + return function (str, baseIn, baseOut, sign, callerIsToString) { + var alphabet, d, e, k, r, x, xc, y, + i = str.indexOf('.'), + dp = DECIMAL_PLACES, + rm = ROUNDING_MODE; + + // Non-integer. + if (i >= 0) { + k = POW_PRECISION; + + // Unlimited precision. + POW_PRECISION = 0; + str = str.replace('.', ''); + y = new BigNumber(baseIn); + x = y.pow(str.length - i); + POW_PRECISION = k; + + // Convert str as if an integer, then restore the fraction part by dividing the + // result by its base raised to a power. + + y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), + 10, baseOut, decimal); + y.e = y.c.length; + } + + // Convert the number as integer. + + xc = toBaseOut(str, baseIn, baseOut, callerIsToString + ? (alphabet = ALPHABET, decimal) + : (alphabet = decimal, ALPHABET)); + + // xc now represents str as an integer and converted to baseOut. e is the exponent. + e = k = xc.length; + + // Remove trailing zeros. + for (; xc[--k] == 0; xc.pop()); + + // Zero? + if (!xc[0]) return alphabet.charAt(0); + + // Does str represent an integer? If so, no need for the division. + if (i < 0) { + --e; + } else { + x.c = xc; + x.e = e; + + // The sign is needed for correct rounding. + x.s = sign; + x = div(x, y, dp, rm, baseOut); + xc = x.c; + r = x.r; + e = x.e; + } + + // xc now represents str converted to baseOut. + + // THe index of the rounding digit. 
+ d = e + dp + 1; + + // The rounding digit: the digit to the right of the digit that may be rounded up. + i = xc[d]; + + // Look at the rounding digits and mode to determine whether to round up. + + k = baseOut / 2; + r = r || d < 0 || xc[d + 1] != null; + + r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || + rm == (x.s < 0 ? 8 : 7)); + + // If the index of the rounding digit is not greater than zero, or xc represents + // zero, then the result of the base conversion is zero or, if rounding up, a value + // such as 0.00001. + if (d < 1 || !xc[0]) { + + // 1^-dp or 0 + str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + } else { + + // Truncate xc to the required number of decimal places. + xc.length = d; + + // Round up? + if (r) { + + // Rounding up may mean the previous digit has to be rounded up and so on. + for (--baseOut; ++xc[--d] > baseOut;) { + xc[d] = 0; + + if (!d) { + ++e; + xc = [1].concat(xc); + } + } + } + + // Determine trailing zeros. + for (k = xc.length; !xc[--k];); + + // E.g. [4, 11, 15] becomes 4bf. + for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); + + // Add leading zeros, decimal point and trailing zeros as required. + str = toFixedPoint(str, e, alphabet.charAt(0)); + } + + // The caller will add the sign. + return str; + }; + })(); + + + // Perform division in the specified base. Called by div and convertBase. + div = (function () { + + // Assume non-zero x and k. + function multiply(x, k, base) { + var m, temp, xlo, xhi, + carry = 0, + i = x.length, + klo = k % SQRT_BASE, + khi = k / SQRT_BASE | 0; + + for (x = x.slice(); i--;) { + xlo = x[i] % SQRT_BASE; + xhi = x[i] / SQRT_BASE | 0; + m = khi * xlo + xhi * klo; + temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; + carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; + x[i] = temp % base; + } + + if (carry) x = [carry].concat(x); + + return x; + } + + function compare(a, b, aL, bL) { + var i, cmp; + + if (aL != bL) { + cmp = aL > bL ? 1 : -1; + } else { + + for (i = cmp = 0; i < aL; i++) { + + if (a[i] != b[i]) { + cmp = a[i] > b[i] ? 1 : -1; + break; + } + } + } + + return cmp; + } + + function subtract(a, b, aL, base) { + var i = 0; + + // Subtract b from a. + for (; aL--;) { + a[aL] -= i; + i = a[aL] < b[aL] ? 1 : 0; + a[aL] = i * base + a[aL] - b[aL]; + } + + // Remove leading zeros. + for (; !a[0] && a.length > 1; a.splice(0, 1)); + } + + // x: dividend, y: divisor. + return function (x, y, dp, rm, base) { + var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, + yL, yz, + s = x.s == y.s ? 1 : -1, + xc = x.c, + yc = y.c; + + // Either NaN, Infinity or 0? + if (!xc || !xc[0] || !yc || !yc[0]) { + + return new BigNumber( + + // Return NaN if either NaN, or both Infinity or 0. + !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : + + // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. + xc && xc[0] == 0 || !yc ? s * 0 : s / 0 + ); + } + + q = new BigNumber(s); + qc = q.c = []; + e = x.e - y.e; + s = dp + e + 1; + + if (!base) { + base = BASE; + e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); + s = s / LOG_BASE | 0; + } + + // Result exponent may be one less then the current value of e. + // The coefficients of the BigNumbers from convertBase may have trailing zeros. 
+ for (i = 0; yc[i] == (xc[i] || 0); i++); + + if (yc[i] > (xc[i] || 0)) e--; + + if (s < 0) { + qc.push(1); + more = true; + } else { + xL = xc.length; + yL = yc.length; + i = 0; + s += 2; + + // Normalise xc and yc so highest order digit of yc is >= base / 2. + + n = mathfloor(base / (yc[0] + 1)); + + // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. + // if (n > 1 || n++ == 1 && yc[0] < base / 2) { + if (n > 1) { + yc = multiply(yc, n, base); + xc = multiply(xc, n, base); + yL = yc.length; + xL = xc.length; + } + + xi = yL; + rem = xc.slice(0, yL); + remL = rem.length; + + // Add zeros to make remainder as long as divisor. + for (; remL < yL; rem[remL++] = 0); + yz = yc.slice(); + yz = [0].concat(yz); + yc0 = yc[0]; + if (yc[1] >= base / 2) yc0++; + // Not necessary, but to prevent trial digit n > base, when using base 3. + // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; + + do { + n = 0; + + // Compare divisor and remainder. + cmp = compare(yc, rem, yL, remL); + + // If divisor < remainder. + if (cmp < 0) { + + // Calculate trial digit, n. + + rem0 = rem[0]; + if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); + + // n is how many times the divisor goes into the current remainder. + n = mathfloor(rem0 / yc0); + + // Algorithm: + // product = divisor multiplied by trial digit (n). + // Compare product and remainder. + // If product is greater than remainder: + // Subtract divisor from product, decrement trial digit. + // Subtract product from remainder. + // If product was less than remainder at the last compare: + // Compare new remainder and divisor. + // If remainder is greater than divisor: + // Subtract divisor from remainder, increment trial digit. + + if (n > 1) { + + // n may be > base only when base is 3. + if (n >= base) n = base - 1; + + // product = divisor * trial digit. + prod = multiply(yc, n, base); + prodL = prod.length; + remL = rem.length; + + // Compare product and remainder. + // If product > remainder then trial digit n too high. + // n is 1 too high about 5% of the time, and is not known to have + // ever been more than 1 too high. + while (compare(prod, rem, prodL, remL) == 1) { + n--; + + // Subtract divisor from product. + subtract(prod, yL < prodL ? yz : yc, prodL, base); + prodL = prod.length; + cmp = 1; + } + } else { + + // n is 0 or 1, cmp is -1. + // If n is 0, there is no need to compare yc and rem again below, + // so change cmp to 1 to avoid it. + // If n is 1, leave cmp as -1, so yc and rem are compared again. + if (n == 0) { + + // divisor < remainder, so n must be at least 1. + cmp = n = 1; + } + + // product = divisor + prod = yc.slice(); + prodL = prod.length; + } + + if (prodL < remL) prod = [0].concat(prod); + + // Subtract product from remainder. + subtract(rem, prod, remL, base); + remL = rem.length; + + // If product was < remainder. + if (cmp == -1) { + + // Compare divisor and new remainder. + // If divisor < new remainder, subtract divisor from remainder. + // Trial digit n too low. + // n is 1 too low about 5% of the time, and very rarely 2 too low. + while (compare(yc, rem, yL, remL) < 1) { + n++; + + // Subtract divisor from remainder. + subtract(rem, yL < remL ? yz : yc, remL, base); + remL = rem.length; + } + } + } else if (cmp === 0) { + n++; + rem = [0]; + } // else cmp === 1 and n will be 0 + + // Add the next digit, n, to the result array. + qc[i++] = n; + + // Update the remainder. 
+ if (rem[0]) { + rem[remL++] = xc[xi] || 0; + } else { + rem = [xc[xi]]; + remL = 1; + } + } while ((xi++ < xL || rem[0] != null) && s--); + + more = rem[0] != null; + + // Leading zero? + if (!qc[0]) qc.splice(0, 1); + } + + if (base == BASE) { + + // To calculate q.e, first get the number of digits of qc[0]. + for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); + + round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); + + // Caller is convertBase. + } else { + q.e = e; + q.r = +more; + } + + return q; + }; + })(); + + + /* + * Return a string representing the value of BigNumber n in fixed-point or exponential + * notation rounded to the specified decimal places or significant digits. + * + * n: a BigNumber. + * i: the index of the last digit required (i.e. the digit that may be rounded up). + * rm: the rounding mode. + * id: 1 (toExponential) or 2 (toPrecision). + */ + function format(n, i, rm, id) { + var c0, e, ne, len, str; + + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + if (!n.c) return n.toString(); + + c0 = n.c[0]; + ne = n.e; + + if (i == null) { + str = coeffToString(n.c); + str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) + ? toExponential(str, ne) + : toFixedPoint(str, ne, '0'); + } else { + n = round(new BigNumber(n), i, rm); + + // n.e may have changed if the value was rounded up. + e = n.e; + + str = coeffToString(n.c); + len = str.length; + + // toPrecision returns exponential notation if the number of significant digits + // specified is less than the number of digits necessary to represent the integer + // part of the value in fixed-point notation. + + // Exponential notation. + if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { + + // Append zeros? + for (; len < i; str += '0', len++); + str = toExponential(str, e); + + // Fixed-point notation. + } else { + i -= ne; + str = toFixedPoint(str, e, '0'); + + // Append zeros? + if (e + 1 > len) { + if (--i > 0) for (str += '.'; i--; str += '0'); + } else { + i += e - len; + if (i > 0) { + if (e + 1 == len) str += '.'; + for (; i--; str += '0'); + } + } + } + } + + return n.s < 0 && c0 ? '-' + str : str; + } + + + // Handle BigNumber.max and BigNumber.min. + // If any number is NaN, return NaN. + function maxOrMin(args, n) { + var k, y, + i = 1, + x = new BigNumber(args[0]); + + for (; i < args.length; i++) { + y = new BigNumber(args[i]); + if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { + x = y; + } + } + + return x; + } + + + /* + * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. + * Called by minus, plus and times. + */ + function normalise(n, c, e) { + var i = 1, + j = c.length; + + // Remove trailing zeros. + for (; !c[--j]; c.pop()); + + // Calculate the base 10 exponent. First get the number of digits of c[0]. + for (j = c[0]; j >= 10; j /= 10, i++); + + // Overflow? + if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { + + // Infinity. + n.c = n.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + n.c = [n.e = 0]; + } else { + n.e = e; + n.c = c; + } + + return n; + } + + + // Handle values that fail the validity test in BigNumber. + parseNumeric = (function () { + var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, + dotAfter = /^([^.]+)\.$/, + dotBefore = /^\.([^.]+)$/, + isInfinityOrNaN = /^-?(Infinity|NaN)$/, + whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; + + return function (x, str, isNum, b) { + var base, + s = isNum ? str : str.replace(whitespaceOrPlus, ''); + + // No exception on ±Infinity or NaN. 
+ if (isInfinityOrNaN.test(s)) { + x.s = isNaN(s) ? null : s < 0 ? -1 : 1; + } else { + if (!isNum) { + + // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i + s = s.replace(basePrefix, function (m, p1, p2) { + base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; + return !b || b == base ? p1 : m; + }); + + if (b) { + base = b; + + // E.g. '1.' to '1', '.1' to '0.1' + s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); + } + + if (str != s) return new BigNumber(s, base); + } + + // '[BigNumber Error] Not a number: {n}' + // '[BigNumber Error] Not a base {b} number: {n}' + if (BigNumber.DEBUG) { + throw Error + (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); + } + + // NaN + x.s = null; + } + + x.c = x.e = null; + } + })(); + + + /* + * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. + * If r is truthy, it is known that there are more digits after the rounding digit. + */ + function round(x, sd, rm, r) { + var d, i, j, k, n, ni, rd, + xc = x.c, + pows10 = POWS_TEN; + + // if x is not Infinity or NaN... + if (xc) { + + // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. + // n is a base 1e14 number, the value of the element of array x.c containing rd. + // ni is the index of n within x.c. + // d is the number of digits of n. + // i is the index of rd within n including leading zeros. + // j is the actual index of rd within n (if < 0, rd is a leading zero). + out: { + + // Get the number of digits of the first element of xc. + for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); + i = sd - d; + + // If the rounding digit is in the first element of xc... + if (i < 0) { + i += LOG_BASE; + j = sd; + n = xc[ni = 0]; + + // Get the rounding digit at index j of n. + rd = mathfloor(n / pows10[d - j - 1] % 10); + } else { + ni = mathceil((i + 1) / LOG_BASE); + + if (ni >= xc.length) { + + if (r) { + + // Needed by sqrt. + for (; xc.length <= ni; xc.push(0)); + n = rd = 0; + d = 1; + i %= LOG_BASE; + j = i - LOG_BASE + 1; + } else { + break out; + } + } else { + n = k = xc[ni]; + + // Get the number of digits of n. + for (d = 1; k >= 10; k /= 10, d++); + + // Get the index of rd within n. + i %= LOG_BASE; + + // Get the index of rd within n, adjusted for leading zeros. + // The number of leading zeros of n is given by LOG_BASE - d. + j = i - LOG_BASE + d; + + // Get the rounding digit at index j of n. + rd = j < 0 ? 0 : mathfloor(n / pows10[d - j - 1] % 10); + } + } + + r = r || sd < 0 || + + // Are there any non-zero digits after the rounding digit? + // The expression n % pows10[d - j - 1] returns all digits of n to the right + // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. + xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); + + r = rm < 4 + ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && + + // Check whether the digit to the left of the rounding digit is odd. + ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || + rm == (x.s < 0 ? 8 : 7)); + + if (sd < 1 || !xc[0]) { + xc.length = 0; + + if (r) { + + // Convert sd to decimal places. + sd -= x.e + 1; + + // 1, 0.1, 0.01, 0.001, 0.0001 etc. + xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; + x.e = -sd || 0; + } else { + + // Zero. + xc[0] = x.e = 0; + } + + return x; + } + + // Remove excess digits. + if (i == 0) { + xc.length = ni; + k = 1; + ni--; + } else { + xc.length = ni + 1; + k = pows10[LOG_BASE - i]; + + // E.g. 
56700 becomes 56000 if 7 is the rounding digit. + // j > 0 means i > number of leading zeros of n. + xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; + } + + // Round up? + if (r) { + + for (; ;) { + + // If the digit to be rounded up is in the first element of xc... + if (ni == 0) { + + // i will be the length of xc[0] before k is added. + for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); + j = xc[0] += k; + for (k = 1; j >= 10; j /= 10, k++); + + // if i != k the length has increased. + if (i != k) { + x.e++; + if (xc[0] == BASE) xc[0] = 1; + } + + break; + } else { + xc[ni] += k; + if (xc[ni] != BASE) break; + xc[ni--] = 0; + k = 1; + } + } + } + + // Remove trailing zeros. + for (i = xc.length; xc[--i] === 0; xc.pop()); + } + + // Overflow? Infinity. + if (x.e > MAX_EXP) { + x.c = x.e = null; + + // Underflow? Zero. + } else if (x.e < MIN_EXP) { + x.c = [x.e = 0]; + } + } + + return x; + } + + + function valueOf(n) { + var str, + e = n.e; + + if (e === null) return n.toString(); + + str = coeffToString(n.c); + + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(str, e) + : toFixedPoint(str, e, '0'); + + return n.s < 0 ? '-' + str : str; + } + + + // PROTOTYPE/INSTANCE METHODS + + + /* + * Return a new BigNumber whose value is the absolute value of this BigNumber. + */ + P.absoluteValue = P.abs = function () { + var x = new BigNumber(this); + if (x.s < 0) x.s = 1; + return x; + }; + + + /* + * Return + * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), + * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), + * 0 if they have the same value, + * or null if the value of either is NaN. + */ + P.comparedTo = function (y, b) { + return compare(this, new BigNumber(y, b)); + }; + + + /* + * If dp is undefined or null or true or false, return the number of decimal places of the + * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * + * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * [dp] {number} Decimal places: integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.decimalPlaces = P.dp = function (dp, rm) { + var c, n, v, + x = this; + + if (dp != null) { + intCheck(dp, 0, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), dp + x.e + 1, rm); + } + + if (!(c = x.c)) return null; + n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; + + // Subtract the number of trailing zeros of the last number. + if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); + if (n < 0) n = 0; + + return n; + }; + + + /* + * n / 0 = I + * n / N = N + * n / I = 0 + * 0 / n = 0 + * 0 / 0 = N + * 0 / N = N + * 0 / I = 0 + * N / n = N + * N / 0 = N + * N / N = N + * N / I = N + * I / n = I + * I / 0 = I + * I / N = N + * I / I = N + * + * Return a new BigNumber whose value is the value of this BigNumber divided by the value of + * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. 
+ */ + P.dividedBy = P.div = function (y, b) { + return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); + }; + + + /* + * Return a new BigNumber whose value is the integer part of dividing the value of this + * BigNumber by the value of BigNumber(y, b). + */ + P.dividedToIntegerBy = P.idiv = function (y, b) { + return div(this, new BigNumber(y, b), 0, 1); + }; + + + /* + * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. + * + * If m is present, return the result modulo m. + * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. + * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. + * + * The modular power operation works efficiently when x, n, and m are integers, otherwise it + * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. + * + * n {number|string|BigNumber} The exponent. An integer. + * [m] {number|string|BigNumber} The modulus. + * + * '[BigNumber Error] Exponent not an integer: {n}' + */ + P.exponentiatedBy = P.pow = function (n, m) { + var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, + x = this; + + n = new BigNumber(n); + + // Allow NaN and ±Infinity, but not other non-integers. + if (n.c && !n.isInteger()) { + throw Error + (bignumberError + 'Exponent not an integer: ' + valueOf(n)); + } + + if (m != null) m = new BigNumber(m); + + // Exponent of MAX_SAFE_INTEGER is 15. + nIsBig = n.e > 14; + + // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. + if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + + // The sign of the result of pow when x is negative depends on the evenness of n. + // If +n overflows to ±Infinity, the evenness of n would be not be known. + y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); + return m ? y.mod(m) : y; + } + + nIsNeg = n.s < 0; + + if (m) { + + // x % m returns NaN if abs(m) is zero, or m is NaN. + if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); + + isModExp = !nIsNeg && x.isInteger() && m.isInteger(); + + if (isModExp) x = x.mod(m); + + // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. + // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. + } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 + // [1, 240000000] + ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 + // [80000000000000] [99999750000000] + : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { + + // If x is negative and n is odd, k = -0, else k = 0. + k = x.s < 0 && isOdd(n) ? -0 : 0; + + // If x >= 1, k = ±Infinity. + if (x.e > -1) k = 1 / k; + + // If n is negative return ±0, else return ±Infinity. + return new BigNumber(nIsNeg ? 1 / k : k); + + } else if (POW_PRECISION) { + + // Truncating each coefficient array to a length of k after each multiplication + // equates to truncating significant digits to POW_PRECISION + [28, 41], + // i.e. there will be a minimum of 28 guard digits retained. + k = mathceil(POW_PRECISION / LOG_BASE + 2); + } + + if (nIsBig) { + half = new BigNumber(0.5); + if (nIsNeg) n.s = 1; + nIsOdd = isOdd(n); + } else { + i = Math.abs(+valueOf(n)); + nIsOdd = i % 2; + } + + y = new BigNumber(ONE); + + // Performs 54 loop iterations for n of 9007199254740991. 
+ for (; ;) { + + if (nIsOdd) { + y = y.times(x); + if (!y.c) break; + + if (k) { + if (y.c.length > k) y.c.length = k; + } else if (isModExp) { + y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); + } + } + + if (i) { + i = mathfloor(i / 2); + if (i === 0) break; + nIsOdd = i % 2; + } else { + n = n.times(half); + round(n, n.e + 1, 1); + + if (n.e > 14) { + nIsOdd = isOdd(n); + } else { + i = +valueOf(n); + if (i === 0) break; + nIsOdd = i % 2; + } + } + + x = x.times(x); + + if (k) { + if (x.c && x.c.length > k) x.c.length = k; + } else if (isModExp) { + x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); + } + } + + if (isModExp) return y; + if (nIsNeg) y = ONE.div(y); + + return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer + * using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' + */ + P.integerValue = function (rm) { + var n = new BigNumber(this); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + return round(n, n.e + 1, rm); + }; + + + /* + * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), + * otherwise return false. + */ + P.isEqualTo = P.eq = function (y, b) { + return compare(this, new BigNumber(y, b)) === 0; + }; + + + /* + * Return true if the value of this BigNumber is a finite number, otherwise return false. + */ + P.isFinite = function () { + return !!this.c; + }; + + + /* + * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isGreaterThan = P.gt = function (y, b) { + return compare(this, new BigNumber(y, b)) > 0; + }; + + + /* + * Return true if the value of this BigNumber is greater than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isGreaterThanOrEqualTo = P.gte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; + + }; + + + /* + * Return true if the value of this BigNumber is an integer, otherwise return false. + */ + P.isInteger = function () { + return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; + }; + + + /* + * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isLessThan = P.lt = function (y, b) { + return compare(this, new BigNumber(y, b)) < 0; + }; + + + /* + * Return true if the value of this BigNumber is less than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isLessThanOrEqualTo = P.lte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; + }; + + + /* + * Return true if the value of this BigNumber is NaN, otherwise return false. + */ + P.isNaN = function () { + return !this.s; + }; + + + /* + * Return true if the value of this BigNumber is negative, otherwise return false. + */ + P.isNegative = function () { + return this.s < 0; + }; + + + /* + * Return true if the value of this BigNumber is positive, otherwise return false. + */ + P.isPositive = function () { + return this.s > 0; + }; + + + /* + * Return true if the value of this BigNumber is 0 or -0, otherwise return false. 
+ */ + P.isZero = function () { + return !!this.c && this.c[0] == 0; + }; + + + /* + * n - 0 = n + * n - N = N + * n - I = -I + * 0 - n = -n + * 0 - 0 = 0 + * 0 - N = N + * 0 - I = -I + * N - n = N + * N - 0 = N + * N - N = N + * N - I = N + * I - n = I + * I - 0 = I + * I - N = N + * I - I = N + * + * Return a new BigNumber whose value is the value of this BigNumber minus the value of + * BigNumber(y, b). + */ + P.minus = function (y, b) { + var i, j, t, xLTy, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.plus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Either Infinity? + if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); + + // Either zero? + if (!xc[0] || !yc[0]) { + + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : + + // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity + ROUNDING_MODE == 3 ? -0 : 0); + } + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Determine which is the bigger number. + if (a = xe - ye) { + + if (xLTy = a < 0) { + a = -a; + t = xc; + } else { + ye = xe; + t = yc; + } + + t.reverse(); + + // Prepend zeros to equalise exponents. + for (b = a; b--; t.push(0)); + t.reverse(); + } else { + + // Exponents equal. Check digit by digit. + j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; + + for (a = b = 0; b < j; b++) { + + if (xc[b] != yc[b]) { + xLTy = xc[b] < yc[b]; + break; + } + } + } + + // x < y? Point xc to the array of the bigger number. + if (xLTy) { + t = xc; + xc = yc; + yc = t; + y.s = -y.s; + } + + b = (j = yc.length) - (i = xc.length); + + // Append zeros to xc if shorter. + // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. + if (b > 0) for (; b--; xc[i++] = 0); + b = BASE - 1; + + // Subtract yc from xc. + for (; j > a;) { + + if (xc[--j] < yc[j]) { + for (i = j; i && !xc[--i]; xc[i] = b); + --xc[i]; + xc[j] += BASE; + } + + xc[j] -= yc[j]; + } + + // Remove leading zeros and adjust exponent accordingly. + for (; xc[0] == 0; xc.splice(0, 1), --ye); + + // Zero? + if (!xc[0]) { + + // Following IEEE 754 (2008) 6.3, + // n - n = +0 but n - n = -0 when rounding towards -Infinity. + y.s = ROUNDING_MODE == 3 ? -1 : 1; + y.c = [y.e = 0]; + return y; + } + + // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity + // for finite x and y. + return normalise(y, xc, ye); + }; + + + /* + * n % 0 = N + * n % N = N + * n % I = n + * 0 % n = 0 + * -0 % n = -0 + * 0 % 0 = N + * 0 % N = N + * 0 % I = 0 + * N % n = N + * N % 0 = N + * N % N = N + * N % I = N + * I % n = N + * I % 0 = N + * I % N = N + * I % I = N + * + * Return a new BigNumber whose value is the value of this BigNumber modulo the value of + * BigNumber(y, b). The result depends on the value of MODULO_MODE. + */ + P.modulo = P.mod = function (y, b) { + var q, s, + x = this; + + y = new BigNumber(y, b); + + // Return NaN if x is Infinity or NaN, or y is NaN or zero. + if (!x.c || !y.s || y.c && !y.c[0]) { + return new BigNumber(NaN); + + // Return x if y is Infinity or x is zero. 
+ } else if (!y.c || x.c && !x.c[0]) { + return new BigNumber(x); + } + + if (MODULO_MODE == 9) { + + // Euclidian division: q = sign(y) * floor(x / abs(y)) + // r = x - qy where 0 <= r < abs(y) + s = y.s; + y.s = 1; + q = div(x, y, 0, 3); + y.s = s; + q.s *= s; + } else { + q = div(x, y, 0, MODULO_MODE); + } + + y = x.minus(q.times(y)); + + // To match JavaScript %, ensure sign of zero is sign of dividend. + if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + + return y; + }; + + + /* + * n * 0 = 0 + * n * N = N + * n * I = I + * 0 * n = 0 + * 0 * 0 = 0 + * 0 * N = N + * 0 * I = N + * N * n = N + * N * 0 = N + * N * N = N + * N * I = N + * I * n = I + * I * 0 = N + * I * N = N + * I * I = I + * + * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value + * of BigNumber(y, b). + */ + P.multipliedBy = P.times = function (y, b) { + var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, + base, sqrtBase, + x = this, + xc = x.c, + yc = (y = new BigNumber(y, b)).c; + + // Either NaN, ±Infinity or ±0? + if (!xc || !yc || !xc[0] || !yc[0]) { + + // Return NaN if either is NaN, or one is 0 and the other is Infinity. + if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { + y.c = y.e = y.s = null; + } else { + y.s *= x.s; + + // Return ±Infinity if either is ±Infinity. + if (!xc || !yc) { + y.c = y.e = null; + + // Return ±0 if either is ±0. + } else { + y.c = [0]; + y.e = 0; + } + } + + return y; + } + + e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); + y.s *= x.s; + xcL = xc.length; + ycL = yc.length; + + // Ensure xc points to longer array and xcL to its length. + if (xcL < ycL) { + zc = xc; + xc = yc; + yc = zc; + i = xcL; + xcL = ycL; + ycL = i; + } + + // Initialise the result array with zeros. + for (i = xcL + ycL, zc = []; i--; zc.push(0)); + + base = BASE; + sqrtBase = SQRT_BASE; + + for (i = ycL; --i >= 0;) { + c = 0; + ylo = yc[i] % sqrtBase; + yhi = yc[i] / sqrtBase | 0; + + for (k = xcL, j = i + k; j > i;) { + xlo = xc[--k] % sqrtBase; + xhi = xc[k] / sqrtBase | 0; + m = yhi * xlo + xhi * ylo; + xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; + c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; + zc[j--] = xlo % base; + } + + zc[j] = c; + } + + if (c) { + ++e; + } else { + zc.splice(0, 1); + } + + return normalise(y, zc, e); + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber negated, + * i.e. multiplied by -1. + */ + P.negated = function () { + var x = new BigNumber(this); + x.s = -x.s || null; + return x; + }; + + + /* + * n + 0 = n + * n + N = N + * n + I = I + * 0 + n = n + * 0 + 0 = 0 + * 0 + N = N + * 0 + I = I + * N + n = N + * N + 0 = N + * N + N = N + * N + I = N + * I + n = I + * I + 0 = I + * I + N = N + * I + I = I + * + * Return a new BigNumber whose value is the value of this BigNumber plus the value of + * BigNumber(y, b). + */ + P.plus = function (y, b) { + var t, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.minus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Return ±Infinity if either ±Infinity. + if (!xc || !yc) return new BigNumber(a / 0); + + // Either zero? + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? 
x : a * 0); + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. + if (a = xe - ye) { + if (a > 0) { + ye = xe; + t = yc; + } else { + a = -a; + t = xc; + } + + t.reverse(); + for (; a--; t.push(0)); + t.reverse(); + } + + a = xc.length; + b = yc.length; + + // Point xc to the longer array, and b to the shorter length. + if (a - b < 0) { + t = yc; + yc = xc; + xc = t; + b = a; + } + + // Only start adding at yc.length - 1 as the further digits of xc can be ignored. + for (a = 0; b;) { + a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; + xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; + } + + if (a) { + xc = [a].concat(xc); + ++ye; + } + + // No need to check for zero, as +x + +y != 0 && -x + -y != 0 + // ye = MAX_EXP + 1 possible + return normalise(y, xc, ye); + }; + + + /* + * If sd is undefined or null or true or false, return the number of significant digits of + * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * If sd is true include integer-part trailing zeros in the count. + * + * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. + * boolean: whether to count integer-part trailing zeros: true or false. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.precision = P.sd = function (sd, rm) { + var c, n, v, + x = this; + + if (sd != null && sd !== !!sd) { + intCheck(sd, 1, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), sd, rm); + } + + if (!(c = x.c)) return null; + v = c.length - 1; + n = v * LOG_BASE + 1; + + if (v = c[v]) { + + // Subtract the number of trailing zeros of the last element. + for (; v % 10 == 0; v /= 10, n--); + + // Add the number of digits of the first element. + for (v = c[0]; v >= 10; v /= 10, n++); + } + + if (sd && x.e + 1 > n) n = x.e + 1; + + return n; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber shifted by k places + * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. + * + * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' + */ + P.shiftedBy = function (k) { + intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); + return this.times('1e' + k); + }; + + + /* + * sqrt(-n) = N + * sqrt(N) = N + * sqrt(-I) = N + * sqrt(I) = I + * sqrt(0) = 0 + * sqrt(-0) = -0 + * + * Return a new BigNumber whose value is the square root of the value of this BigNumber, + * rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.squareRoot = P.sqrt = function () { + var m, n, r, rep, t, + x = this, + c = x.c, + s = x.s, + e = x.e, + dp = DECIMAL_PLACES + 4, + half = new BigNumber('0.5'); + + // Negative/NaN/Infinity/zero? + if (s !== 1 || !c || !c[0]) { + return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); + } + + // Initial estimate. + s = Math.sqrt(+valueOf(x)); + + // Math.sqrt underflow/overflow? + // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
+ if (s == 0 || s == 1 / 0) { + n = coeffToString(c); + if ((n.length + e) % 2 == 0) n += '0'; + s = Math.sqrt(+n); + e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); + + if (s == 1 / 0) { + n = '5e' + e; + } else { + n = s.toExponential(); + n = n.slice(0, n.indexOf('e') + 1) + e; + } + + r = new BigNumber(n); + } else { + r = new BigNumber(s + ''); + } + + // Check for zero. + // r could be zero if MIN_EXP is changed after the this value was created. + // This would cause a division by zero (x/t) and hence Infinity below, which would cause + // coeffToString to throw. + if (r.c[0]) { + e = r.e; + s = e + dp; + if (s < 3) s = 0; + + // Newton-Raphson iteration. + for (; ;) { + t = r; + r = half.times(t.plus(div(x, t, dp, 1))); + + if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { + + // The exponent of r may here be one less than the final result exponent, + // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits + // are indexed correctly. + if (r.e < e) --s; + n = n.slice(s - 3, s + 1); + + // The 4th rounding digit may be in error by -1 so if the 4 rounding digits + // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the + // iteration. + if (n == '9999' || !rep && n == '4999') { + + // On the first iteration only, check to see if rounding up gives the + // exact result as the nines may infinitely repeat. + if (!rep) { + round(t, t.e + DECIMAL_PLACES + 2, 0); + + if (t.times(t).eq(x)) { + r = t; + break; + } + } + + dp += 4; + s += 4; + rep = 1; + } else { + + // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact + // result. If not, then there are further digits and m will be truthy. + if (!+n || !+n.slice(1) && n.charAt(0) == '5') { + + // Truncate to the first rounding digit. + round(r, r.e + DECIMAL_PLACES + 2, 1); + m = !r.times(r).eq(x); + } + + break; + } + } + } + } + + return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); + }; + + + /* + * Return a string representing the value of this BigNumber in exponential notation and + * rounded using ROUNDING_MODE to dp fixed decimal places. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toExponential = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp++; + } + return format(this, dp, rm, 1); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounding + * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', + * but e.g. (-0.00001).toFixed(0) is '-0'. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toFixed = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp = dp + this.e + 1; + } + return format(this, dp, rm); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounded + * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties + * of the format or FORMAT object (see BigNumber.set). + * + * The formatting object may contain some or all of the properties shown below. 
+ * + * FORMAT = { + * prefix: '', + * groupSize: 3, + * secondaryGroupSize: 0, + * groupSeparator: ',', + * decimalSeparator: '.', + * fractionGroupSize: 0, + * fractionGroupSeparator: '\xA0', // non-breaking space + * suffix: '' + * }; + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * [format] {object} Formatting options. See FORMAT pbject above. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + * '[BigNumber Error] Argument not an object: {format}' + */ + P.toFormat = function (dp, rm, format) { + var str, + x = this; + + if (format == null) { + if (dp != null && rm && typeof rm == 'object') { + format = rm; + rm = null; + } else if (dp && typeof dp == 'object') { + format = dp; + dp = rm = null; + } else { + format = FORMAT; + } + } else if (typeof format != 'object') { + throw Error + (bignumberError + 'Argument not an object: ' + format); + } + + str = x.toFixed(dp, rm); + + if (x.c) { + var i, + arr = str.split('.'), + g1 = +format.groupSize, + g2 = +format.secondaryGroupSize, + groupSeparator = format.groupSeparator || '', + intPart = arr[0], + fractionPart = arr[1], + isNeg = x.s < 0, + intDigits = isNeg ? intPart.slice(1) : intPart, + len = intDigits.length; + + if (g2) { + i = g1; + g1 = g2; + g2 = i; + len -= i; + } + + if (g1 > 0 && len > 0) { + i = len % g1 || g1; + intPart = intDigits.substr(0, i); + for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); + if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); + if (isNeg) intPart = '-' + intPart; + } + + str = fractionPart + ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) + ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), + '$&' + (format.fractionGroupSeparator || '')) + : fractionPart) + : intPart; + } + + return (format.prefix || '') + str + (format.suffix || ''); + }; + + + /* + * Return an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to the specified + * maximum denominator. If a maximum denominator is not specified, the denominator will be + * the lowest value necessary to represent the number exactly. + * + * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. + * + * '[BigNumber Error] Argument {not an integer|out of range} : {md}' + */ + P.toFraction = function (md) { + var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, + x = this, + xc = x.c; + + if (md != null) { + n = new BigNumber(md); + + // Throw if md is less than one or is not an integer, unless it is Infinity. + if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { + throw Error + (bignumberError + 'Argument ' + + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); + } + } + + if (!xc) return new BigNumber(x); + + d = new BigNumber(ONE); + n1 = d0 = new BigNumber(ONE); + d1 = n0 = new BigNumber(ONE); + s = coeffToString(xc); + + // Determine initial denominator. + // d is a power of 10 and the minimum max denominator that specifies the value exactly. + e = d.e = s.length - x.e - 1; + d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; + md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; + + exp = MAX_EXP; + MAX_EXP = 1 / 0; + n = new BigNumber(s); + + // n0 = d1 = 0 + n0.c[0] = 0; + + for (; ;) { + q = div(n, d, 0, 1); + d2 = d0.plus(q.times(d1)); + if (d2.comparedTo(md) == 1) break; + d0 = d1; + d1 = d2; + n1 = n0.plus(q.times(d2 = n1)); + n0 = d2; + d = n.minus(q.times(d2 = d)); + n = d2; + } + + d2 = div(md.minus(d0), d1, 0, 1); + n0 = n0.plus(d2.times(n1)); + d0 = d0.plus(d2.times(d1)); + n0.s = n1.s = x.s; + e = e * 2; + + // Determine which fraction is closer to x, n0/d0 or n1/d1 + r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( + div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; + + MAX_EXP = exp; + + return r; + }; + + + /* + * Return the value of this BigNumber converted to a number primitive. + */ + P.toNumber = function () { + return +valueOf(this); + }; + + + /* + * Return a string representing the value of this BigNumber rounded to sd significant digits + * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits + * necessary to represent the integer part of the value in fixed-point notation, then use + * exponential notation. + * + * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.toPrecision = function (sd, rm) { + if (sd != null) intCheck(sd, 1, MAX); + return format(this, sd, rm, 2); + }; + + + /* + * Return a string representing the value of this BigNumber in base b, or base 10 if b is + * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and + * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent + * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than + * TO_EXP_NEG, return exponential notation. + * + * [b] {number} Integer, 2 to ALPHABET.length inclusive. + * + * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + */ + P.toString = function (b) { + var str, + n = this, + s = n.s, + e = n.e; + + // Infinity or NaN? + if (e === null) { + if (s) { + str = 'Infinity'; + if (s < 0) str = '-' + str; + } else { + str = 'NaN'; + } + } else { + if (b == null) { + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(coeffToString(n.c), e) + : toFixedPoint(coeffToString(n.c), e, '0'); + } else if (b === 10 && alphabetHasNormalDecimalDigits) { + n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); + str = toFixedPoint(coeffToString(n.c), n.e, '0'); + } else { + intCheck(b, 2, ALPHABET.length, 'Base'); + str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); + } + + if (s < 0 && n.c[0]) str = '-' + str; + } + + return str; + }; + + + /* + * Return as toString, but do not accept a base argument, and include the minus sign for + * negative zero. + */ + P.valueOf = P.toJSON = function () { + return valueOf(this); + }; + + + P._isBigNumber = true; + + if (configObject != null) BigNumber.set(configObject); + + return BigNumber; + } + + + // PRIVATE HELPER FUNCTIONS + + // These functions don't need access to variables, + // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. + + + function bitFloor(n) { + var i = n | 0; + return n > 0 || n === i ? i : i - 1; + } + + + // Return a coefficient array as a string of base 10 digits. 
+ function coeffToString(a) { + var s, z, + i = 1, + j = a.length, + r = a[0] + ''; + + for (; i < j;) { + s = a[i++] + ''; + z = LOG_BASE - s.length; + for (; z--; s = '0' + s); + r += s; + } + + // Determine trailing zeros. + for (j = r.length; r.charCodeAt(--j) === 48;); + + return r.slice(0, j + 1 || 1); + } + + + // Compare the value of BigNumbers x and y. + function compare(x, y) { + var a, b, + xc = x.c, + yc = y.c, + i = x.s, + j = y.s, + k = x.e, + l = y.e; + + // Either NaN? + if (!i || !j) return null; + + a = xc && !xc[0]; + b = yc && !yc[0]; + + // Either zero? + if (a || b) return a ? b ? 0 : -j : i; + + // Signs differ? + if (i != j) return i; + + a = i < 0; + b = k == l; + + // Either Infinity? + if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; + + // Compare exponents. + if (!b) return k > l ^ a ? 1 : -1; + + j = (k = xc.length) < (l = yc.length) ? k : l; + + // Compare digit by digit. + for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; + + // Compare lengths. + return k == l ? 0 : k > l ^ a ? 1 : -1; + } + + + /* + * Check that n is a primitive number, an integer, and in range, otherwise throw. + */ + function intCheck(n, min, max, name) { + if (n < min || n > max || n !== mathfloor(n)) { + throw Error + (bignumberError + (name || 'Argument') + (typeof n == 'number' + ? n < min || n > max ? ' out of range: ' : ' not an integer: ' + : ' not a primitive number: ') + String(n)); + } + } + + + // Assumes finite n. + function isOdd(n) { + var k = n.c.length - 1; + return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; + } + + + function toExponential(str, e) { + return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + + (e < 0 ? 'e' : 'e+') + e; + } + + + function toFixedPoint(str, e, z) { + var len, zs; + + // Negative exponent? + if (e < 0) { + + // Prepend zeros. + for (zs = z + '.'; ++e; zs += z); + str = zs + str; + + // Positive exponent + } else { + len = str.length; + + // Append zeros. + if (++e > len) { + for (zs = z, e -= len; --e; zs += z); + str += zs; + } else if (e < len) { + str = str.slice(0, e) + '.' + str.slice(e); + } + } + + return str; + } + + + // EXPORT + + + BigNumber = clone(); + BigNumber['default'] = BigNumber.BigNumber = BigNumber; + + // AMD. + if (typeof define == 'function' && define.amd) { + define(function () { return BigNumber; }); + + // Node.js and other environments that support module.exports. + } else if (typeof module != 'undefined' && module.exports) { + module.exports = BigNumber; + + // Browser. + } else { + if (!globalObject) { + globalObject = typeof self != 'undefined' && self ? self : window; + } + + globalObject.BigNumber = BigNumber; + } +})(this); diff --git a/node_modules/bignumber.js/bignumber.mjs b/node_modules/bignumber.js/bignumber.mjs new file mode 100644 index 0000000..f154015 --- /dev/null +++ b/node_modules/bignumber.js/bignumber.mjs @@ -0,0 +1,2907 @@ +/* + * bignumber.js v9.1.2 + * A JavaScript library for arbitrary-precision arithmetic. + * https://github.com/MikeMcl/bignumber.js + * Copyright (c) 2022 Michael Mclaughlin + * MIT Licensed. 
+ * + * BigNumber.prototype methods | BigNumber methods + * | + * absoluteValue abs | clone + * comparedTo | config set + * decimalPlaces dp | DECIMAL_PLACES + * dividedBy div | ROUNDING_MODE + * dividedToIntegerBy idiv | EXPONENTIAL_AT + * exponentiatedBy pow | RANGE + * integerValue | CRYPTO + * isEqualTo eq | MODULO_MODE + * isFinite | POW_PRECISION + * isGreaterThan gt | FORMAT + * isGreaterThanOrEqualTo gte | ALPHABET + * isInteger | isBigNumber + * isLessThan lt | maximum max + * isLessThanOrEqualTo lte | minimum min + * isNaN | random + * isNegative | sum + * isPositive | + * isZero | + * minus | + * modulo mod | + * multipliedBy times | + * negated | + * plus | + * precision sd | + * shiftedBy | + * squareRoot sqrt | + * toExponential | + * toFixed | + * toFormat | + * toFraction | + * toJSON | + * toNumber | + * toPrecision | + * toString | + * valueOf | + * + */ + + +var + isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, + mathceil = Math.ceil, + mathfloor = Math.floor, + + bignumberError = '[BigNumber Error] ', + tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', + + BASE = 1e14, + LOG_BASE = 14, + MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 + // MAX_INT32 = 0x7fffffff, // 2^31 - 1 + POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], + SQRT_BASE = 1e7, + + // EDITABLE + // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and + // the arguments to toExponential, toFixed, toFormat, and toPrecision. + MAX = 1E9; // 0 to MAX_INT32 + + +/* + * Create and return a BigNumber constructor. + */ +function clone(configObject) { + var div, convertBase, parseNumeric, + P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, + ONE = new BigNumber(1), + + + //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- + + + // The default values below must be integers within the inclusive ranges stated. + // The values can also be changed at run-time using BigNumber.set. + + // The maximum number of decimal places for operations involving division. + DECIMAL_PLACES = 20, // 0 to MAX + + // The rounding mode used when rounding to the above decimal places, and when using + // toExponential, toFixed, toFormat and toPrecision, and round (default value). + // UP 0 Away from zero. + // DOWN 1 Towards zero. + // CEIL 2 Towards +Infinity. + // FLOOR 3 Towards -Infinity. + // HALF_UP 4 Towards nearest neighbour. If equidistant, up. + // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. + // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. + // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. + // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. + ROUNDING_MODE = 4, // 0 to 8 + + // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] + + // The exponent value at and beneath which toString returns exponential notation. + // Number type: -7 + TO_EXP_NEG = -7, // 0 to -MAX + + // The exponent value at and above which toString returns exponential notation. + // Number type: 21 + TO_EXP_POS = 21, // 0 to MAX + + // RANGE : [MIN_EXP, MAX_EXP] + + // The minimum exponent value, beneath which underflow to zero occurs. + // Number type: -324 (5e-324) + MIN_EXP = -1e7, // -1 to -MAX + + // The maximum exponent value, above which overflow to Infinity occurs. + // Number type: 308 (1.7976931348623157e+308) + // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow. + MAX_EXP = 1e7, // 1 to MAX + + // Whether to use cryptographically-secure random number generation, if available. + CRYPTO = false, // true or false + + // The modulo mode used when calculating the modulus: a mod n. + // The quotient (q = a / n) is calculated according to the corresponding rounding mode. + // The remainder (r) is calculated as: r = a - n * q. + // + // UP 0 The remainder is positive if the dividend is negative, else is negative. + // DOWN 1 The remainder has the same sign as the dividend. + // This modulo mode is commonly known as 'truncated division' and is + // equivalent to (a % n) in JavaScript. + // FLOOR 3 The remainder has the same sign as the divisor (Python %). + // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. + // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). + // The remainder is always positive. + // + // The truncated division, floored division, Euclidian division and IEEE 754 remainder + // modes are commonly used for the modulus operation. + // Although the other rounding modes can also be used, they may not give useful results. + MODULO_MODE = 1, // 0 to 9 + + // The maximum number of significant digits of the result of the exponentiatedBy operation. + // If POW_PRECISION is 0, there will be unlimited significant digits. + POW_PRECISION = 0, // 0 to MAX + + // The format specification used by the BigNumber.prototype.toFormat method. + FORMAT = { + prefix: '', + groupSize: 3, + secondaryGroupSize: 0, + groupSeparator: ',', + decimalSeparator: '.', + fractionGroupSize: 0, + fractionGroupSeparator: '\xA0', // non-breaking space + suffix: '' + }, + + // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', + // '-', '.', whitespace, or repeated character. + // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', + alphabetHasNormalDecimalDigits = true; + + + //------------------------------------------------------------------------------------------ + + + // CONSTRUCTOR + + + /* + * The BigNumber constructor and exported function. + * Create and return a new instance of a BigNumber object. + * + * v {number|string|BigNumber} A numeric value. + * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. + */ + function BigNumber(v, b) { + var alphabet, c, caseChanged, e, i, isNum, len, str, + x = this; + + // Enable constructor call without `new`. + if (!(x instanceof BigNumber)) return new BigNumber(v, b); + + if (b == null) { + + if (v && v._isBigNumber === true) { + x.s = v.s; + + if (!v.c || v.e > MAX_EXP) { + x.c = x.e = null; + } else if (v.e < MIN_EXP) { + x.c = [x.e = 0]; + } else { + x.e = v.e; + x.c = v.c.slice(); + } + + return; + } + + if ((isNum = typeof v == 'number') && v * 0 == 0) { + + // Use `1 / n` to handle minus zero also. + x.s = 1 / v < 0 ? (v = -v, -1) : 1; + + // Fast path for integers, where n < 2147483648 (2**31). + if (v === ~~v) { + for (e = 0, i = v; i >= 10; i /= 10, e++); + + if (e > MAX_EXP) { + x.c = x.e = null; + } else { + x.e = e; + x.c = [v]; + } + + return; + } + + str = String(v); + } else { + + if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); + + x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; + } + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + + // Exponential form? + if ((i = str.search(/e/i)) > 0) { + + // Determine exponent. 
+ if (e < 0) e = i; + e += +str.slice(i + 1); + str = str.substring(0, i); + } else if (e < 0) { + + // Integer. + e = str.length; + } + + } else { + + // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + intCheck(b, 2, ALPHABET.length, 'Base'); + + // Allow exponential notation to be used with base 10 argument, while + // also rounding to DECIMAL_PLACES as with other bases. + if (b == 10 && alphabetHasNormalDecimalDigits) { + x = new BigNumber(v); + return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); + } + + str = String(v); + + if (isNum = typeof v == 'number') { + + // Avoid potential interpretation of Infinity and NaN as base 44+ values. + if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + + x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { + throw Error + (tooManyDigits + v); + } + } else { + x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; + } + + alphabet = ALPHABET.slice(0, b); + e = i = 0; + + // Check that str is a valid base b number. + // Don't use RegExp, so alphabet can contain special characters. + for (len = str.length; i < len; i++) { + if (alphabet.indexOf(c = str.charAt(i)) < 0) { + if (c == '.') { + + // If '.' is not the first character and it has not be found before. + if (i > e) { + e = len; + continue; + } + } else if (!caseChanged) { + + // Allow e.g. hexadecimal 'FF' as well as 'ff'. + if (str == str.toUpperCase() && (str = str.toLowerCase()) || + str == str.toLowerCase() && (str = str.toUpperCase())) { + caseChanged = true; + i = -1; + e = 0; + continue; + } + } + + return parseNumeric(x, String(v), isNum, b); + } + } + + // Prevent later check for length on converted number. + isNum = false; + str = convertBase(str, b, 10, x.s); + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + else e = str.length; + } + + // Determine leading zeros. + for (i = 0; str.charCodeAt(i) === 48; i++); + + // Determine trailing zeros. + for (len = str.length; str.charCodeAt(--len) === 48;); + + if (str = str.slice(i, ++len)) { + len -= i; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (isNum && BigNumber.DEBUG && + len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { + throw Error + (tooManyDigits + (x.s * v)); + } + + // Overflow? + if ((e = e - i - 1) > MAX_EXP) { + + // Infinity. + x.c = x.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + x.c = [x.e = 0]; + } else { + x.e = e; + x.c = []; + + // Transform base + + // e is the base 10 exponent. + // i is where to slice str to get the first element of the coefficient array. + i = (e + 1) % LOG_BASE; + if (e < 0) i += LOG_BASE; // i < 1 + + if (i < len) { + if (i) x.c.push(+str.slice(0, i)); + + for (len -= LOG_BASE; i < len;) { + x.c.push(+str.slice(i, i += LOG_BASE)); + } + + i = LOG_BASE - (str = str.slice(i)).length; + } else { + i -= len; + } + + for (; i--; str += '0'); + x.c.push(+str); + } + } else { + + // Zero. 
+ x.c = [x.e = 0]; + } + } + + + // CONSTRUCTOR PROPERTIES + + + BigNumber.clone = clone; + + BigNumber.ROUND_UP = 0; + BigNumber.ROUND_DOWN = 1; + BigNumber.ROUND_CEIL = 2; + BigNumber.ROUND_FLOOR = 3; + BigNumber.ROUND_HALF_UP = 4; + BigNumber.ROUND_HALF_DOWN = 5; + BigNumber.ROUND_HALF_EVEN = 6; + BigNumber.ROUND_HALF_CEIL = 7; + BigNumber.ROUND_HALF_FLOOR = 8; + BigNumber.EUCLID = 9; + + + /* + * Configure infrequently-changing library-wide settings. + * + * Accept an object with the following optional properties (if the value of a property is + * a number, it must be an integer within the inclusive range stated): + * + * DECIMAL_PLACES {number} 0 to MAX + * ROUNDING_MODE {number} 0 to 8 + * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] + * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] + * CRYPTO {boolean} true or false + * MODULO_MODE {number} 0 to 9 + * POW_PRECISION {number} 0 to MAX + * ALPHABET {string} A string of two or more unique characters which does + * not contain '.'. + * FORMAT {object} An object with some of the following properties: + * prefix {string} + * groupSize {number} + * secondaryGroupSize {number} + * groupSeparator {string} + * decimalSeparator {string} + * fractionGroupSize {number} + * fractionGroupSeparator {string} + * suffix {string} + * + * (The values assigned to the above FORMAT object properties are not checked for validity.) + * + * E.g. + * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) + * + * Ignore properties/parameters set to null or undefined, except for ALPHABET. + * + * Return an object with the properties current values. + */ + BigNumber.config = BigNumber.set = function (obj) { + var p, v; + + if (obj != null) { + + if (typeof obj == 'object') { + + // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + DECIMAL_PLACES = v; + } + + // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. + // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { + v = obj[p]; + intCheck(v, 0, 8, p); + ROUNDING_MODE = v; + } + + // EXPONENTIAL_AT {number|number[]} + // Integer, -MAX to MAX inclusive or + // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. + // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, 0, p); + intCheck(v[1], 0, MAX, p); + TO_EXP_NEG = v[0]; + TO_EXP_POS = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); + } + } + + // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or + // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. + // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' + if (obj.hasOwnProperty(p = 'RANGE')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, -1, p); + intCheck(v[1], 1, MAX, p); + MIN_EXP = v[0]; + MAX_EXP = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + if (v) { + MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); + } else { + throw Error + (bignumberError + p + ' cannot be zero: ' + v); + } + } + } + + // CRYPTO {boolean} true or false. 
+ // '[BigNumber Error] CRYPTO not true or false: {v}' + // '[BigNumber Error] crypto unavailable' + if (obj.hasOwnProperty(p = 'CRYPTO')) { + v = obj[p]; + if (v === !!v) { + if (v) { + if (typeof crypto != 'undefined' && crypto && + (crypto.getRandomValues || crypto.randomBytes)) { + CRYPTO = v; + } else { + CRYPTO = !v; + throw Error + (bignumberError + 'crypto unavailable'); + } + } else { + CRYPTO = v; + } + } else { + throw Error + (bignumberError + p + ' not true or false: ' + v); + } + } + + // MODULO_MODE {number} Integer, 0 to 9 inclusive. + // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'MODULO_MODE')) { + v = obj[p]; + intCheck(v, 0, 9, p); + MODULO_MODE = v; + } + + // POW_PRECISION {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'POW_PRECISION')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + POW_PRECISION = v; + } + + // FORMAT {object} + // '[BigNumber Error] FORMAT not an object: {v}' + if (obj.hasOwnProperty(p = 'FORMAT')) { + v = obj[p]; + if (typeof v == 'object') FORMAT = v; + else throw Error + (bignumberError + p + ' not an object: ' + v); + } + + // ALPHABET {string} + // '[BigNumber Error] ALPHABET invalid: {v}' + if (obj.hasOwnProperty(p = 'ALPHABET')) { + v = obj[p]; + + // Disallow if less than two characters, + // or if it contains '+', '-', '.', whitespace, or a repeated character. + if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { + alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; + ALPHABET = v; + } else { + throw Error + (bignumberError + p + ' invalid: ' + v); + } + } + + } else { + + // '[BigNumber Error] Object expected: {v}' + throw Error + (bignumberError + 'Object expected: ' + obj); + } + } + + return { + DECIMAL_PLACES: DECIMAL_PLACES, + ROUNDING_MODE: ROUNDING_MODE, + EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], + RANGE: [MIN_EXP, MAX_EXP], + CRYPTO: CRYPTO, + MODULO_MODE: MODULO_MODE, + POW_PRECISION: POW_PRECISION, + FORMAT: FORMAT, + ALPHABET: ALPHABET + }; + }; + + + /* + * Return true if v is a BigNumber instance, otherwise return false. + * + * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. + * + * v {any} + * + * '[BigNumber Error] Invalid BigNumber: {v}' + */ + BigNumber.isBigNumber = function (v) { + if (!v || v._isBigNumber !== true) return false; + if (!BigNumber.DEBUG) return true; + + var i, n, + c = v.c, + e = v.e, + s = v.s; + + out: if ({}.toString.call(c) == '[object Array]') { + + if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { + + // If the first element is zero, the BigNumber value must be zero. + if (c[0] === 0) { + if (e === 0 && c.length === 1) return true; + break out; + } + + // Calculate number of digits that c[0] should have, based on the exponent. + i = (e + 1) % LOG_BASE; + if (i < 1) i += LOG_BASE; + + // Calculate number of digits of c[0]. + //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { + if (String(c[0]).length == i) { + + for (i = 0; i < c.length; i++) { + n = c[i]; + if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; + } + + // Last element cannot be zero, unless it is the only element. 
+ if (n !== 0) return true; + } + } + + // Infinity/NaN + } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { + return true; + } + + throw Error + (bignumberError + 'Invalid BigNumber: ' + v); + }; + + + /* + * Return a new BigNumber whose value is the maximum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.maximum = BigNumber.max = function () { + return maxOrMin(arguments, -1); + }; + + + /* + * Return a new BigNumber whose value is the minimum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.minimum = BigNumber.min = function () { + return maxOrMin(arguments, 1); + }; + + + /* + * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, + * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing + * zeros are produced). + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' + * '[BigNumber Error] crypto unavailable' + */ + BigNumber.random = (function () { + var pow2_53 = 0x20000000000000; + + // Return a 53 bit integer n, where 0 <= n < 9007199254740992. + // Check if Math.random() produces more than 32 bits of randomness. + // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. + // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. + var random53bitInt = (Math.random() * pow2_53) & 0x1fffff + ? function () { return mathfloor(Math.random() * pow2_53); } + : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + + (Math.random() * 0x800000 | 0); }; + + return function (dp) { + var a, b, e, k, v, + i = 0, + c = [], + rand = new BigNumber(ONE); + + if (dp == null) dp = DECIMAL_PLACES; + else intCheck(dp, 0, MAX); + + k = mathceil(dp / LOG_BASE); + + if (CRYPTO) { + + // Browsers supporting crypto.getRandomValues. + if (crypto.getRandomValues) { + + a = crypto.getRandomValues(new Uint32Array(k *= 2)); + + for (; i < k;) { + + // 53 bits: + // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) + // 11111 11111111 11111111 11111111 11100000 00000000 00000000 + // ((Math.pow(2, 32) - 1) >>> 11).toString(2) + // 11111 11111111 11111111 + // 0x20000 is 2^21. + v = a[i] * 0x20000 + (a[i + 1] >>> 11); + + // Rejection sampling: + // 0 <= v < 9007199254740992 + // Probability that v >= 9e15, is + // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 + if (v >= 9e15) { + b = crypto.getRandomValues(new Uint32Array(2)); + a[i] = b[0]; + a[i + 1] = b[1]; + } else { + + // 0 <= v <= 8999999999999999 + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 2; + } + } + i = k / 2; + + // Node.js supporting crypto.randomBytes. + } else if (crypto.randomBytes) { + + // buffer + a = crypto.randomBytes(k *= 7); + + for (; i < k;) { + + // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 + // 0x100000000 is 2^32, 0x1000000 is 2^24 + // 11111 11111111 11111111 11111111 11111111 11111111 11111111 + // 0 <= v < 9007199254740992 + v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + + if (v >= 9e15) { + crypto.randomBytes(7).copy(a, i); + } else { + + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 7; + } + } + i = k / 7; + } else { + CRYPTO = false; + throw Error + (bignumberError + 'crypto unavailable'); + } + } + + // Use Math.random. 
+ if (!CRYPTO) { + + for (; i < k;) { + v = random53bitInt(); + if (v < 9e15) c[i++] = v % 1e14; + } + } + + k = c[--i]; + dp %= LOG_BASE; + + // Convert trailing digits to zeros according to dp. + if (k && dp) { + v = POWS_TEN[LOG_BASE - dp]; + c[i] = mathfloor(k / v) * v; + } + + // Remove trailing elements which are zero. + for (; c[i] === 0; c.pop(), i--); + + // Zero? + if (i < 0) { + c = [e = 0]; + } else { + + // Remove leading elements which are zero and adjust exponent accordingly. + for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); + + // Count the digits of the first element of c to determine leading zeros, and... + for (i = 1, v = c[0]; v >= 10; v /= 10, i++); + + // adjust the exponent accordingly. + if (i < LOG_BASE) e -= LOG_BASE - i; + } + + rand.e = e; + rand.c = c; + return rand; + }; + })(); + + + /* + * Return a BigNumber whose value is the sum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.sum = function () { + var i = 1, + args = arguments, + sum = new BigNumber(args[0]); + for (; i < args.length;) sum = sum.plus(args[i++]); + return sum; + }; + + + // PRIVATE FUNCTIONS + + + // Called by BigNumber and BigNumber.prototype.toString. + convertBase = (function () { + var decimal = '0123456789'; + + /* + * Convert string of baseIn to an array of numbers of baseOut. + * Eg. toBaseOut('255', 10, 16) returns [15, 15]. + * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. + */ + function toBaseOut(str, baseIn, baseOut, alphabet) { + var j, + arr = [0], + arrL, + i = 0, + len = str.length; + + for (; i < len;) { + for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); + + arr[0] += alphabet.indexOf(str.charAt(i++)); + + for (j = 0; j < arr.length; j++) { + + if (arr[j] > baseOut - 1) { + if (arr[j + 1] == null) arr[j + 1] = 0; + arr[j + 1] += arr[j] / baseOut | 0; + arr[j] %= baseOut; + } + } + } + + return arr.reverse(); + } + + // Convert a numeric string of baseIn to a numeric string of baseOut. + // If the caller is toString, we are converting from base 10 to baseOut. + // If the caller is BigNumber, we are converting from baseIn to base 10. + return function (str, baseIn, baseOut, sign, callerIsToString) { + var alphabet, d, e, k, r, x, xc, y, + i = str.indexOf('.'), + dp = DECIMAL_PLACES, + rm = ROUNDING_MODE; + + // Non-integer. + if (i >= 0) { + k = POW_PRECISION; + + // Unlimited precision. + POW_PRECISION = 0; + str = str.replace('.', ''); + y = new BigNumber(baseIn); + x = y.pow(str.length - i); + POW_PRECISION = k; + + // Convert str as if an integer, then restore the fraction part by dividing the + // result by its base raised to a power. + + y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), + 10, baseOut, decimal); + y.e = y.c.length; + } + + // Convert the number as integer. + + xc = toBaseOut(str, baseIn, baseOut, callerIsToString + ? (alphabet = ALPHABET, decimal) + : (alphabet = decimal, ALPHABET)); + + // xc now represents str as an integer and converted to baseOut. e is the exponent. + e = k = xc.length; + + // Remove trailing zeros. + for (; xc[--k] == 0; xc.pop()); + + // Zero? + if (!xc[0]) return alphabet.charAt(0); + + // Does str represent an integer? If so, no need for the division. + if (i < 0) { + --e; + } else { + x.c = xc; + x.e = e; + + // The sign is needed for correct rounding. + x.s = sign; + x = div(x, y, dp, rm, baseOut); + xc = x.c; + r = x.r; + e = x.e; + } + + // xc now represents str converted to baseOut. + + // THe index of the rounding digit. 
+ d = e + dp + 1; + + // The rounding digit: the digit to the right of the digit that may be rounded up. + i = xc[d]; + + // Look at the rounding digits and mode to determine whether to round up. + + k = baseOut / 2; + r = r || d < 0 || xc[d + 1] != null; + + r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || + rm == (x.s < 0 ? 8 : 7)); + + // If the index of the rounding digit is not greater than zero, or xc represents + // zero, then the result of the base conversion is zero or, if rounding up, a value + // such as 0.00001. + if (d < 1 || !xc[0]) { + + // 1^-dp or 0 + str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + } else { + + // Truncate xc to the required number of decimal places. + xc.length = d; + + // Round up? + if (r) { + + // Rounding up may mean the previous digit has to be rounded up and so on. + for (--baseOut; ++xc[--d] > baseOut;) { + xc[d] = 0; + + if (!d) { + ++e; + xc = [1].concat(xc); + } + } + } + + // Determine trailing zeros. + for (k = xc.length; !xc[--k];); + + // E.g. [4, 11, 15] becomes 4bf. + for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); + + // Add leading zeros, decimal point and trailing zeros as required. + str = toFixedPoint(str, e, alphabet.charAt(0)); + } + + // The caller will add the sign. + return str; + }; + })(); + + + // Perform division in the specified base. Called by div and convertBase. + div = (function () { + + // Assume non-zero x and k. + function multiply(x, k, base) { + var m, temp, xlo, xhi, + carry = 0, + i = x.length, + klo = k % SQRT_BASE, + khi = k / SQRT_BASE | 0; + + for (x = x.slice(); i--;) { + xlo = x[i] % SQRT_BASE; + xhi = x[i] / SQRT_BASE | 0; + m = khi * xlo + xhi * klo; + temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; + carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; + x[i] = temp % base; + } + + if (carry) x = [carry].concat(x); + + return x; + } + + function compare(a, b, aL, bL) { + var i, cmp; + + if (aL != bL) { + cmp = aL > bL ? 1 : -1; + } else { + + for (i = cmp = 0; i < aL; i++) { + + if (a[i] != b[i]) { + cmp = a[i] > b[i] ? 1 : -1; + break; + } + } + } + + return cmp; + } + + function subtract(a, b, aL, base) { + var i = 0; + + // Subtract b from a. + for (; aL--;) { + a[aL] -= i; + i = a[aL] < b[aL] ? 1 : 0; + a[aL] = i * base + a[aL] - b[aL]; + } + + // Remove leading zeros. + for (; !a[0] && a.length > 1; a.splice(0, 1)); + } + + // x: dividend, y: divisor. + return function (x, y, dp, rm, base) { + var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, + yL, yz, + s = x.s == y.s ? 1 : -1, + xc = x.c, + yc = y.c; + + // Either NaN, Infinity or 0? + if (!xc || !xc[0] || !yc || !yc[0]) { + + return new BigNumber( + + // Return NaN if either NaN, or both Infinity or 0. + !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : + + // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. + xc && xc[0] == 0 || !yc ? s * 0 : s / 0 + ); + } + + q = new BigNumber(s); + qc = q.c = []; + e = x.e - y.e; + s = dp + e + 1; + + if (!base) { + base = BASE; + e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); + s = s / LOG_BASE | 0; + } + + // Result exponent may be one less then the current value of e. + // The coefficients of the BigNumbers from convertBase may have trailing zeros. 
+ for (i = 0; yc[i] == (xc[i] || 0); i++); + + if (yc[i] > (xc[i] || 0)) e--; + + if (s < 0) { + qc.push(1); + more = true; + } else { + xL = xc.length; + yL = yc.length; + i = 0; + s += 2; + + // Normalise xc and yc so highest order digit of yc is >= base / 2. + + n = mathfloor(base / (yc[0] + 1)); + + // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. + // if (n > 1 || n++ == 1 && yc[0] < base / 2) { + if (n > 1) { + yc = multiply(yc, n, base); + xc = multiply(xc, n, base); + yL = yc.length; + xL = xc.length; + } + + xi = yL; + rem = xc.slice(0, yL); + remL = rem.length; + + // Add zeros to make remainder as long as divisor. + for (; remL < yL; rem[remL++] = 0); + yz = yc.slice(); + yz = [0].concat(yz); + yc0 = yc[0]; + if (yc[1] >= base / 2) yc0++; + // Not necessary, but to prevent trial digit n > base, when using base 3. + // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; + + do { + n = 0; + + // Compare divisor and remainder. + cmp = compare(yc, rem, yL, remL); + + // If divisor < remainder. + if (cmp < 0) { + + // Calculate trial digit, n. + + rem0 = rem[0]; + if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); + + // n is how many times the divisor goes into the current remainder. + n = mathfloor(rem0 / yc0); + + // Algorithm: + // product = divisor multiplied by trial digit (n). + // Compare product and remainder. + // If product is greater than remainder: + // Subtract divisor from product, decrement trial digit. + // Subtract product from remainder. + // If product was less than remainder at the last compare: + // Compare new remainder and divisor. + // If remainder is greater than divisor: + // Subtract divisor from remainder, increment trial digit. + + if (n > 1) { + + // n may be > base only when base is 3. + if (n >= base) n = base - 1; + + // product = divisor * trial digit. + prod = multiply(yc, n, base); + prodL = prod.length; + remL = rem.length; + + // Compare product and remainder. + // If product > remainder then trial digit n too high. + // n is 1 too high about 5% of the time, and is not known to have + // ever been more than 1 too high. + while (compare(prod, rem, prodL, remL) == 1) { + n--; + + // Subtract divisor from product. + subtract(prod, yL < prodL ? yz : yc, prodL, base); + prodL = prod.length; + cmp = 1; + } + } else { + + // n is 0 or 1, cmp is -1. + // If n is 0, there is no need to compare yc and rem again below, + // so change cmp to 1 to avoid it. + // If n is 1, leave cmp as -1, so yc and rem are compared again. + if (n == 0) { + + // divisor < remainder, so n must be at least 1. + cmp = n = 1; + } + + // product = divisor + prod = yc.slice(); + prodL = prod.length; + } + + if (prodL < remL) prod = [0].concat(prod); + + // Subtract product from remainder. + subtract(rem, prod, remL, base); + remL = rem.length; + + // If product was < remainder. + if (cmp == -1) { + + // Compare divisor and new remainder. + // If divisor < new remainder, subtract divisor from remainder. + // Trial digit n too low. + // n is 1 too low about 5% of the time, and very rarely 2 too low. + while (compare(yc, rem, yL, remL) < 1) { + n++; + + // Subtract divisor from remainder. + subtract(rem, yL < remL ? yz : yc, remL, base); + remL = rem.length; + } + } + } else if (cmp === 0) { + n++; + rem = [0]; + } // else cmp === 1 and n will be 0 + + // Add the next digit, n, to the result array. + qc[i++] = n; + + // Update the remainder. 
+ if (rem[0]) { + rem[remL++] = xc[xi] || 0; + } else { + rem = [xc[xi]]; + remL = 1; + } + } while ((xi++ < xL || rem[0] != null) && s--); + + more = rem[0] != null; + + // Leading zero? + if (!qc[0]) qc.splice(0, 1); + } + + if (base == BASE) { + + // To calculate q.e, first get the number of digits of qc[0]. + for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); + + round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); + + // Caller is convertBase. + } else { + q.e = e; + q.r = +more; + } + + return q; + }; + })(); + + + /* + * Return a string representing the value of BigNumber n in fixed-point or exponential + * notation rounded to the specified decimal places or significant digits. + * + * n: a BigNumber. + * i: the index of the last digit required (i.e. the digit that may be rounded up). + * rm: the rounding mode. + * id: 1 (toExponential) or 2 (toPrecision). + */ + function format(n, i, rm, id) { + var c0, e, ne, len, str; + + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + if (!n.c) return n.toString(); + + c0 = n.c[0]; + ne = n.e; + + if (i == null) { + str = coeffToString(n.c); + str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) + ? toExponential(str, ne) + : toFixedPoint(str, ne, '0'); + } else { + n = round(new BigNumber(n), i, rm); + + // n.e may have changed if the value was rounded up. + e = n.e; + + str = coeffToString(n.c); + len = str.length; + + // toPrecision returns exponential notation if the number of significant digits + // specified is less than the number of digits necessary to represent the integer + // part of the value in fixed-point notation. + + // Exponential notation. + if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { + + // Append zeros? + for (; len < i; str += '0', len++); + str = toExponential(str, e); + + // Fixed-point notation. + } else { + i -= ne; + str = toFixedPoint(str, e, '0'); + + // Append zeros? + if (e + 1 > len) { + if (--i > 0) for (str += '.'; i--; str += '0'); + } else { + i += e - len; + if (i > 0) { + if (e + 1 == len) str += '.'; + for (; i--; str += '0'); + } + } + } + } + + return n.s < 0 && c0 ? '-' + str : str; + } + + + // Handle BigNumber.max and BigNumber.min. + // If any number is NaN, return NaN. + function maxOrMin(args, n) { + var k, y, + i = 1, + x = new BigNumber(args[0]); + + for (; i < args.length; i++) { + y = new BigNumber(args[i]); + if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { + x = y; + } + } + + return x; + } + + + /* + * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. + * Called by minus, plus and times. + */ + function normalise(n, c, e) { + var i = 1, + j = c.length; + + // Remove trailing zeros. + for (; !c[--j]; c.pop()); + + // Calculate the base 10 exponent. First get the number of digits of c[0]. + for (j = c[0]; j >= 10; j /= 10, i++); + + // Overflow? + if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { + + // Infinity. + n.c = n.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + n.c = [n.e = 0]; + } else { + n.e = e; + n.c = c; + } + + return n; + } + + + // Handle values that fail the validity test in BigNumber. + parseNumeric = (function () { + var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, + dotAfter = /^([^.]+)\.$/, + dotBefore = /^\.([^.]+)$/, + isInfinityOrNaN = /^-?(Infinity|NaN)$/, + whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; + + return function (x, str, isNum, b) { + var base, + s = isNum ? str : str.replace(whitespaceOrPlus, ''); + + // No exception on ±Infinity or NaN. 
+ if (isInfinityOrNaN.test(s)) { + x.s = isNaN(s) ? null : s < 0 ? -1 : 1; + } else { + if (!isNum) { + + // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i + s = s.replace(basePrefix, function (m, p1, p2) { + base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; + return !b || b == base ? p1 : m; + }); + + if (b) { + base = b; + + // E.g. '1.' to '1', '.1' to '0.1' + s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); + } + + if (str != s) return new BigNumber(s, base); + } + + // '[BigNumber Error] Not a number: {n}' + // '[BigNumber Error] Not a base {b} number: {n}' + if (BigNumber.DEBUG) { + throw Error + (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); + } + + // NaN + x.s = null; + } + + x.c = x.e = null; + } + })(); + + + /* + * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. + * If r is truthy, it is known that there are more digits after the rounding digit. + */ + function round(x, sd, rm, r) { + var d, i, j, k, n, ni, rd, + xc = x.c, + pows10 = POWS_TEN; + + // if x is not Infinity or NaN... + if (xc) { + + // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. + // n is a base 1e14 number, the value of the element of array x.c containing rd. + // ni is the index of n within x.c. + // d is the number of digits of n. + // i is the index of rd within n including leading zeros. + // j is the actual index of rd within n (if < 0, rd is a leading zero). + out: { + + // Get the number of digits of the first element of xc. + for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); + i = sd - d; + + // If the rounding digit is in the first element of xc... + if (i < 0) { + i += LOG_BASE; + j = sd; + n = xc[ni = 0]; + + // Get the rounding digit at index j of n. + rd = mathfloor(n / pows10[d - j - 1] % 10); + } else { + ni = mathceil((i + 1) / LOG_BASE); + + if (ni >= xc.length) { + + if (r) { + + // Needed by sqrt. + for (; xc.length <= ni; xc.push(0)); + n = rd = 0; + d = 1; + i %= LOG_BASE; + j = i - LOG_BASE + 1; + } else { + break out; + } + } else { + n = k = xc[ni]; + + // Get the number of digits of n. + for (d = 1; k >= 10; k /= 10, d++); + + // Get the index of rd within n. + i %= LOG_BASE; + + // Get the index of rd within n, adjusted for leading zeros. + // The number of leading zeros of n is given by LOG_BASE - d. + j = i - LOG_BASE + d; + + // Get the rounding digit at index j of n. + rd = j < 0 ? 0 : mathfloor(n / pows10[d - j - 1] % 10); + } + } + + r = r || sd < 0 || + + // Are there any non-zero digits after the rounding digit? + // The expression n % pows10[d - j - 1] returns all digits of n to the right + // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. + xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); + + r = rm < 4 + ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && + + // Check whether the digit to the left of the rounding digit is odd. + ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || + rm == (x.s < 0 ? 8 : 7)); + + if (sd < 1 || !xc[0]) { + xc.length = 0; + + if (r) { + + // Convert sd to decimal places. + sd -= x.e + 1; + + // 1, 0.1, 0.01, 0.001, 0.0001 etc. + xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; + x.e = -sd || 0; + } else { + + // Zero. + xc[0] = x.e = 0; + } + + return x; + } + + // Remove excess digits. + if (i == 0) { + xc.length = ni; + k = 1; + ni--; + } else { + xc.length = ni + 1; + k = pows10[LOG_BASE - i]; + + // E.g. 
56700 becomes 56000 if 7 is the rounding digit. + // j > 0 means i > number of leading zeros of n. + xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; + } + + // Round up? + if (r) { + + for (; ;) { + + // If the digit to be rounded up is in the first element of xc... + if (ni == 0) { + + // i will be the length of xc[0] before k is added. + for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); + j = xc[0] += k; + for (k = 1; j >= 10; j /= 10, k++); + + // if i != k the length has increased. + if (i != k) { + x.e++; + if (xc[0] == BASE) xc[0] = 1; + } + + break; + } else { + xc[ni] += k; + if (xc[ni] != BASE) break; + xc[ni--] = 0; + k = 1; + } + } + } + + // Remove trailing zeros. + for (i = xc.length; xc[--i] === 0; xc.pop()); + } + + // Overflow? Infinity. + if (x.e > MAX_EXP) { + x.c = x.e = null; + + // Underflow? Zero. + } else if (x.e < MIN_EXP) { + x.c = [x.e = 0]; + } + } + + return x; + } + + + function valueOf(n) { + var str, + e = n.e; + + if (e === null) return n.toString(); + + str = coeffToString(n.c); + + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(str, e) + : toFixedPoint(str, e, '0'); + + return n.s < 0 ? '-' + str : str; + } + + + // PROTOTYPE/INSTANCE METHODS + + + /* + * Return a new BigNumber whose value is the absolute value of this BigNumber. + */ + P.absoluteValue = P.abs = function () { + var x = new BigNumber(this); + if (x.s < 0) x.s = 1; + return x; + }; + + + /* + * Return + * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), + * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), + * 0 if they have the same value, + * or null if the value of either is NaN. + */ + P.comparedTo = function (y, b) { + return compare(this, new BigNumber(y, b)); + }; + + + /* + * If dp is undefined or null or true or false, return the number of decimal places of the + * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * + * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * [dp] {number} Decimal places: integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.decimalPlaces = P.dp = function (dp, rm) { + var c, n, v, + x = this; + + if (dp != null) { + intCheck(dp, 0, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), dp + x.e + 1, rm); + } + + if (!(c = x.c)) return null; + n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; + + // Subtract the number of trailing zeros of the last number. + if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); + if (n < 0) n = 0; + + return n; + }; + + + /* + * n / 0 = I + * n / N = N + * n / I = 0 + * 0 / n = 0 + * 0 / 0 = N + * 0 / N = N + * 0 / I = 0 + * N / n = N + * N / 0 = N + * N / N = N + * N / I = N + * I / n = I + * I / 0 = I + * I / N = N + * I / I = N + * + * Return a new BigNumber whose value is the value of this BigNumber divided by the value of + * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. 
+ */ + P.dividedBy = P.div = function (y, b) { + return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); + }; + + + /* + * Return a new BigNumber whose value is the integer part of dividing the value of this + * BigNumber by the value of BigNumber(y, b). + */ + P.dividedToIntegerBy = P.idiv = function (y, b) { + return div(this, new BigNumber(y, b), 0, 1); + }; + + + /* + * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. + * + * If m is present, return the result modulo m. + * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. + * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. + * + * The modular power operation works efficiently when x, n, and m are integers, otherwise it + * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. + * + * n {number|string|BigNumber} The exponent. An integer. + * [m] {number|string|BigNumber} The modulus. + * + * '[BigNumber Error] Exponent not an integer: {n}' + */ + P.exponentiatedBy = P.pow = function (n, m) { + var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, + x = this; + + n = new BigNumber(n); + + // Allow NaN and ±Infinity, but not other non-integers. + if (n.c && !n.isInteger()) { + throw Error + (bignumberError + 'Exponent not an integer: ' + valueOf(n)); + } + + if (m != null) m = new BigNumber(m); + + // Exponent of MAX_SAFE_INTEGER is 15. + nIsBig = n.e > 14; + + // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. + if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + + // The sign of the result of pow when x is negative depends on the evenness of n. + // If +n overflows to ±Infinity, the evenness of n would be not be known. + y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); + return m ? y.mod(m) : y; + } + + nIsNeg = n.s < 0; + + if (m) { + + // x % m returns NaN if abs(m) is zero, or m is NaN. + if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); + + isModExp = !nIsNeg && x.isInteger() && m.isInteger(); + + if (isModExp) x = x.mod(m); + + // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. + // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. + } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 + // [1, 240000000] + ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 + // [80000000000000] [99999750000000] + : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { + + // If x is negative and n is odd, k = -0, else k = 0. + k = x.s < 0 && isOdd(n) ? -0 : 0; + + // If x >= 1, k = ±Infinity. + if (x.e > -1) k = 1 / k; + + // If n is negative return ±0, else return ±Infinity. + return new BigNumber(nIsNeg ? 1 / k : k); + + } else if (POW_PRECISION) { + + // Truncating each coefficient array to a length of k after each multiplication + // equates to truncating significant digits to POW_PRECISION + [28, 41], + // i.e. there will be a minimum of 28 guard digits retained. + k = mathceil(POW_PRECISION / LOG_BASE + 2); + } + + if (nIsBig) { + half = new BigNumber(0.5); + if (nIsNeg) n.s = 1; + nIsOdd = isOdd(n); + } else { + i = Math.abs(+valueOf(n)); + nIsOdd = i % 2; + } + + y = new BigNumber(ONE); + + // Performs 54 loop iterations for n of 9007199254740991. 
+ for (; ;) { + + if (nIsOdd) { + y = y.times(x); + if (!y.c) break; + + if (k) { + if (y.c.length > k) y.c.length = k; + } else if (isModExp) { + y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); + } + } + + if (i) { + i = mathfloor(i / 2); + if (i === 0) break; + nIsOdd = i % 2; + } else { + n = n.times(half); + round(n, n.e + 1, 1); + + if (n.e > 14) { + nIsOdd = isOdd(n); + } else { + i = +valueOf(n); + if (i === 0) break; + nIsOdd = i % 2; + } + } + + x = x.times(x); + + if (k) { + if (x.c && x.c.length > k) x.c.length = k; + } else if (isModExp) { + x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); + } + } + + if (isModExp) return y; + if (nIsNeg) y = ONE.div(y); + + return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer + * using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' + */ + P.integerValue = function (rm) { + var n = new BigNumber(this); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + return round(n, n.e + 1, rm); + }; + + + /* + * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), + * otherwise return false. + */ + P.isEqualTo = P.eq = function (y, b) { + return compare(this, new BigNumber(y, b)) === 0; + }; + + + /* + * Return true if the value of this BigNumber is a finite number, otherwise return false. + */ + P.isFinite = function () { + return !!this.c; + }; + + + /* + * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isGreaterThan = P.gt = function (y, b) { + return compare(this, new BigNumber(y, b)) > 0; + }; + + + /* + * Return true if the value of this BigNumber is greater than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isGreaterThanOrEqualTo = P.gte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; + + }; + + + /* + * Return true if the value of this BigNumber is an integer, otherwise return false. + */ + P.isInteger = function () { + return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; + }; + + + /* + * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isLessThan = P.lt = function (y, b) { + return compare(this, new BigNumber(y, b)) < 0; + }; + + + /* + * Return true if the value of this BigNumber is less than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isLessThanOrEqualTo = P.lte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; + }; + + + /* + * Return true if the value of this BigNumber is NaN, otherwise return false. + */ + P.isNaN = function () { + return !this.s; + }; + + + /* + * Return true if the value of this BigNumber is negative, otherwise return false. + */ + P.isNegative = function () { + return this.s < 0; + }; + + + /* + * Return true if the value of this BigNumber is positive, otherwise return false. + */ + P.isPositive = function () { + return this.s > 0; + }; + + + /* + * Return true if the value of this BigNumber is 0 or -0, otherwise return false. 
+ */ + P.isZero = function () { + return !!this.c && this.c[0] == 0; + }; + + + /* + * n - 0 = n + * n - N = N + * n - I = -I + * 0 - n = -n + * 0 - 0 = 0 + * 0 - N = N + * 0 - I = -I + * N - n = N + * N - 0 = N + * N - N = N + * N - I = N + * I - n = I + * I - 0 = I + * I - N = N + * I - I = N + * + * Return a new BigNumber whose value is the value of this BigNumber minus the value of + * BigNumber(y, b). + */ + P.minus = function (y, b) { + var i, j, t, xLTy, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.plus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Either Infinity? + if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); + + // Either zero? + if (!xc[0] || !yc[0]) { + + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : + + // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity + ROUNDING_MODE == 3 ? -0 : 0); + } + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Determine which is the bigger number. + if (a = xe - ye) { + + if (xLTy = a < 0) { + a = -a; + t = xc; + } else { + ye = xe; + t = yc; + } + + t.reverse(); + + // Prepend zeros to equalise exponents. + for (b = a; b--; t.push(0)); + t.reverse(); + } else { + + // Exponents equal. Check digit by digit. + j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; + + for (a = b = 0; b < j; b++) { + + if (xc[b] != yc[b]) { + xLTy = xc[b] < yc[b]; + break; + } + } + } + + // x < y? Point xc to the array of the bigger number. + if (xLTy) { + t = xc; + xc = yc; + yc = t; + y.s = -y.s; + } + + b = (j = yc.length) - (i = xc.length); + + // Append zeros to xc if shorter. + // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. + if (b > 0) for (; b--; xc[i++] = 0); + b = BASE - 1; + + // Subtract yc from xc. + for (; j > a;) { + + if (xc[--j] < yc[j]) { + for (i = j; i && !xc[--i]; xc[i] = b); + --xc[i]; + xc[j] += BASE; + } + + xc[j] -= yc[j]; + } + + // Remove leading zeros and adjust exponent accordingly. + for (; xc[0] == 0; xc.splice(0, 1), --ye); + + // Zero? + if (!xc[0]) { + + // Following IEEE 754 (2008) 6.3, + // n - n = +0 but n - n = -0 when rounding towards -Infinity. + y.s = ROUNDING_MODE == 3 ? -1 : 1; + y.c = [y.e = 0]; + return y; + } + + // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity + // for finite x and y. + return normalise(y, xc, ye); + }; + + + /* + * n % 0 = N + * n % N = N + * n % I = n + * 0 % n = 0 + * -0 % n = -0 + * 0 % 0 = N + * 0 % N = N + * 0 % I = 0 + * N % n = N + * N % 0 = N + * N % N = N + * N % I = N + * I % n = N + * I % 0 = N + * I % N = N + * I % I = N + * + * Return a new BigNumber whose value is the value of this BigNumber modulo the value of + * BigNumber(y, b). The result depends on the value of MODULO_MODE. + */ + P.modulo = P.mod = function (y, b) { + var q, s, + x = this; + + y = new BigNumber(y, b); + + // Return NaN if x is Infinity or NaN, or y is NaN or zero. + if (!x.c || !y.s || y.c && !y.c[0]) { + return new BigNumber(NaN); + + // Return x if y is Infinity or x is zero. 
+ } else if (!y.c || x.c && !x.c[0]) { + return new BigNumber(x); + } + + if (MODULO_MODE == 9) { + + // Euclidian division: q = sign(y) * floor(x / abs(y)) + // r = x - qy where 0 <= r < abs(y) + s = y.s; + y.s = 1; + q = div(x, y, 0, 3); + y.s = s; + q.s *= s; + } else { + q = div(x, y, 0, MODULO_MODE); + } + + y = x.minus(q.times(y)); + + // To match JavaScript %, ensure sign of zero is sign of dividend. + if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + + return y; + }; + + + /* + * n * 0 = 0 + * n * N = N + * n * I = I + * 0 * n = 0 + * 0 * 0 = 0 + * 0 * N = N + * 0 * I = N + * N * n = N + * N * 0 = N + * N * N = N + * N * I = N + * I * n = I + * I * 0 = N + * I * N = N + * I * I = I + * + * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value + * of BigNumber(y, b). + */ + P.multipliedBy = P.times = function (y, b) { + var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, + base, sqrtBase, + x = this, + xc = x.c, + yc = (y = new BigNumber(y, b)).c; + + // Either NaN, ±Infinity or ±0? + if (!xc || !yc || !xc[0] || !yc[0]) { + + // Return NaN if either is NaN, or one is 0 and the other is Infinity. + if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { + y.c = y.e = y.s = null; + } else { + y.s *= x.s; + + // Return ±Infinity if either is ±Infinity. + if (!xc || !yc) { + y.c = y.e = null; + + // Return ±0 if either is ±0. + } else { + y.c = [0]; + y.e = 0; + } + } + + return y; + } + + e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); + y.s *= x.s; + xcL = xc.length; + ycL = yc.length; + + // Ensure xc points to longer array and xcL to its length. + if (xcL < ycL) { + zc = xc; + xc = yc; + yc = zc; + i = xcL; + xcL = ycL; + ycL = i; + } + + // Initialise the result array with zeros. + for (i = xcL + ycL, zc = []; i--; zc.push(0)); + + base = BASE; + sqrtBase = SQRT_BASE; + + for (i = ycL; --i >= 0;) { + c = 0; + ylo = yc[i] % sqrtBase; + yhi = yc[i] / sqrtBase | 0; + + for (k = xcL, j = i + k; j > i;) { + xlo = xc[--k] % sqrtBase; + xhi = xc[k] / sqrtBase | 0; + m = yhi * xlo + xhi * ylo; + xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; + c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; + zc[j--] = xlo % base; + } + + zc[j] = c; + } + + if (c) { + ++e; + } else { + zc.splice(0, 1); + } + + return normalise(y, zc, e); + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber negated, + * i.e. multiplied by -1. + */ + P.negated = function () { + var x = new BigNumber(this); + x.s = -x.s || null; + return x; + }; + + + /* + * n + 0 = n + * n + N = N + * n + I = I + * 0 + n = n + * 0 + 0 = 0 + * 0 + N = N + * 0 + I = I + * N + n = N + * N + 0 = N + * N + N = N + * N + I = N + * I + n = I + * I + 0 = I + * I + N = N + * I + I = I + * + * Return a new BigNumber whose value is the value of this BigNumber plus the value of + * BigNumber(y, b). + */ + P.plus = function (y, b) { + var t, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.minus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Return ±Infinity if either ±Infinity. + if (!xc || !yc) return new BigNumber(a / 0); + + // Either zero? + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? 
x : a * 0); + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. + if (a = xe - ye) { + if (a > 0) { + ye = xe; + t = yc; + } else { + a = -a; + t = xc; + } + + t.reverse(); + for (; a--; t.push(0)); + t.reverse(); + } + + a = xc.length; + b = yc.length; + + // Point xc to the longer array, and b to the shorter length. + if (a - b < 0) { + t = yc; + yc = xc; + xc = t; + b = a; + } + + // Only start adding at yc.length - 1 as the further digits of xc can be ignored. + for (a = 0; b;) { + a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; + xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; + } + + if (a) { + xc = [a].concat(xc); + ++ye; + } + + // No need to check for zero, as +x + +y != 0 && -x + -y != 0 + // ye = MAX_EXP + 1 possible + return normalise(y, xc, ye); + }; + + + /* + * If sd is undefined or null or true or false, return the number of significant digits of + * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * If sd is true include integer-part trailing zeros in the count. + * + * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. + * boolean: whether to count integer-part trailing zeros: true or false. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.precision = P.sd = function (sd, rm) { + var c, n, v, + x = this; + + if (sd != null && sd !== !!sd) { + intCheck(sd, 1, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), sd, rm); + } + + if (!(c = x.c)) return null; + v = c.length - 1; + n = v * LOG_BASE + 1; + + if (v = c[v]) { + + // Subtract the number of trailing zeros of the last element. + for (; v % 10 == 0; v /= 10, n--); + + // Add the number of digits of the first element. + for (v = c[0]; v >= 10; v /= 10, n++); + } + + if (sd && x.e + 1 > n) n = x.e + 1; + + return n; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber shifted by k places + * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. + * + * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' + */ + P.shiftedBy = function (k) { + intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); + return this.times('1e' + k); + }; + + + /* + * sqrt(-n) = N + * sqrt(N) = N + * sqrt(-I) = N + * sqrt(I) = I + * sqrt(0) = 0 + * sqrt(-0) = -0 + * + * Return a new BigNumber whose value is the square root of the value of this BigNumber, + * rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.squareRoot = P.sqrt = function () { + var m, n, r, rep, t, + x = this, + c = x.c, + s = x.s, + e = x.e, + dp = DECIMAL_PLACES + 4, + half = new BigNumber('0.5'); + + // Negative/NaN/Infinity/zero? + if (s !== 1 || !c || !c[0]) { + return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); + } + + // Initial estimate. + s = Math.sqrt(+valueOf(x)); + + // Math.sqrt underflow/overflow? + // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
+ if (s == 0 || s == 1 / 0) { + n = coeffToString(c); + if ((n.length + e) % 2 == 0) n += '0'; + s = Math.sqrt(+n); + e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); + + if (s == 1 / 0) { + n = '5e' + e; + } else { + n = s.toExponential(); + n = n.slice(0, n.indexOf('e') + 1) + e; + } + + r = new BigNumber(n); + } else { + r = new BigNumber(s + ''); + } + + // Check for zero. + // r could be zero if MIN_EXP is changed after the this value was created. + // This would cause a division by zero (x/t) and hence Infinity below, which would cause + // coeffToString to throw. + if (r.c[0]) { + e = r.e; + s = e + dp; + if (s < 3) s = 0; + + // Newton-Raphson iteration. + for (; ;) { + t = r; + r = half.times(t.plus(div(x, t, dp, 1))); + + if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { + + // The exponent of r may here be one less than the final result exponent, + // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits + // are indexed correctly. + if (r.e < e) --s; + n = n.slice(s - 3, s + 1); + + // The 4th rounding digit may be in error by -1 so if the 4 rounding digits + // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the + // iteration. + if (n == '9999' || !rep && n == '4999') { + + // On the first iteration only, check to see if rounding up gives the + // exact result as the nines may infinitely repeat. + if (!rep) { + round(t, t.e + DECIMAL_PLACES + 2, 0); + + if (t.times(t).eq(x)) { + r = t; + break; + } + } + + dp += 4; + s += 4; + rep = 1; + } else { + + // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact + // result. If not, then there are further digits and m will be truthy. + if (!+n || !+n.slice(1) && n.charAt(0) == '5') { + + // Truncate to the first rounding digit. + round(r, r.e + DECIMAL_PLACES + 2, 1); + m = !r.times(r).eq(x); + } + + break; + } + } + } + } + + return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); + }; + + + /* + * Return a string representing the value of this BigNumber in exponential notation and + * rounded using ROUNDING_MODE to dp fixed decimal places. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toExponential = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp++; + } + return format(this, dp, rm, 1); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounding + * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', + * but e.g. (-0.00001).toFixed(0) is '-0'. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toFixed = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp = dp + this.e + 1; + } + return format(this, dp, rm); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounded + * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties + * of the format or FORMAT object (see BigNumber.set). + * + * The formatting object may contain some or all of the properties shown below. 
+ * + * FORMAT = { + * prefix: '', + * groupSize: 3, + * secondaryGroupSize: 0, + * groupSeparator: ',', + * decimalSeparator: '.', + * fractionGroupSize: 0, + * fractionGroupSeparator: '\xA0', // non-breaking space + * suffix: '' + * }; + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * [format] {object} Formatting options. See FORMAT pbject above. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + * '[BigNumber Error] Argument not an object: {format}' + */ + P.toFormat = function (dp, rm, format) { + var str, + x = this; + + if (format == null) { + if (dp != null && rm && typeof rm == 'object') { + format = rm; + rm = null; + } else if (dp && typeof dp == 'object') { + format = dp; + dp = rm = null; + } else { + format = FORMAT; + } + } else if (typeof format != 'object') { + throw Error + (bignumberError + 'Argument not an object: ' + format); + } + + str = x.toFixed(dp, rm); + + if (x.c) { + var i, + arr = str.split('.'), + g1 = +format.groupSize, + g2 = +format.secondaryGroupSize, + groupSeparator = format.groupSeparator || '', + intPart = arr[0], + fractionPart = arr[1], + isNeg = x.s < 0, + intDigits = isNeg ? intPart.slice(1) : intPart, + len = intDigits.length; + + if (g2) { + i = g1; + g1 = g2; + g2 = i; + len -= i; + } + + if (g1 > 0 && len > 0) { + i = len % g1 || g1; + intPart = intDigits.substr(0, i); + for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); + if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); + if (isNeg) intPart = '-' + intPart; + } + + str = fractionPart + ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) + ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), + '$&' + (format.fractionGroupSeparator || '')) + : fractionPart) + : intPart; + } + + return (format.prefix || '') + str + (format.suffix || ''); + }; + + + /* + * Return an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to the specified + * maximum denominator. If a maximum denominator is not specified, the denominator will be + * the lowest value necessary to represent the number exactly. + * + * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. + * + * '[BigNumber Error] Argument {not an integer|out of range} : {md}' + */ + P.toFraction = function (md) { + var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, + x = this, + xc = x.c; + + if (md != null) { + n = new BigNumber(md); + + // Throw if md is less than one or is not an integer, unless it is Infinity. + if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { + throw Error + (bignumberError + 'Argument ' + + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); + } + } + + if (!xc) return new BigNumber(x); + + d = new BigNumber(ONE); + n1 = d0 = new BigNumber(ONE); + d1 = n0 = new BigNumber(ONE); + s = coeffToString(xc); + + // Determine initial denominator. + // d is a power of 10 and the minimum max denominator that specifies the value exactly. + e = d.e = s.length - x.e - 1; + d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; + md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; + + exp = MAX_EXP; + MAX_EXP = 1 / 0; + n = new BigNumber(s); + + // n0 = d1 = 0 + n0.c[0] = 0; + + for (; ;) { + q = div(n, d, 0, 1); + d2 = d0.plus(q.times(d1)); + if (d2.comparedTo(md) == 1) break; + d0 = d1; + d1 = d2; + n1 = n0.plus(q.times(d2 = n1)); + n0 = d2; + d = n.minus(q.times(d2 = d)); + n = d2; + } + + d2 = div(md.minus(d0), d1, 0, 1); + n0 = n0.plus(d2.times(n1)); + d0 = d0.plus(d2.times(d1)); + n0.s = n1.s = x.s; + e = e * 2; + + // Determine which fraction is closer to x, n0/d0 or n1/d1 + r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( + div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; + + MAX_EXP = exp; + + return r; + }; + + + /* + * Return the value of this BigNumber converted to a number primitive. + */ + P.toNumber = function () { + return +valueOf(this); + }; + + + /* + * Return a string representing the value of this BigNumber rounded to sd significant digits + * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits + * necessary to represent the integer part of the value in fixed-point notation, then use + * exponential notation. + * + * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.toPrecision = function (sd, rm) { + if (sd != null) intCheck(sd, 1, MAX); + return format(this, sd, rm, 2); + }; + + + /* + * Return a string representing the value of this BigNumber in base b, or base 10 if b is + * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and + * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent + * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than + * TO_EXP_NEG, return exponential notation. + * + * [b] {number} Integer, 2 to ALPHABET.length inclusive. + * + * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + */ + P.toString = function (b) { + var str, + n = this, + s = n.s, + e = n.e; + + // Infinity or NaN? + if (e === null) { + if (s) { + str = 'Infinity'; + if (s < 0) str = '-' + str; + } else { + str = 'NaN'; + } + } else { + if (b == null) { + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(coeffToString(n.c), e) + : toFixedPoint(coeffToString(n.c), e, '0'); + } else if (b === 10 && alphabetHasNormalDecimalDigits) { + n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); + str = toFixedPoint(coeffToString(n.c), n.e, '0'); + } else { + intCheck(b, 2, ALPHABET.length, 'Base'); + str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); + } + + if (s < 0 && n.c[0]) str = '-' + str; + } + + return str; + }; + + + /* + * Return as toString, but do not accept a base argument, and include the minus sign for + * negative zero. + */ + P.valueOf = P.toJSON = function () { + return valueOf(this); + }; + + + P._isBigNumber = true; + + P[Symbol.toStringTag] = 'BigNumber'; + + // Node.js v10.12.0+ + P[Symbol.for('nodejs.util.inspect.custom')] = P.valueOf; + + if (configObject != null) BigNumber.set(configObject); + + return BigNumber; +} + + +// PRIVATE HELPER FUNCTIONS + +// These functions don't need access to variables, +// e.g. DECIMAL_PLACES, in the scope of the `clone` function above. + + +function bitFloor(n) { + var i = n | 0; + return n > 0 || n === i ? 
i : i - 1; +} + + +// Return a coefficient array as a string of base 10 digits. +function coeffToString(a) { + var s, z, + i = 1, + j = a.length, + r = a[0] + ''; + + for (; i < j;) { + s = a[i++] + ''; + z = LOG_BASE - s.length; + for (; z--; s = '0' + s); + r += s; + } + + // Determine trailing zeros. + for (j = r.length; r.charCodeAt(--j) === 48;); + + return r.slice(0, j + 1 || 1); +} + + +// Compare the value of BigNumbers x and y. +function compare(x, y) { + var a, b, + xc = x.c, + yc = y.c, + i = x.s, + j = y.s, + k = x.e, + l = y.e; + + // Either NaN? + if (!i || !j) return null; + + a = xc && !xc[0]; + b = yc && !yc[0]; + + // Either zero? + if (a || b) return a ? b ? 0 : -j : i; + + // Signs differ? + if (i != j) return i; + + a = i < 0; + b = k == l; + + // Either Infinity? + if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; + + // Compare exponents. + if (!b) return k > l ^ a ? 1 : -1; + + j = (k = xc.length) < (l = yc.length) ? k : l; + + // Compare digit by digit. + for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; + + // Compare lengths. + return k == l ? 0 : k > l ^ a ? 1 : -1; +} + + +/* + * Check that n is a primitive number, an integer, and in range, otherwise throw. + */ +function intCheck(n, min, max, name) { + if (n < min || n > max || n !== mathfloor(n)) { + throw Error + (bignumberError + (name || 'Argument') + (typeof n == 'number' + ? n < min || n > max ? ' out of range: ' : ' not an integer: ' + : ' not a primitive number: ') + String(n)); + } +} + + +// Assumes finite n. +function isOdd(n) { + var k = n.c.length - 1; + return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; +} + + +function toExponential(str, e) { + return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + + (e < 0 ? 'e' : 'e+') + e; +} + + +function toFixedPoint(str, e, z) { + var len, zs; + + // Negative exponent? + if (e < 0) { + + // Prepend zeros. + for (zs = z + '.'; ++e; zs += z); + str = zs + str; + + // Positive exponent + } else { + len = str.length; + + // Append zeros. + if (++e > len) { + for (zs = z, e -= len; --e; zs += z); + str += zs; + } else if (e < len) { + str = str.slice(0, e) + '.' + str.slice(e); + } + } + + return str; +} + + +// EXPORT + + +export var BigNumber = clone(); + +export default BigNumber; diff --git a/node_modules/bignumber.js/doc/API.html b/node_modules/bignumber.js/doc/API.html new file mode 100644 index 0000000..bb2b7dd --- /dev/null +++ b/node_modules/bignumber.js/doc/API.html @@ -0,0 +1,2249 @@ + + + + + + +bignumber.js API + + + + + + +
+ +

bignumber.js

+ +

A JavaScript library for arbitrary-precision arithmetic.

+

Hosted on GitHub.

+ +

API

+ +

+ See the README on GitHub for a + quick-start introduction. +

+

+ In all examples below, var and semicolons are not shown, and if a commented-out + value is in quotes it means toString has been called on the preceding expression. +

+ + +

CONSTRUCTOR

+ + +
+ BigNumber(n [, base]) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ base: number: integer, 2 to 36 inclusive. (See + ALPHABET to extend this range). +

+

+ Returns a new instance of a BigNumber object with value n, where n + is a numeric value in the specified base, or base 10 if + base is omitted or is null or undefined. +

+

+ Note that the BigNumber constructor accepts an n of type number purely as a convenience so that string quotes don't have to be typed when entering literal values, and that it is the toString value of n that is used rather than its underlying binary floating point value converted to decimal.

+
+x = new BigNumber(123.4567)                // '123.4567'
+// 'new' is optional
+y = BigNumber(x)                           // '123.4567'
+

+ If n is a base 10 value it can be in normal or exponential notation. + Values in other bases must be in normal notation. Values in any base can have fraction digits, + i.e. digits after the decimal point. +

+
+new BigNumber(43210)                       // '43210'
+new BigNumber('4.321e+4')                  // '43210'
+new BigNumber('-735.0918e-430')            // '-7.350918e-428'
+new BigNumber('123412421.234324', 5)       // '607236.557696'
+

+ Signed 0, signed Infinity and NaN are supported. +

+
+new BigNumber('-Infinity')                 // '-Infinity'
+new BigNumber(NaN)                         // 'NaN'
+new BigNumber(-0)                          // '0'
+new BigNumber('.5')                        // '0.5'
+new BigNumber('+2')                        // '2'
+

String values in hexadecimal literal form, e.g. '0xff' or '0xFF' (but not '0xfF'), are valid, as are string values with the octal and binary prefixes '0o' and '0b'. String values in octal literal form without the prefix will be interpreted as decimals, e.g. '011' is interpreted as 11, not 9.

+
+new BigNumber(-10110100.1, 2)              // '-180.5'
+new BigNumber('-0b10110100.1')             // '-180.5'
+new BigNumber('ff.8', 16)                  // '255.5'
+new BigNumber('0xff.8')                    // '255.5'
+

+ If a base is specified, n is rounded according to the current + DECIMAL_PLACES and + ROUNDING_MODE settings. This includes base + 10 so don't include a base parameter for decimal values unless + this behaviour is wanted. +

+
BigNumber.config({ DECIMAL_PLACES: 5 })
+new BigNumber(1.23456789)                  // '1.23456789'
+new BigNumber(1.23456789, 10)              // '1.23457'
+

An error is thrown if base is invalid. See Errors.

+

+ There is no limit to the number of digits of a value of type string (other than + that of JavaScript's maximum array size). See RANGE to set + the maximum and minimum possible exponent value of a BigNumber. +

+
+new BigNumber('5032485723458348569331745.33434346346912144534543')
+new BigNumber('4.321e10000000')
+

BigNumber NaN is returned if n is invalid + (unless BigNumber.DEBUG is true, see below).

+
+new BigNumber('.1*')                       // 'NaN'
+new BigNumber('blurgh')                    // 'NaN'
+new BigNumber(9, 2)                        // 'NaN'
+

+ To aid in debugging, if BigNumber.DEBUG is true then an error will + be thrown on an invalid n. An error will also be thrown if n is of + type number and has more than 15 significant digits, as calling + toString or valueOf on + these numbers may not result in the intended value. +

+
+console.log(823456789123456.3)            //  823456789123456.2
+new BigNumber(823456789123456.3)          // '823456789123456.2'
+BigNumber.DEBUG = true
+// '[BigNumber Error] Number primitive has more than 15 significant digits'
+new BigNumber(823456789123456.3)
+// '[BigNumber Error] Not a base 2 number'
+new BigNumber(9, 2)
+

+ A BigNumber can also be created from an object literal. + Use isBigNumber to check that it is well-formed. +

+
new BigNumber({ s: 1, e: 2, c: [ 777, 12300000000000 ], _isBigNumber: true })    // '777.123'
+ + + + +

Methods

+

The static methods of a BigNumber constructor.

+ + + + +
clone + .clone([object]) ⇒ BigNumber constructor +
+

object: object

+

+ Returns a new independent BigNumber constructor with configuration as described by + object (see config), or with the default + configuration if object is null or undefined. +

+

+ Throws if object is not an object. See Errors. +

+
BigNumber.config({ DECIMAL_PLACES: 5 })
+BN = BigNumber.clone({ DECIMAL_PLACES: 9 })
+
+x = new BigNumber(1)
+y = new BN(1)
+
+x.div(3)                        // 0.33333
+y.div(3)                        // 0.333333333
+
+// BN = BigNumber.clone({ DECIMAL_PLACES: 9 }) is equivalent to:
+BN = BigNumber.clone()
+BN.config({ DECIMAL_PLACES: 9 })
+ + + +
config / set([object]) ⇒ object
+

+ object: object: an object that contains some or all of the following + properties. +

+

Configures the settings for this particular BigNumber constructor.

+ +
+
DECIMAL_PLACES
+
+ number: integer, 0 to 1e+9 inclusive
+ Default value: 20 +
+
+ The maximum number of decimal places of the results of operations involving + division, i.e. division, square root and base conversion operations, and power operations + with negative exponents.
+
+
+
BigNumber.config({ DECIMAL_PLACES: 5 })
+BigNumber.set({ DECIMAL_PLACES: 5 })    // equivalent
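+ For example (an illustrative sketch, assuming the 5-decimal-place setting above and the
+ default ROUNDING_MODE), results of operations involving division are limited like this:
+
+new BigNumber(1).dividedBy(3)        // '0.33333'
+new BigNumber(2).squareRoot()        // '1.41421'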
+
+ + + +
ROUNDING_MODE
+
+ number: integer, 0 to 8 inclusive
+ Default value: 4 (ROUND_HALF_UP) +
+
+ The rounding mode used in the above operations and the default rounding mode of + decimalPlaces, + precision, + toExponential, + toFixed, + toFormat and + toPrecision. +
+
The modes are available as enumerated properties of the BigNumber constructor.
+
+
BigNumber.config({ ROUNDING_MODE: 0 })
+BigNumber.set({ ROUNDING_MODE: BigNumber.ROUND_UP })    // equivalent
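+ As a rough illustration of the effect (assuming DECIMAL_PLACES is set to 2), the same
+ quotient rounds differently under different modes:
+
+BigNumber.config({ DECIMAL_PLACES: 2, ROUNDING_MODE: BigNumber.ROUND_UP })
+new BigNumber(1).dividedBy(3)        // '0.34'
+BigNumber.config({ ROUNDING_MODE: BigNumber.ROUND_DOWN })
+new BigNumber(1).dividedBy(3)        // '0.33'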
+
+ + + +
EXPONENTIAL_AT
+
+ number: integer, magnitude 0 to 1e+9 inclusive, or +
+ number[]: [ integer -1e+9 to 0 inclusive, integer + 0 to 1e+9 inclusive ]
+ Default value: [-7, 20] +
+
+ The exponent value(s) at which toString returns exponential notation. +
+
+ If a single number is assigned, the value is the exponent magnitude.
+ If an array of two numbers is assigned then the first number is the negative exponent + value at and beneath which exponential notation is used, and the second number is the + positive exponent value at and above which the same. +
+
+ For example, to emulate JavaScript numbers in terms of the exponent values at which they + begin to use exponential notation, use [-7, 20]. +
+
+
BigNumber.config({ EXPONENTIAL_AT: 2 })
+new BigNumber(12.3)         // '12.3'        e is only 1
+new BigNumber(123)          // '1.23e+2'
+new BigNumber(0.123)        // '0.123'       e is only -1
+new BigNumber(0.0123)       // '1.23e-2'
+
+BigNumber.config({ EXPONENTIAL_AT: [-7, 20] })
+new BigNumber(123456789)    // '123456789'   e is only 8
+new BigNumber(0.000000123)  // '1.23e-7'
+
+// Almost never return exponential notation:
+BigNumber.config({ EXPONENTIAL_AT: 1e+9 })
+
+// Always return exponential notation:
+BigNumber.config({ EXPONENTIAL_AT: 0 })
+
+
+ Regardless of the value of EXPONENTIAL_AT, the toFixed method + will always return a value in normal notation and the toExponential method + will always return a value in exponential form. +
+
+ Calling toString with a base argument, e.g. toString(10), will + also always return normal notation. +
+ + + +
RANGE
+
+ number: integer, magnitude 1 to 1e+9 inclusive, or +
+ number[]: [ integer -1e+9 to -1 inclusive, integer + 1 to 1e+9 inclusive ]
+ Default value: [-1e+9, 1e+9] +
+
+ The exponent value(s) beyond which overflow to Infinity and underflow to + zero occurs. +
+
+ If a single number is assigned, it is the maximum exponent magnitude: values with a positive exponent of greater magnitude become Infinity and those with a negative exponent of greater magnitude become zero.
+ If an array of two numbers is assigned then the first number is the negative exponent + limit and the second number is the positive exponent limit. +
+
+ For example, to emulate JavaScript numbers in terms of the exponent values at which they + become zero and Infinity, use [-324, 308]. +
+
+
BigNumber.config({ RANGE: 500 })
+BigNumber.config().RANGE     // [ -500, 500 ]
+new BigNumber('9.999e499')   // '9.999e+499'
+new BigNumber('1e500')       // 'Infinity'
+new BigNumber('1e-499')      // '1e-499'
+new BigNumber('1e-500')      // '0'
+
+BigNumber.config({ RANGE: [-3, 4] })
+new BigNumber(99999)         // '99999'      e is only 4
+new BigNumber(100000)        // 'Infinity'   e is 5
+new BigNumber(0.001)         // '0.001'      e is only -3
+new BigNumber(0.0001)        // '0'          e is -4
+
+
+ The largest possible magnitude of a finite BigNumber is + 9.999...e+1000000000.
+ The smallest possible magnitude of a non-zero BigNumber is 1e-1000000000. +
+ + + +
CRYPTO
+
+ boolean: true or false.
+ Default value: false +
+
+ The value that determines whether cryptographically-secure pseudo-random number + generation is used. +
+
+ If CRYPTO is set to true then the + random method will generate random digits using + crypto.getRandomValues in browsers that support it, or + crypto.randomBytes if using Node.js. +
+
+ If neither function is supported by the host environment then attempting to set + CRYPTO to true will fail and an exception will be thrown. +
+
+ If CRYPTO is false then the source of randomness used will be + Math.random (which is assumed to generate at least 30 bits of + randomness). +
+
See random.
+
+
+// Node.js
+const crypto = require('crypto');   // CommonJS
+import * as crypto from 'crypto';   // ES module
+
+global.crypto = crypto;
+
+BigNumber.config({ CRYPTO: true })
+BigNumber.config().CRYPTO       // true
+BigNumber.random()              // 0.54340758610486147524
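+ Since setting CRYPTO to true throws when no supported crypto function is available, a
+ defensive configuration can be sketched as follows (illustrative only):
+
+try {
+  BigNumber.config({ CRYPTO: true })
+} catch (e) {
+  // fall back to Math.random if the host provides no secure source of randomness
+  BigNumber.config({ CRYPTO: false })
+}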
+
+ + + +
MODULO_MODE
+
+ number: integer, 0 to 9 inclusive
+ Default value: 1 (ROUND_DOWN) +
+
The modulo mode used when calculating the modulus: a mod n.
+
+ The quotient, q = a / n, is calculated according to the + ROUNDING_MODE that corresponds to the chosen + MODULO_MODE. +
+
The remainder, r, is calculated as: r = a - n * q.
+
+ The modes that are most commonly used for the modulus/remainder operation are shown in + the following table. Although the other rounding modes can be used, they may not give + useful results. +
+
Property          Value   Description
ROUND_UP          0       The remainder is positive if the dividend is negative, otherwise it is negative.
ROUND_DOWN        1       The remainder has the same sign as the dividend. This uses 'truncating division' and matches the behaviour of JavaScript's remainder operator %.
ROUND_FLOOR       3       The remainder has the same sign as the divisor. This matches Python's % operator.
ROUND_HALF_EVEN   6       The IEEE 754 remainder function.
EUCLID            9       The remainder is always positive. Euclidian division: q = sign(n) * floor(a / abs(n))
+
+
+ The rounding/modulo modes are available as enumerated properties of the BigNumber + constructor. +
+
See modulo.
+
+
BigNumber.config({ MODULO_MODE: BigNumber.EUCLID })
+BigNumber.config({ MODULO_MODE: 9 })          // equivalent
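+ For example (illustrative values), the default truncating mode and the EUCLID mode
+ described in the table above treat a negative dividend differently:
+
+BigNumber.config({ MODULO_MODE: BigNumber.ROUND_DOWN })
+new BigNumber(-5).modulo(3)          // '-2'  same sign as the dividend, like JavaScript's %
+BigNumber.config({ MODULO_MODE: BigNumber.EUCLID })
+new BigNumber(-5).modulo(3)          // '1'   remainder is always positive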
+
+ + + +
POW_PRECISION
+
+ number: integer, 0 to 1e+9 inclusive.
+ Default value: 0 +
+
+ The maximum precision, i.e. number of significant digits, of the result of the power + operation (unless a modulus is specified). +
+
If set to 0, the number of significant digits will not be limited.
+
See exponentiatedBy.
+
BigNumber.config({ POW_PRECISION: 100 })
+ + + +
FORMAT
+
object
+
+ The FORMAT object configures the format of the string returned by the + toFormat method. +
+
+ The example below shows the properties of the FORMAT object that are + recognised, and their default values. +
+
+ Unlike the other configuration properties, the values of the properties of the + FORMAT object will not be checked for validity. The existing + FORMAT object will simply be replaced by the object that is passed in. + The object can include any number of the properties shown below. +
+
See toFormat for examples of usage.
+
+
+BigNumber.config({
+  FORMAT: {
+    // string to prepend
+    prefix: '',
+    // decimal separator
+    decimalSeparator: '.',
+    // grouping separator of the integer part
+    groupSeparator: ',',
+    // primary grouping size of the integer part
+    groupSize: 3,
+    // secondary grouping size of the integer part
+    secondaryGroupSize: 0,
+    // grouping separator of the fraction part
+    fractionGroupSeparator: ' ',
+    // grouping size of the fraction part
+    fractionGroupSize: 0,
+    // string to append
+    suffix: ''
+  }
+});
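+ With the default values shown above, toFormat groups the integer part in threes, for
+ example (illustrative values; see toFormat for the full set of examples):
+
+x = new BigNumber(123456789.123)
+x.toFormat()                         // '123,456,789.123'
+x.toFormat(1)                        // '123,456,789.1'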
+
+ + + +
ALPHABET
+
+ string
+ Default value: '0123456789abcdefghijklmnopqrstuvwxyz' +
+
+ The alphabet used for base conversion. The length of the alphabet corresponds to the + maximum value of the base argument that can be passed to the + BigNumber constructor or + toString. +
+
+ There is no maximum length for the alphabet, but it must be at least 2 characters long, and + it must not contain whitespace or a repeated character, or the sign indicators + '+' and '-', or the decimal separator '.'. +
+
+
// duodecimal (base 12)
+BigNumber.config({ ALPHABET: '0123456789TE' })
+x = new BigNumber('T', 12)
+x.toString()                // '10'
+x.toString(12)              // 'T'
+
+ + + +
+

+

Returns an object with the above properties and their current values.

+

+ Throws if object is not an object, or if an invalid value is assigned to + one or more of the above properties. See Errors. +

+
+BigNumber.config({
+  DECIMAL_PLACES: 40,
+  ROUNDING_MODE: BigNumber.ROUND_HALF_CEIL,
+  EXPONENTIAL_AT: [-10, 20],
+  RANGE: [-500, 500],
+  CRYPTO: true,
+  MODULO_MODE: BigNumber.ROUND_FLOOR,
+  POW_PRECISION: 80,
+  FORMAT: {
+    groupSize: 3,
+    groupSeparator: ' ',
+    decimalSeparator: ','
+  },
+  ALPHABET: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_'
+});
+
+obj = BigNumber.config();
+obj.DECIMAL_PLACES        // 40
+obj.RANGE                 // [-500, 500]
+ + + +
+ isBigNumber.isBigNumber(value) ⇒ boolean +
+

value: any

+

+ Returns true if value is a BigNumber instance, otherwise returns + false. +

+
x = 42
+y = new BigNumber(x)
+
+BigNumber.isBigNumber(x)             // false
+y instanceof BigNumber               // true
+BigNumber.isBigNumber(y)             // true
+
+BN = BigNumber.clone();
+z = new BN(x)
+z instanceof BigNumber               // false
+BigNumber.isBigNumber(z)             // true
+

+ If value is a BigNumber instance and BigNumber.DEBUG is true, + then this method will also check if value is well-formed, and throw if it is not. + See Errors. +

+

+ The check can be useful if creating a BigNumber from an object literal. + See BigNumber. +

+
+x = new BigNumber(10)
+
+// Change x.c to an illegitimate value.
+x.c = NaN
+
+BigNumber.DEBUG = false
+
+// No error.
+BigNumber.isBigNumber(x)    // true
+
+BigNumber.DEBUG = true
+
+// Error.
+BigNumber.isBigNumber(x)    // '[BigNumber Error] Invalid BigNumber'
+ + + +
maximum.max(n...) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the maximum of the arguments. +

+

The return value is always exact and unrounded.

+
x = new BigNumber('3257869345.0378653')
+BigNumber.maximum(4e9, x, '123456789.9')      // '4000000000'
+
+arr = [12, '13', new BigNumber(14)]
+BigNumber.max.apply(null, arr)                // '14'
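In environments that support ES2015 spread syntax, the apply call above can be written more directly:

BigNumber.max(...arr)                         // '14'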
+ + + +
minimum.min(n...) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the minimum of the arguments. +

+

The return value is always exact and unrounded.

+
x = new BigNumber('3257869345.0378653')
+BigNumber.minimum(4e9, x, '123456789.9')      // '123456789.9'
+
+arr = [2, new BigNumber(-14), '-15.9999', -12]
+BigNumber.min.apply(null, arr)                // '-15.9999'
+ + + +
+ random.random([dp]) ⇒ BigNumber +
+

dp: number: integer, 0 to 1e+9 inclusive

+

+ Returns a new BigNumber with a pseudo-random value equal to or greater than 0 and + less than 1. +

+

+ The return value will have dp decimal places (or less if trailing zeros are + produced).
+ If dp is omitted then the number of decimal places will default to the current + DECIMAL_PLACES setting. +

+

+ Depending on the value of this BigNumber constructor's + CRYPTO setting and the support for the + crypto object in the host environment, the random digits of the return value are + generated by either Math.random (fastest), crypto.getRandomValues + (Web Cryptography API in recent browsers) or crypto.randomBytes (Node.js). +

+

+ To be able to set CRYPTO to true when using + Node.js, the crypto object must be available globally: +

+
// Node.js
+const crypto = require('crypto');   // CommonJS
+import * as crypto from 'crypto';   // ES module
+global.crypto = crypto;
+

+ If CRYPTO is true, i.e. one of the + crypto methods is to be used, the value of a returned BigNumber should be + cryptographically-secure and statistically indistinguishable from a random value. +

+

+ Throws if dp is invalid. See Errors. +

+
BigNumber.config({ DECIMAL_PLACES: 10 })
+BigNumber.random()              // '0.4117936847'
+BigNumber.random(20)            // '0.78193327636914089009'
+ + + +
sum.sum(n...) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ See BigNumber for further parameter details. +

+

Returns a BigNumber whose value is the sum of the arguments.

+

The return value is always exact and unrounded.

+
x = new BigNumber('3257869345.0378653')
+BigNumber.sum(4e9, x, '123456789.9')      // '7381326134.9378653'
+
+arr = [2, new BigNumber(14), '15.9999', 12]
+BigNumber.sum.apply(null, arr)            // '43.9999'
+ + + +

Properties

+

+ The library's enumerated rounding modes are stored as properties of the constructor.
+ (They are not referenced internally by the library itself.) +

+

+ Rounding modes 0 to 6 (inclusive) are the same as those of Java's + BigDecimal class. +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Property          Value  Description
ROUND_UP          0      Rounds away from zero
ROUND_DOWN        1      Rounds towards zero
ROUND_CEIL        2      Rounds towards Infinity
ROUND_FLOOR       3      Rounds towards -Infinity
ROUND_HALF_UP     4      Rounds towards nearest neighbour. If equidistant, rounds away from zero
ROUND_HALF_DOWN   5      Rounds towards nearest neighbour. If equidistant, rounds towards zero
ROUND_HALF_EVEN   6      Rounds towards nearest neighbour. If equidistant, rounds towards even neighbour
ROUND_HALF_CEIL   7      Rounds towards nearest neighbour. If equidistant, rounds towards Infinity
ROUND_HALF_FLOOR  8      Rounds towards nearest neighbour. If equidistant, rounds towards -Infinity
+
+BigNumber.config({ ROUNDING_MODE: BigNumber.ROUND_CEIL })
+BigNumber.config({ ROUNDING_MODE: 2 })     // equivalent
+ +
DEBUG
+

undefined|false|true

+

+ If BigNumber.DEBUG is set true then an error will be thrown + if this BigNumber constructor receives an invalid value, such as + a value of type number with more than 15 significant digits. + See BigNumber. +

+

+ An error will also be thrown if the isBigNumber + method receives a BigNumber that is not well-formed. + See isBigNumber. +

+
BigNumber.DEBUG = true
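With DEBUG enabled as above, for example (the message is the one listed in the Errors table below):

// Throws '[BigNumber Error] Number primitive has more than 15 significant digits'
x = new BigNumber(123.456789012345678)

// Pass the value as a string to avoid the check (and the loss of precision it guards against)
x = new BigNumber('123.456789012345678')     // '123.456789012345678'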
+ + +

INSTANCE

+ + +

Methods

+

The methods inherited by a BigNumber instance from its constructor's prototype object.

+

A BigNumber is immutable in the sense that it is not changed by its methods.

+

+ The treatment of ±0, ±Infinity and NaN is + consistent with how JavaScript treats these values. +

+

Many method names have a shorter alias.

+ + + +
absoluteValue.abs() ⇒ BigNumber
+

+ Returns a BigNumber whose value is the absolute value, i.e. the magnitude, of the value of + this BigNumber. +

+

The return value is always exact and unrounded.

+
+x = new BigNumber(-0.8)
+y = x.absoluteValue()           // '0.8'
+z = y.abs()                     // '0.8'
+ + + +
+ comparedTo.comparedTo(n [, base]) ⇒ number +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+ + + + + + + + + + + + + + + + + + +
Returns
   1    If the value of this BigNumber is greater than the value of n
  -1    If the value of this BigNumber is less than the value of n
   0    If this BigNumber and n have the same value
  null  If the value of either this BigNumber or n is NaN
+
+x = new BigNumber(Infinity)
+y = new BigNumber(5)
+x.comparedTo(y)                 // 1
+x.comparedTo(x.minus(1))        // 0
+y.comparedTo(NaN)               // null
+y.comparedTo('110', 2)          // -1
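Because it returns -1, 0 or 1 for finite, non-NaN values, comparedTo can also be used as a comparator for Array.prototype.sort, for example:

arr = [new BigNumber(7), new BigNumber('2.5'), new BigNumber(-1)]
arr.sort(function (a, b) { return a.comparedTo(b) })
arr.join(', ')                  // '-1, 2.5, 7'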
+ + + +
+ decimalPlaces.dp([dp [, rm]]) ⇒ BigNumber|number +
+

+ dp: number: integer, 0 to 1e+9 inclusive
+ rm: number: integer, 0 to 8 inclusive +

+

+ If dp is a number, returns a BigNumber whose value is the value of this BigNumber + rounded by rounding mode rm to a maximum of dp decimal places. +

+

+ If dp is omitted, or is null or undefined, the return + value is the number of decimal places of the value of this BigNumber, or null if + the value of this BigNumber is ±Infinity or NaN. +

+

+ If rm is omitted, or is null or undefined, + ROUNDING_MODE is used. +

+

+ Throws if dp or rm is invalid. See Errors. +

+
+x = new BigNumber(1234.56)
+x.decimalPlaces(1)                     // '1234.6'
+x.dp()                                 // 2
+x.decimalPlaces(2)                     // '1234.56'
+x.dp(10)                               // '1234.56'
+x.decimalPlaces(0, 1)                  // '1234'
+x.dp(0, 6)                             // '1235'
+x.decimalPlaces(1, 1)                  // '1234.5'
+x.dp(1, BigNumber.ROUND_HALF_EVEN)     // '1234.6'
+x                                      // '1234.56'
+y = new BigNumber('9.9e-101')
+y.dp()                                 // 102
+ + + +
dividedBy.div(n [, base]) ⇒ BigNumber +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the value of this BigNumber divided by + n, rounded according to the current + DECIMAL_PLACES and + ROUNDING_MODE settings. +

+
+x = new BigNumber(355)
+y = new BigNumber(113)
+x.dividedBy(y)                  // '3.14159292035398230088'
+x.div(5)                        // '71'
+x.div(47, 16)                   // '5'
+ + + +
+ dividedToIntegerBy.idiv(n [, base]) ⇒ + BigNumber +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the integer part of dividing the value of this BigNumber by + n. +

+
+x = new BigNumber(5)
+y = new BigNumber(3)
+x.dividedToIntegerBy(y)         // '1'
+x.idiv(0.7)                     // '7'
+x.idiv('0.f', 16)               // '5'
+ + + +
+ exponentiatedBy.pow(n [, m]) ⇒ BigNumber +
+

+ n: number|string|BigNumber: integer
+ m: number|string|BigNumber +

+

+ Returns a BigNumber whose value is the value of this BigNumber exponentiated by + n, i.e. raised to the power n, and optionally modulo a modulus + m. +

+

+ Throws if n is not an integer. See Errors. +

+

+ If n is negative the result is rounded according to the current + DECIMAL_PLACES and + ROUNDING_MODE settings. +

+

As the number of digits of the result of the power operation can grow so large so quickly, e.g. 123.456^10000 has over 20000 digits, the number of significant digits calculated is limited to the value of the POW_PRECISION setting (unless a modulus m is specified).

+

+ By default POW_PRECISION is set to 0. + This means that an unlimited number of significant digits will be calculated, and that the + method's performance will decrease dramatically for larger exponents. +

+

+ If m is specified and the value of m, n and this + BigNumber are integers, and n is positive, then a fast modular exponentiation + algorithm is used, otherwise the operation will be performed as + x.exponentiatedBy(n).modulo(m) with a + POW_PRECISION of 0. +

+
+Math.pow(0.7, 2)                // 0.48999999999999994
+x = new BigNumber(0.7)
+x.exponentiatedBy(2)            // '0.49'
+BigNumber(3).pow(-2)            // '0.11111111111111111111'
+ + + +
+ integerValue.integerValue([rm]) ⇒ BigNumber +
+

+ rm: number: integer, 0 to 8 inclusive +

+

+ Returns a BigNumber whose value is the value of this BigNumber rounded to an integer using + rounding mode rm. +

+

+ If rm is omitted, or is null or undefined, + ROUNDING_MODE is used. +

+

+ Throws if rm is invalid. See Errors. +

+
+x = new BigNumber(123.456)
+x.integerValue()                        // '123'
+x.integerValue(BigNumber.ROUND_CEIL)    // '124'
+y = new BigNumber(-12.7)
+y.integerValue()                        // '-13'
+y.integerValue(BigNumber.ROUND_DOWN)    // '-12'
+

+ The following is an example of how to add a prototype method that emulates JavaScript's + Math.round function. Math.ceil, Math.floor and + Math.trunc can be emulated in the same way with + BigNumber.ROUND_CEIL, BigNumber.ROUND_FLOOR and + BigNumber.ROUND_DOWN respectively. +

+
+BigNumber.prototype.round = function () {
+  return this.integerValue(BigNumber.ROUND_HALF_CEIL);
+};
+x.round()                               // '123'
+ + + +
isEqualTo.eq(n [, base]) ⇒ boolean
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns true if the value of this BigNumber is equal to the value of + n, otherwise returns false.
+ As with JavaScript, NaN does not equal NaN. +

+

Note: This method uses the comparedTo method internally.

+
+0 === 1e-324                    // true
+x = new BigNumber(0)
+x.isEqualTo('1e-324')           // false
+BigNumber(-0).eq(x)             // true  ( -0 === 0 )
+BigNumber(255).eq('ff', 16)     // true
+
+y = new BigNumber(NaN)
+y.isEqualTo(NaN)                // false
+ + + +
isFinite.isFinite() ⇒ boolean
+

+ Returns true if the value of this BigNumber is a finite number, otherwise + returns false. +

+

+ The only possible non-finite values of a BigNumber are NaN, Infinity + and -Infinity. +

+
+x = new BigNumber(1)
+x.isFinite()                    // true
+y = new BigNumber(Infinity)
+y.isFinite()                    // false
+

+ Note: The native method isFinite() can be used if + n <= Number.MAX_VALUE. +

+ + + +
isGreaterThan.gt(n [, base]) ⇒ boolean
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns true if the value of this BigNumber is greater than the value of + n, otherwise returns false. +

+

Note: This method uses the comparedTo method internally.

+
+0.1 > (0.3 - 0.2)                             // true
+x = new BigNumber(0.1)
+x.isGreaterThan(BigNumber(0.3).minus(0.2))    // false
+BigNumber(0).gt(x)                            // false
+BigNumber(11, 3).gt(11.1, 2)                  // true
+ + + +
+ isGreaterThanOrEqualTo.gte(n [, base]) ⇒ boolean +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns true if the value of this BigNumber is greater than or equal to the value + of n, otherwise returns false. +

+

Note: This method uses the comparedTo method internally.

+
+(0.3 - 0.2) >= 0.1                     // false
+x = new BigNumber(0.3).minus(0.2)
+x.isGreaterThanOrEqualTo(0.1)          // true
+BigNumber(1).gte(x)                    // true
+BigNumber(10, 18).gte('i', 36)         // true
+ + + +
isInteger.isInteger() ⇒ boolean
+

+ Returns true if the value of this BigNumber is an integer, otherwise returns + false. +

+
+x = new BigNumber(1)
+x.isInteger()                   // true
+y = new BigNumber(123.456)
+y.isInteger()                   // false
+ + + +
isLessThan.lt(n [, base]) ⇒ boolean
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns true if the value of this BigNumber is less than the value of + n, otherwise returns false. +

+

Note: This method uses the comparedTo method internally.

+
+(0.3 - 0.2) < 0.1                       // true
+x = new BigNumber(0.3).minus(0.2)
+x.isLessThan(0.1)                       // false
+BigNumber(0).lt(x)                      // true
+BigNumber(11.1, 2).lt(11, 3)            // true
+ + + +
+ isLessThanOrEqualTo.lte(n [, base]) ⇒ boolean +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns true if the value of this BigNumber is less than or equal to the value of + n, otherwise returns false. +

+

Note: This method uses the comparedTo method internally.

+
+0.1 <= (0.3 - 0.2)                                // false
+x = new BigNumber(0.1)
+x.isLessThanOrEqualTo(BigNumber(0.3).minus(0.2))  // true
+BigNumber(-1).lte(x)                              // true
+BigNumber(10, 18).lte('i', 36)                    // true
+ + + +
isNaN.isNaN() ⇒ boolean
+

+ Returns true if the value of this BigNumber is NaN, otherwise + returns false. +

+
+x = new BigNumber(NaN)
+x.isNaN()                       // true
+y = new BigNumber('Infinity')
+y.isNaN()                       // false
+

Note: The native method isNaN() can also be used.

+ + + +
isNegative.isNegative() ⇒ boolean
+

+ Returns true if the sign of this BigNumber is negative, otherwise returns + false. +

+
+x = new BigNumber(-0)
+x.isNegative()                  // true
+y = new BigNumber(2)
+y.isNegative()                  // false
+

Note: n < 0 can be used if n <= -Number.MIN_VALUE.

+ + + +
isPositive.isPositive() ⇒ boolean
+

+ Returns true if the sign of this BigNumber is positive, otherwise returns + false. +

+
+x = new BigNumber(-0)
+x.isPositive()                  // false
+y = new BigNumber(2)
+y.isPositive()                  // true
+ + + +
isZero.isZero() ⇒ boolean
+

+ Returns true if the value of this BigNumber is zero or minus zero, otherwise + returns false. +

+
+x = new BigNumber(-0)
+x.isZero() && x.isNegative()         // true
+y = new BigNumber(Infinity)
+y.isZero()                      // false
+

Note: n == 0 can be used if n >= Number.MIN_VALUE.

+ + + +
+ minus.minus(n [, base]) ⇒ BigNumber +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

Returns a BigNumber whose value is the value of this BigNumber minus n.

+

The return value is always exact and unrounded.

+
+0.3 - 0.1                       // 0.19999999999999998
+x = new BigNumber(0.3)
+x.minus(0.1)                    // '0.2'
+x.minus(0.6, 20)                // '0'
+ + + +
modulo.mod(n [, base]) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the value of this BigNumber modulo n, i.e. + the integer remainder of dividing this BigNumber by n. +

+

The value returned, and in particular its sign, is dependent on the value of the MODULO_MODE setting of this BigNumber constructor. If it is 1 (default value), the result will have the same sign as this BigNumber, and it will match that of JavaScript's % operator (within the limits of double precision) and BigDecimal's remainder method.

+

The return value is always exact and unrounded.

+

+ See MODULO_MODE for a description of the other + modulo modes. +

+
+1 % 0.9                         // 0.09999999999999998
+x = new BigNumber(1)
+x.modulo(0.9)                   // '0.1'
+y = new BigNumber(33)
+y.mod('a', 33)                  // '3'
+ + + +
+ multipliedBy.times(n [, base]) ⇒ BigNumber +
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

+ Returns a BigNumber whose value is the value of this BigNumber multiplied by n. +

+

The return value is always exact and unrounded.

+
+0.6 * 3                         // 1.7999999999999998
+x = new BigNumber(0.6)
+y = x.multipliedBy(3)           // '1.8'
+BigNumber('7e+500').times(y)    // '1.26e+501'
+x.multipliedBy('-a', 16)        // '-6'
+ + + +
negated.negated() ⇒ BigNumber
+

+ Returns a BigNumber whose value is the value of this BigNumber negated, i.e. multiplied by + -1. +

+
+x = new BigNumber(1.8)
+x.negated()                     // '-1.8'
+y = new BigNumber(-1.3)
+y.negated()                     // '1.3'
+ + + +
plus.plus(n [, base]) ⇒ BigNumber
+

+ n: number|string|BigNumber
+ base: number
+ See BigNumber for further parameter details. +

+

Returns a BigNumber whose value is the value of this BigNumber plus n.

+

The return value is always exact and unrounded.

+
+0.1 + 0.2                       // 0.30000000000000004
+x = new BigNumber(0.1)
+y = x.plus(0.2)                 // '0.3'
+BigNumber(0.7).plus(x).plus(y)  // '1.1'
+x.plus('0.1', 8)                // '0.225'
+ + + +
+ precision.sd([d [, rm]]) ⇒ BigNumber|number +
+

+ d: number|boolean: integer, 1 to 1e+9 + inclusive, or true or false
+ rm: number: integer, 0 to 8 inclusive. +

+

+ If d is a number, returns a BigNumber whose value is the value of this BigNumber + rounded to a precision of d significant digits using rounding mode + rm. +

+

+ If d is omitted or is null or undefined, the return + value is the number of significant digits of the value of this BigNumber, or null + if the value of this BigNumber is ±Infinity or NaN. +

+

+ If d is true then any trailing zeros of the integer + part of a number are counted as significant digits, otherwise they are not. +

+

+ If rm is omitted or is null or undefined, + ROUNDING_MODE will be used. +

+

+ Throws if d or rm is invalid. See Errors. +

+
+x = new BigNumber(9876.54321)
+x.precision(6)                         // '9876.54'
+x.sd()                                 // 9
+x.precision(6, BigNumber.ROUND_UP)     // '9876.55'
+x.sd(2)                                // '9900'
+x.precision(2, 1)                      // '9800'
+x                                      // '9876.54321'
+y = new BigNumber(987000)
+y.precision()                          // 3
+y.sd(true)                             // 6
+ + + +
shiftedBy.shiftedBy(n) ⇒ BigNumber
+

+ n: number: integer, + -9007199254740991 to 9007199254740991 inclusive +

+

+ Returns a BigNumber whose value is the value of this BigNumber shifted by n + places. +

+ The shift is of the decimal point, i.e. of powers of ten, and is to the left if n + is negative or to the right if n is positive. +

+

The return value is always exact and unrounded.

+

+ Throws if n is invalid. See Errors. +

+
+x = new BigNumber(1.23)
+x.shiftedBy(3)                      // '1230'
+x.shiftedBy(-3)                     // '0.00123'
+ + + +
squareRoot.sqrt() ⇒ BigNumber
+

+ Returns a BigNumber whose value is the square root of the value of this BigNumber, + rounded according to the current + DECIMAL_PLACES and + ROUNDING_MODE settings. +

+

+ The return value will be correctly rounded, i.e. rounded as if the result was first calculated + to an infinite number of correct digits before rounding. +

+
+x = new BigNumber(16)
+x.squareRoot()                  // '4'
+y = new BigNumber(3)
+y.sqrt()                        // '1.73205080756887729353'
+ + + +
+ toExponential.toExponential([dp [, rm]]) ⇒ string +
+

+ dp: number: integer, 0 to 1e+9 inclusive
+ rm: number: integer, 0 to 8 inclusive +

+

+ Returns a string representing the value of this BigNumber in exponential notation rounded + using rounding mode rm to dp decimal places, i.e with one digit + before the decimal point and dp digits after it. +

+

+ If the value of this BigNumber in exponential notation has fewer than dp fraction + digits, the return value will be appended with zeros accordingly. +

+

+ If dp is omitted, or is null or undefined, the number + of digits after the decimal point defaults to the minimum number of digits necessary to + represent the value exactly.
+ If rm is omitted or is null or undefined, + ROUNDING_MODE is used. +

+

+ Throws if dp or rm is invalid. See Errors. +

+
+x = 45.6
+y = new BigNumber(x)
+x.toExponential()               // '4.56e+1'
+y.toExponential()               // '4.56e+1'
+x.toExponential(0)              // '5e+1'
+y.toExponential(0)              // '5e+1'
+x.toExponential(1)              // '4.6e+1'
+y.toExponential(1)              // '4.6e+1'
+y.toExponential(1, 1)           // '4.5e+1'  (ROUND_DOWN)
+x.toExponential(3)              // '4.560e+1'
+y.toExponential(3)              // '4.560e+1'
+ + + +
+ toFixed.toFixed([dp [, rm]]) ⇒ string +
+

+ dp: number: integer, 0 to 1e+9 inclusive
+ rm: number: integer, 0 to 8 inclusive +

+

+ Returns a string representing the value of this BigNumber in normal (fixed-point) notation + rounded to dp decimal places using rounding mode rm. +

+

+ If the value of this BigNumber in normal notation has fewer than dp fraction + digits, the return value will be appended with zeros accordingly. +

+

Unlike Number.prototype.toFixed, which returns exponential notation if a number is greater than or equal to 10^21, this method will always return normal notation.

+

+ If dp is omitted or is null or undefined, the return + value will be unrounded and in normal notation. This is also unlike + Number.prototype.toFixed, which returns the value to zero decimal places.
+ It is useful when fixed-point notation is required and the current + EXPONENTIAL_AT setting causes + toString to return exponential notation.
+ If rm is omitted or is null or undefined, + ROUNDING_MODE is used. +

+

+ Throws if dp or rm is invalid. See Errors. +

+
+x = 3.456
+y = new BigNumber(x)
+x.toFixed()                     // '3'
+y.toFixed()                     // '3.456'
+y.toFixed(0)                    // '3'
+x.toFixed(2)                    // '3.46'
+y.toFixed(2)                    // '3.46'
+y.toFixed(2, 1)                 // '3.45'  (ROUND_DOWN)
+x.toFixed(5)                    // '3.45600'
+y.toFixed(5)                    // '3.45600'
+ + + +
+ toFormat.toFormat([dp [, rm[, format]]]) ⇒ string +
+

+ dp: number: integer, 0 to 1e+9 inclusive
+ rm: number: integer, 0 to 8 inclusive
+ format: object: see FORMAT +

+

+

+ Returns a string representing the value of this BigNumber in normal (fixed-point) notation + rounded to dp decimal places using rounding mode rm, and formatted + according to the properties of the format object. +

+

+ See FORMAT and the examples below for the properties of the + format object, their types, and their usage. A formatting object may contain + some or all of the recognised properties. +

+

+ If dp is omitted or is null or undefined, then the + return value is not rounded to a fixed number of decimal places.
+ If rm is omitted or is null or undefined, + ROUNDING_MODE is used.
+ If format is omitted or is null or undefined, the + FORMAT object is used. +

+

+ Throws if dp, rm or format is invalid. See + Errors. +

+
+fmt = {
+  prefix: '',
+  decimalSeparator: '.',
+  groupSeparator: ',',
+  groupSize: 3,
+  secondaryGroupSize: 0,
+  fractionGroupSeparator: ' ',
+  fractionGroupSize: 0,
+  suffix: ''
+}
+
+x = new BigNumber('123456789.123456789')
+
+// Set the global formatting options
+BigNumber.config({ FORMAT: fmt })
+
+x.toFormat()                              // '123,456,789.123456789'
+x.toFormat(3)                             // '123,456,789.123'
+
+// If a reference to the object assigned to FORMAT has been retained,
+// the format properties can be changed directly
+fmt.groupSeparator = ' '
+fmt.fractionGroupSize = 5
+x.toFormat()                              // '123 456 789.12345 6789'
+
+// Alternatively, pass the formatting options as an argument
+fmt = {
+  prefix: '=> ',
+  decimalSeparator: ',',
+  groupSeparator: '.',
+  groupSize: 3,
+  secondaryGroupSize: 2
+}
+
+x.toFormat()                              // '123 456 789.12345 6789'
+x.toFormat(fmt)                           // '=> 12.34.56.789,123456789'
+x.toFormat(2, fmt)                        // '=> 12.34.56.789,12'
+x.toFormat(3, BigNumber.ROUND_UP, fmt)    // '=> 12.34.56.789,124'
+ + + +
+ toFraction.toFraction([maximum_denominator]) + ⇒ [BigNumber, BigNumber] +
+

+ maximum_denominator: + number|string|BigNumber: integer >= 1 and <= + Infinity +

+

+ Returns an array of two BigNumbers representing the value of this BigNumber as a simple + fraction with an integer numerator and an integer denominator. The denominator will be a + positive non-zero value less than or equal to maximum_denominator. +

+

+ If a maximum_denominator is not specified, or is null or + undefined, the denominator will be the lowest value necessary to represent the + number exactly. +

+

+ Throws if maximum_denominator is invalid. See Errors. +

+
+x = new BigNumber(1.75)
+x.toFraction()                  // '7, 4'
+
+pi = new BigNumber('3.14159265358')
+pi.toFraction()                 // '157079632679,50000000000'
+pi.toFraction(100000)           // '312689, 99532'
+pi.toFraction(10000)            // '355, 113'
+pi.toFraction(100)              // '311, 99'
+pi.toFraction(10)               // '22, 7'
+pi.toFraction(1)                // '3, 1'
+ + + +
toJSON.toJSON() ⇒ string
+

As valueOf.

+
+x = new BigNumber('177.7e+457')
+y = new BigNumber(235.4325)
+z = new BigNumber('0.0098074')
+
+// Serialize an array of three BigNumbers
+str = JSON.stringify( [x, y, z] )
+// "["1.777e+459","235.4325","0.0098074"]"
+
+// Return an array of three BigNumbers
+JSON.parse(str, function (key, val) {
+    return key === '' ? val : new BigNumber(val)
+})
+ + + +
toNumber.toNumber() ⇒ number
+

Returns the value of this BigNumber as a JavaScript number primitive.

+

+ This method is identical to using type coercion with the unary plus operator. +

+
+x = new BigNumber(456.789)
+x.toNumber()                    // 456.789
++x                              // 456.789
+
+y = new BigNumber('45987349857634085409857349856430985')
+y.toNumber()                    // 4.598734985763409e+34
+
+z = new BigNumber(-0)
+1 / z.toNumber()                // -Infinity
+1 / +z                          // -Infinity
+ + + +
+ toPrecision.toPrecision([sd [, rm]]) ⇒ string +
+

+ sd: number: integer, 1 to 1e+9 inclusive
+ rm: number: integer, 0 to 8 inclusive +

+

+ Returns a string representing the value of this BigNumber rounded to sd + significant digits using rounding mode rm. +

+

+ If sd is less than the number of digits necessary to represent the integer part + of the value in normal (fixed-point) notation, then exponential notation is used. +

+

+ If sd is omitted, or is null or undefined, then the + return value is the same as n.toString().
+ If rm is omitted or is null or undefined, + ROUNDING_MODE is used. +

+

+ Throws if sd or rm is invalid. See Errors. +

+
+x = 45.6
+y = new BigNumber(x)
+x.toPrecision()                 // '45.6'
+y.toPrecision()                 // '45.6'
+x.toPrecision(1)                // '5e+1'
+y.toPrecision(1)                // '5e+1'
+y.toPrecision(2, 0)             // '4.6e+1'  (ROUND_UP)
+y.toPrecision(2, 1)             // '4.5e+1'  (ROUND_DOWN)
+x.toPrecision(5)                // '45.600'
+y.toPrecision(5)                // '45.600'
+ + + +
toString.toString([base]) ⇒ string
+

+ base: number: integer, 2 to ALPHABET.length + inclusive (see ALPHABET). +

+

+ Returns a string representing the value of this BigNumber in the specified base, or base + 10 if base is omitted or is null or + undefined. +

+

+ For bases above 10, and using the default base conversion alphabet + (see ALPHABET), values from 10 to + 35 are represented by a-z + (as with Number.prototype.toString). +

+

+ If a base is specified the value is rounded according to the current + DECIMAL_PLACES + and ROUNDING_MODE settings. +

+

+ If a base is not specified, and this BigNumber has a positive + exponent that is equal to or greater than the positive component of the + current EXPONENTIAL_AT setting, + or a negative exponent equal to or less than the negative component of the + setting, then exponential notation is returned. +

+

If base is null or undefined it is ignored.

+

+ Throws if base is invalid. See Errors. +

+
+x = new BigNumber(750000)
+x.toString()                    // '750000'
+BigNumber.config({ EXPONENTIAL_AT: 5 })
+x.toString()                    // '7.5e+5'
+
+y = new BigNumber(362.875)
+y.toString(2)                   // '101101010.111'
+y.toString(9)                   // '442.77777777777777777778'
+y.toString(32)                  // 'ba.s'
+
+BigNumber.config({ DECIMAL_PLACES: 4 });
+z = new BigNumber('1.23456789')
+z.toString()                    // '1.23456789'
+z.toString(10)                  // '1.2346'
+ + + +
valueOf.valueOf() ⇒ string
+

+ As toString, but does not accept a base argument and includes + the minus sign for negative zero. +

+
+x = new BigNumber('-0')
+x.toString()                    // '0'
+x.valueOf()                     // '-0'
+y = new BigNumber('1.777e+457')
+y.valueOf()                     // '1.777e+457'
+ + + +

Properties

+

The properties of a BigNumber instance:

+ + + + + + + + + + + + + + + + + + + + + + + + + +
Property  Description   Type      Value
c         coefficient*  number[]  Array of base 1e14 numbers
e         exponent      number    Integer, -1000000000 to 1000000000 inclusive
s         sign          number    -1 or 1
+

*significand

+

+ The value of any of the c, e and s properties may also + be null. +

+

+ The above properties are best considered to be read-only. In early versions of this library it + was okay to change the exponent of a BigNumber by writing to its exponent property directly, + but this is no longer reliable as the value of the first element of the coefficient array is + now dependent on the exponent. +

+

+ Note that, as with JavaScript numbers, the original exponent and fractional trailing zeros are + not necessarily preserved. +

+
x = new BigNumber(0.123)              // '0.123'
+x.toExponential()                     // '1.23e-1'
+x.c                                   // '1,2,3'
+x.e                                   // -1
+x.s                                   // 1
+
+y = new Number(-123.4567000e+2)       // '-12345.67'
+y.toExponential()                     // '-1.234567e+4'
+z = new BigNumber('-123.4567000e+2')  // '-12345.67'
+z.toExponential()                     // '-1.234567e+4'
+z.c                                   // '1,2,3,4,5,6,7'
+z.e                                   // 4
+z.s                                   // -1
+ + + +

Zero, NaN and Infinity

+

+ The table below shows how ±0, NaN and + ±Infinity are stored. +

+ + + + + + + + + + + + + + + + + + + + + + + + + +
             c     e     s
±0           [0]   0     ±1
NaN          null  null  null
±Infinity    null  null  ±1
+
+x = new Number(-0)              // 0
+1 / x == -Infinity              // true
+
+y = new BigNumber(-0)           // '0'
+y.c                             // '0' ( [0].toString() )
+y.e                             // 0
+y.s                             // -1
+ + + +

Errors

+

The table below shows the errors that are thrown.

+

+ The errors are generic Error objects whose message begins + '[BigNumber Error]'. +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Method                                Throws
+ BigNumber
+ comparedTo
+ dividedBy
+ dividedToIntegerBy
+ isEqualTo
+ isGreaterThan
+ isGreaterThanOrEqualTo
+ isLessThan
+ isLessThanOrEqualTo
+ minus
+ modulo
+ plus
+ multipliedBy +
Base not a primitive number
Base not an integer
Base out of range
Number primitive has more than 15 significant digits*
Not a base... number*
Not a number*
cloneObject expected
configObject expected
DECIMAL_PLACES not a primitive number
DECIMAL_PLACES not an integer
DECIMAL_PLACES out of range
ROUNDING_MODE not a primitive number
ROUNDING_MODE not an integer
ROUNDING_MODE out of range
EXPONENTIAL_AT not a primitive number
EXPONENTIAL_AT not an integer
EXPONENTIAL_AT out of range
RANGE not a primitive number
RANGE not an integer
RANGE out of range
RANGE cannot be zero
CRYPTO not true or false
crypto unavailable
MODULO_MODE not a primitive number
MODULO_MODE not an integer
MODULO_MODE out of range
POW_PRECISION not a primitive number
POW_PRECISION not an integer
POW_PRECISION out of range
FORMAT not an object
ALPHABET invalid
+ decimalPlaces
+ precision
+ random
+ shiftedBy
+ toExponential
+ toFixed
+ toFormat
+ toPrecision +
Argument not a primitive number
Argument not an integer
Argument out of range
+ decimalPlaces
+ precision +
Argument not true or false
exponentiatedByArgument not an integer
isBigNumberInvalid BigNumber*
+ minimum
+ maximum +
Not a number*
+ random + crypto unavailable
+ toFormat + Argument not an object
toFractionArgument not an integer
Argument out of range
toStringBase not a primitive number
Base not an integer
Base out of range
+

*Only thrown if BigNumber.DEBUG is true.

+

To determine if an exception is a BigNumber Error:

+
+try {
+  // ...
+} catch (e) {
+  if (e instanceof Error && e.message.indexOf('[BigNumber Error]') === 0) {
+      // ...
+  }
+}
+ + + +

Type coercion

+

+ To prevent the accidental use of a BigNumber in primitive number operations, or the + accidental addition of a BigNumber to a string, the valueOf method can be safely + overwritten as shown below. +

+

+ The valueOf method is the same as the + toJSON method, and both are the same as the + toString method except they do not take a base + argument and they include the minus sign for negative zero. +

+
+BigNumber.prototype.valueOf = function () {
+  throw Error('valueOf called!')
+}
+
+x = new BigNumber(1)
+x / 2                    // '[BigNumber Error] valueOf called!'
+x + 'abc'                // '[BigNumber Error] valueOf called!'
+
+ + + +

FAQ

+ +
Why are trailing fractional zeros removed from BigNumbers?
+

+ Some arbitrary-precision libraries retain trailing fractional zeros as they can indicate the + precision of a value. This can be useful but the results of arithmetic operations can be + misleading. +

+
+x = new BigDecimal("1.0")
+y = new BigDecimal("1.1000")
+z = x.add(y)                      // 2.1000
+
+x = new BigDecimal("1.20")
+y = new BigDecimal("3.45000")
+z = x.multiply(y)                 // 4.1400000
+

+ To specify the precision of a value is to specify that the value lies + within a certain range. +

+

+ In the first example, x has a value of 1.0. The trailing zero shows + the precision of the value, implying that it is in the range 0.95 to + 1.05. Similarly, the precision indicated by the trailing zeros of y + indicates that the value is in the range 1.09995 to 1.10005. +

+

+ If we add the two lowest values in the ranges we have, 0.95 + 1.09995 = 2.04995, + and if we add the two highest values we have, 1.05 + 1.10005 = 2.15005, so the + range of the result of the addition implied by the precision of its operands is + 2.04995 to 2.15005. +

+

+ The result given by BigDecimal of 2.1000 however, indicates that the value is in + the range 2.09995 to 2.10005 and therefore the precision implied by + its trailing zeros may be misleading. +

+

+ In the second example, the true range is 4.122744 to 4.157256 yet + the BigDecimal answer of 4.1400000 indicates a range of 4.13999995 + to 4.14000005. Again, the precision implied by the trailing zeros may be + misleading. +

+

+ This library, like binary floating point and most calculators, does not retain trailing + fractional zeros. Instead, the toExponential, toFixed and + toPrecision methods enable trailing zeros to be added if and when required.
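For example, when a fixed number of decimal places or significant digits is wanted in the output:

x = new BigNumber('1.5')
x.toString()                    // '1.5'
x.toFixed(4)                    // '1.5000'
x.toPrecision(5)                // '1.5000'
x.toExponential(3)              // '1.500e+0'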
+

+
+ + + diff --git a/node_modules/bignumber.js/package.json b/node_modules/bignumber.js/package.json new file mode 100644 index 0000000..e0571a0 --- /dev/null +++ b/node_modules/bignumber.js/package.json @@ -0,0 +1,50 @@ +{ + "name": "bignumber.js", + "description": "A library for arbitrary-precision decimal and non-decimal arithmetic", + "version": "9.1.2", + "keywords": [ + "arbitrary", + "precision", + "arithmetic", + "big", + "number", + "decimal", + "float", + "biginteger", + "bigdecimal", + "bignumber", + "bigint", + "bignum" + ], + "repository": { + "type": "git", + "url": "https://github.com/MikeMcl/bignumber.js.git" + }, + "main": "bignumber", + "module": "bignumber.mjs", + "browser": "bignumber.js", + "types": "bignumber.d.ts", + "exports": { + ".": { + "types": "./bignumber.d.ts", + "require": "./bignumber.js", + "import": "./bignumber.mjs", + "browser": "./bignumber.js" + }, + "./bignumber.mjs": "./bignumber.mjs", + "./bignumber.js": "./bignumber.js", + "./package.json": "./package.json" + }, + "author": { + "name": "Michael Mclaughlin", + "email": "M8ch88l@gmail.com" + }, + "engines": { + "node": "*" + }, + "license": "MIT", + "scripts": { + "test": "node test/test" + }, + "dependencies": {} +} diff --git a/node_modules/buffer-equal-constant-time/.npmignore b/node_modules/buffer-equal-constant-time/.npmignore new file mode 100644 index 0000000..34e4f5c --- /dev/null +++ b/node_modules/buffer-equal-constant-time/.npmignore @@ -0,0 +1,2 @@ +.*.sw[mnop] +node_modules/ diff --git a/node_modules/buffer-equal-constant-time/.travis.yml b/node_modules/buffer-equal-constant-time/.travis.yml new file mode 100644 index 0000000..78e1c01 --- /dev/null +++ b/node_modules/buffer-equal-constant-time/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: +- "0.11" +- "0.10" diff --git a/node_modules/buffer-equal-constant-time/LICENSE.txt b/node_modules/buffer-equal-constant-time/LICENSE.txt new file mode 100644 index 0000000..9a064f3 --- /dev/null +++ b/node_modules/buffer-equal-constant-time/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) 2013, GoInstant Inc., a salesforce.com company +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of salesforce.com, nor GoInstant, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/buffer-equal-constant-time/README.md b/node_modules/buffer-equal-constant-time/README.md new file mode 100644 index 0000000..4f227f5 --- /dev/null +++ b/node_modules/buffer-equal-constant-time/README.md @@ -0,0 +1,50 @@ +# buffer-equal-constant-time + +Constant-time `Buffer` comparison for node.js. Should work with browserify too. + +[![Build Status](https://travis-ci.org/goinstant/buffer-equal-constant-time.png?branch=master)](https://travis-ci.org/goinstant/buffer-equal-constant-time) + +```sh + npm install buffer-equal-constant-time +``` + +# Usage + +```js + var bufferEq = require('buffer-equal-constant-time'); + + var a = new Buffer('asdf'); + var b = new Buffer('asdf'); + if (bufferEq(a,b)) { + // the same! + } else { + // different in at least one byte! + } +``` + +If you'd like to install an `.equal()` method onto the node.js `Buffer` and +`SlowBuffer` prototypes: + +```js + require('buffer-equal-constant-time').install(); + + var a = new Buffer('asdf'); + var b = new Buffer('asdf'); + if (a.equal(b)) { + // the same! + } else { + // different in at least one byte! + } +``` + +To get rid of the installed `.equal()` method, call `.restore()`: + +```js + require('buffer-equal-constant-time').restore(); +``` + +# Legal + +© 2013 GoInstant Inc., a salesforce.com company + +Licensed under the BSD 3-clause license. diff --git a/node_modules/buffer-equal-constant-time/index.js b/node_modules/buffer-equal-constant-time/index.js new file mode 100644 index 0000000..5462c1f --- /dev/null +++ b/node_modules/buffer-equal-constant-time/index.js @@ -0,0 +1,41 @@ +/*jshint node:true */ +'use strict'; +var Buffer = require('buffer').Buffer; // browserify +var SlowBuffer = require('buffer').SlowBuffer; + +module.exports = bufferEq; + +function bufferEq(a, b) { + + // shortcutting on type is necessary for correctness + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + return false; + } + + // buffer sizes should be well-known information, so despite this + // shortcutting, it doesn't leak any information about the *contents* of the + // buffers. 
+ if (a.length !== b.length) { + return false; + } + + var c = 0; + for (var i = 0; i < a.length; i++) { + /*jshint bitwise:false */ + c |= a[i] ^ b[i]; // XOR + } + return c === 0; +} + +bufferEq.install = function() { + Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { + return bufferEq(this, that); + }; +}; + +var origBufEqual = Buffer.prototype.equal; +var origSlowBufEqual = SlowBuffer.prototype.equal; +bufferEq.restore = function() { + Buffer.prototype.equal = origBufEqual; + SlowBuffer.prototype.equal = origSlowBufEqual; +}; diff --git a/node_modules/buffer-equal-constant-time/package.json b/node_modules/buffer-equal-constant-time/package.json new file mode 100644 index 0000000..17c7de2 --- /dev/null +++ b/node_modules/buffer-equal-constant-time/package.json @@ -0,0 +1,21 @@ +{ + "name": "buffer-equal-constant-time", + "version": "1.0.1", + "description": "Constant-time comparison of Buffers", + "main": "index.js", + "scripts": { + "test": "mocha test.js" + }, + "repository": "git@github.com:goinstant/buffer-equal-constant-time.git", + "keywords": [ + "buffer", + "equal", + "constant-time", + "crypto" + ], + "author": "GoInstant Inc., a salesforce.com company", + "license": "BSD-3-Clause", + "devDependencies": { + "mocha": "~1.15.1" + } +} diff --git a/node_modules/buffer-equal-constant-time/test.js b/node_modules/buffer-equal-constant-time/test.js new file mode 100644 index 0000000..0bc972d --- /dev/null +++ b/node_modules/buffer-equal-constant-time/test.js @@ -0,0 +1,42 @@ +/*jshint node:true */ +'use strict'; + +var bufferEq = require('./index'); +var assert = require('assert'); + +describe('buffer-equal-constant-time', function() { + var a = new Buffer('asdfasdf123456'); + var b = new Buffer('asdfasdf123456'); + var c = new Buffer('asdfasdf'); + + describe('bufferEq', function() { + it('says a == b', function() { + assert.strictEqual(bufferEq(a, b), true); + }); + + it('says a != c', function() { + assert.strictEqual(bufferEq(a, c), false); + }); + }); + + describe('install/restore', function() { + before(function() { + bufferEq.install(); + }); + after(function() { + bufferEq.restore(); + }); + + it('installed an .equal method', function() { + var SlowBuffer = require('buffer').SlowBuffer; + assert.ok(Buffer.prototype.equal); + assert.ok(SlowBuffer.prototype.equal); + }); + + it('infected existing Buffers', function() { + assert.strictEqual(a.equal(b), true); + assert.strictEqual(a.equal(c), false); + }); + }); + +}); diff --git a/node_modules/compressible/HISTORY.md b/node_modules/compressible/HISTORY.md new file mode 100644 index 0000000..f204ef3 --- /dev/null +++ b/node_modules/compressible/HISTORY.md @@ -0,0 +1,111 @@ +2.0.18 / 2020-01-05 +=================== + + * deps: mime-db@'>= 1.43.0 < 2' + - Mark `font/ttf` as compressible + - Remove compressible from `multipart/mixed` + +2.0.17 / 2019-04-24 +=================== + + * deps: mime-db@'>= 1.40.0 < 2' + +2.0.16 / 2019-02-18 +=================== + + * deps: mime-db@'>= 1.38.0 < 2' + - Mark `text/less` as compressible + +2.0.15 / 2018-09-17 +=================== + + * deps: mime-db@'>= 1.36.0 < 2' + +2.0.14 / 2018-06-05 +=================== + + * deps: mime-db@'>= 1.34.0 < 2' + - Mark all XML-derived types as compressible + +2.0.13 / 2018-02-17 +=================== + + * deps: mime-db@'>= 1.33.0 < 2' + +2.0.12 / 2017-10-20 +=================== + + * deps: mime-db@'>= 1.30.0 < 2' + +2.0.11 / 2017-07-27 +=================== + + * deps: mime-db@'>= 1.29.0 < 2' + +2.0.10 / 2017-03-23 
+=================== + + * deps: mime-db@'>= 1.27.0 < 2' + +2.0.9 / 2016-10-31 +================== + + * Fix regex fallback to not override `compressible: false` in db + * deps: mime-db@'>= 1.24.0 < 2' + +2.0.8 / 2016-05-12 +================== + + * deps: mime-db@'>= 1.23.0 < 2' + +2.0.7 / 2016-01-18 +================== + + * deps: mime-db@'>= 1.21.0 < 2' + +2.0.6 / 2015-09-29 +================== + + * deps: mime-db@'>= 1.19.0 < 2' + +2.0.5 / 2015-07-30 +================== + + * deps: mime-db@'>= 1.16.0 < 2' + +2.0.4 / 2015-07-01 +================== + + * deps: mime-db@'>= 1.14.0 < 2' + * perf: enable strict mode + +2.0.3 / 2015-06-08 +================== + + * Fix regex fallback to work if type exists, but is undefined + * perf: hoist regex declaration + * perf: use regex to extract mime + * deps: mime-db@'>= 1.13.0 < 2' + +2.0.2 / 2015-01-31 +================== + + * deps: mime-db@'>= 1.1.2 < 2' + +2.0.1 / 2014-09-28 +================== + + * deps: mime-db@1.x + - Add new mime types + - Add additional compressible + - Update charsets + + +2.0.0 / 2014-09-02 +================== + + * use mime-db + * remove .get() + * specifications are now private + * regex is now private + * stricter regex diff --git a/node_modules/compressible/LICENSE b/node_modules/compressible/LICENSE new file mode 100644 index 0000000..ce00b3f --- /dev/null +++ b/node_modules/compressible/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2013 Jonathan Ong +Copyright (c) 2014 Jeremiah Senkpiel +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/compressible/README.md b/node_modules/compressible/README.md new file mode 100644 index 0000000..dc86ec0 --- /dev/null +++ b/node_modules/compressible/README.md @@ -0,0 +1,61 @@ +# compressible + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Compressible `Content-Type` / `mime` checking. + +## Installation + +```sh +$ npm install compressible +``` + +## API + + + +```js +var compressible = require('compressible') +``` + +### compressible(type) + +Checks if the given `Content-Type` is compressible. The `type` argument is expected +to be a value MIME type or `Content-Type` string, though no validation is performed. 
+ +The MIME is looked up in the [`mime-db`](https://www.npmjs.com/package/mime-db) and +if there is compressible information in the database entry, that is returned. Otherwise, +this module will fallback to `true` for the following types: + + * `text/*` + * `*/*+json` + * `*/*+text` + * `*/*+xml` + +If this module is not sure if a type is specifically compressible or specifically +uncompressible, `undefined` is returned. + + + +```js +compressible('text/html') // => true +compressible('image/png') // => false +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/compressible/master +[coveralls-url]: https://coveralls.io/r/jshttp/compressible?branch=master +[node-version-image]: https://badgen.net/npm/node/compressible +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/compressible +[npm-url]: https://npmjs.org/package/compressible +[npm-version-image]: https://badgen.net/npm/v/compressible +[travis-image]: https://badgen.net/travis/jshttp/compressible/master +[travis-url]: https://travis-ci.org/jshttp/compressible diff --git a/node_modules/compressible/index.js b/node_modules/compressible/index.js new file mode 100644 index 0000000..1184ada --- /dev/null +++ b/node_modules/compressible/index.js @@ -0,0 +1,58 @@ +/*! + * compressible + * Copyright(c) 2013 Jonathan Ong + * Copyright(c) 2014 Jeremiah Senkpiel + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var db = require('mime-db') + +/** + * Module variables. + * @private + */ + +var COMPRESSIBLE_TYPE_REGEXP = /^text\/|\+(?:json|text|xml)$/i +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ + +/** + * Module exports. + * @public + */ + +module.exports = compressible + +/** + * Checks if a type is compressible. 
+ * + * @param {string} type + * @return {Boolean} compressible + * @public + */ + +function compressible (type) { + if (!type || typeof type !== 'string') { + return false + } + + // strip parameters + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && match[1].toLowerCase() + var data = db[mime] + + // return database information + if (data && data.compressible !== undefined) { + return data.compressible + } + + // fallback to regexp or unknown + return COMPRESSIBLE_TYPE_REGEXP.test(mime) || undefined +} diff --git a/node_modules/compressible/package.json b/node_modules/compressible/package.json new file mode 100644 index 0000000..ab582cb --- /dev/null +++ b/node_modules/compressible/package.json @@ -0,0 +1,48 @@ +{ + "name": "compressible", + "description": "Compressible Content-Type / mime checking", + "version": "2.0.18", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)", + "Jeremiah Senkpiel (https://searchbeam.jit.su)" + ], + "license": "MIT", + "repository": "jshttp/compressible", + "keywords": [ + "compress", + "gzip", + "mime", + "content-type" + ], + "dependencies": { + "mime-db": ">= 1.43.0 < 2" + }, + "devDependencies": { + "eslint": "6.8.0", + "eslint-config-standard": "14.1.0", + "eslint-plugin-import": "2.19.1", + "eslint-plugin-markdown": "1.0.1", + "eslint-plugin-node": "11.0.0", + "eslint-plugin-promise": "4.2.1", + "eslint-plugin-standard": "4.0.1", + "mocha": "7.0.0", + "nyc": "15.0.0" + }, + "engines": { + "node": ">= 0.6" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.js" + ], + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "test-travis": "nyc --reporter=text npm test", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/node_modules/duplexify/.travis.yml b/node_modules/duplexify/.travis.yml new file mode 100644 index 0000000..37494af --- /dev/null +++ b/node_modules/duplexify/.travis.yml @@ -0,0 +1,9 @@ +language: node_js +arch: + - amd64 + - ppc64le +node_js: + - "4" + - "6" + - "8" + - "10" diff --git a/node_modules/duplexify/LICENSE b/node_modules/duplexify/LICENSE new file mode 100644 index 0000000..757562e --- /dev/null +++ b/node_modules/duplexify/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/duplexify/README.md b/node_modules/duplexify/README.md new file mode 100644 index 0000000..8352900 --- /dev/null +++ b/node_modules/duplexify/README.md @@ -0,0 +1,97 @@ +# duplexify + +Turn a writeable and readable stream into a single streams2 duplex stream. + +Similar to [duplexer2](https://github.com/deoxxa/duplexer2) except it supports both streams2 and streams1 as input +and it allows you to set the readable and writable part asynchronously using `setReadable(stream)` and `setWritable(stream)` + +``` +npm install duplexify +``` + +[![build status](http://img.shields.io/travis/mafintosh/duplexify.svg?style=flat)](http://travis-ci.org/mafintosh/duplexify) + +## Usage + +Use `duplexify(writable, readable, streamOptions)` (or `duplexify.obj(writable, readable)` to create an object stream) + +``` js +var duplexify = require('duplexify') + +// turn writableStream and readableStream into a single duplex stream +var dup = duplexify(writableStream, readableStream) + +dup.write('hello world') // will write to writableStream +dup.on('data', function(data) { + // will read from readableStream +}) +``` + +You can also set the readable and writable parts asynchronously + +``` js +var dup = duplexify() + +dup.write('hello world') // write will buffer until the writable + // part has been set + +// wait a bit ... +dup.setReadable(readableStream) + +// maybe wait some more? +dup.setWritable(writableStream) +``` + +If you call `setReadable` or `setWritable` multiple times it will unregister the previous readable/writable stream. +To disable the readable or writable part call `setReadable` or `setWritable` with `null`. + +If the readable or writable streams emits an error or close it will destroy both streams and bubble up the event. +You can also explicitly destroy the streams by calling `dup.destroy()`. The `destroy` method optionally takes an +error object as argument, in which case the error is emitted as part of the `error` event. + +``` js +dup.on('error', function(err) { + console.log('readable or writable emitted an error - close will follow') +}) + +dup.on('close', function() { + console.log('the duplex stream is destroyed') +}) + +dup.destroy() // calls destroy on the readable and writable part (if present) +``` + +## HTTP request example + +Turn a node core http request into a duplex stream is as easy as + +``` js +var duplexify = require('duplexify') +var http = require('http') + +var request = function(opts) { + var req = http.request(opts) + var dup = duplexify(req) + req.on('response', function(res) { + dup.setReadable(res) + }) + return dup +} + +var req = request({ + method: 'GET', + host: 'www.google.com', + port: 80 +}) + +req.end() +req.pipe(process.stdout) +``` + +## License + +MIT + +## Related + +`duplexify` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. 
diff --git a/node_modules/duplexify/example.js b/node_modules/duplexify/example.js new file mode 100644 index 0000000..5585c19 --- /dev/null +++ b/node_modules/duplexify/example.js @@ -0,0 +1,21 @@ +var duplexify = require('duplexify') +var http = require('http') + +var request = function(opts) { + var req = http.request(opts) + var dup = duplexify() + dup.setWritable(req) + req.on('response', function(res) { + dup.setReadable(res) + }) + return dup +} + +var req = request({ + method: 'GET', + host: 'www.google.com', + port: 80 +}) + +req.end() +req.pipe(process.stdout) diff --git a/node_modules/duplexify/index.js b/node_modules/duplexify/index.js new file mode 100644 index 0000000..3430fe4 --- /dev/null +++ b/node_modules/duplexify/index.js @@ -0,0 +1,238 @@ +var stream = require('readable-stream') +var eos = require('end-of-stream') +var inherits = require('inherits') +var shift = require('stream-shift') + +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) + +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} + +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} + +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? null : err) + else if (end && !self._ended) self.end() + } +} + +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} + +var noop = function() {} + +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} + +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) + + this._writable = null + this._readable = null + this._readable2 = null + + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false + + this.destroyed = false + + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} + +inherits(Duplexify, stream.Duplex) + +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, readable, opts) +} + +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} + +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} + +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() + + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } + + if (writable === null || writable === false) { + this.end() + return + } + + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) + + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() + } + + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() + } + + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid 
livelocks + + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = clear + + this.uncork() // always uncork setWritable +} + +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() + + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return + } + + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } + + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) + + var onreadable = function() { + self._forward() + } + + var onend = function() { + self.push(null) + } + + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } + + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear + + this._forward() +} + +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} + +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true + + var data + + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } + + this._forwarding = false +} + +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true + + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) +} + +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } + + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() + } + + this.emit('close') +} + +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() + + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} + +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} + +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} + +module.exports = Duplexify diff --git a/node_modules/duplexify/package.json b/node_modules/duplexify/package.json new file mode 100644 index 0000000..dd28048 --- /dev/null +++ b/node_modules/duplexify/package.json @@ -0,0 +1,39 @@ +{ + "name": "duplexify", + "version": "4.1.3", + "description": "Turn a writable and readable stream into a streams2 duplex stream with 
support for async initialization and streams1/streams2 input", + "main": "index.js", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.2" + }, + "devDependencies": { + "concat-stream": "^1.5.2", + "tape": "^4.0.0", + "through2": "^2.0.0" + }, + "scripts": { + "test": "tape test.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/mafintosh/duplexify" + }, + "keywords": [ + "duplex", + "streams2", + "streams", + "stream", + "writable", + "readable", + "async" + ], + "author": "Mathias Buus", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/duplexify/issues" + }, + "homepage": "https://github.com/mafintosh/duplexify" +} diff --git a/node_modules/duplexify/test.js b/node_modules/duplexify/test.js new file mode 100644 index 0000000..9341105 --- /dev/null +++ b/node_modules/duplexify/test.js @@ -0,0 +1,339 @@ +var tape = require('tape') +var through = require('through2') +var concat = require('concat-stream') +var stream = require('readable-stream') +var net = require('net') +var duplexify = require('./') + +var HELLO_WORLD = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from('hello world') + : new Buffer('hello world') + +tape('passthrough', function(t) { + t.plan(2) + + var pt = through() + var dup = duplexify(pt, pt) + + dup.end('hello world') + dup.on('finish', function() { + t.ok(true, 'should finish') + }) + dup.pipe(concat(function(data) { + t.same(data.toString(), 'hello world', 'same in as out') + })) +}) + +tape('passthrough + double end', function(t) { + t.plan(2) + + var pt = through() + var dup = duplexify(pt, pt) + + dup.end('hello world') + dup.end() + + dup.on('finish', function() { + t.ok(true, 'should finish') + }) + dup.pipe(concat(function(data) { + t.same(data.toString(), 'hello world', 'same in as out') + })) +}) + +tape('async passthrough + end', function(t) { + t.plan(2) + + var pt = through.obj({highWaterMark:1}, function(data, enc, cb) { + setTimeout(function() { + cb(null, data) + }, 100) + }) + + var dup = duplexify(pt, pt) + + dup.write('hello ') + dup.write('world') + dup.end() + + dup.on('finish', function() { + t.ok(true, 'should finish') + }) + dup.pipe(concat(function(data) { + t.same(data.toString(), 'hello world', 'same in as out') + })) +}) + +tape('duplex', function(t) { + var readExpected = ['read-a', 'read-b', 'read-c'] + var writeExpected = ['write-a', 'write-b', 'write-c'] + + t.plan(readExpected.length+writeExpected.length+2) + + var readable = through.obj() + var writable = through.obj(function(data, enc, cb) { + t.same(data, writeExpected.shift(), 'onwrite should match') + cb() + }) + + var dup = duplexify.obj(writable, readable) + + readExpected.slice().forEach(function(data) { + readable.write(data) + }) + readable.end() + + writeExpected.slice().forEach(function(data) { + dup.write(data) + }) + dup.end() + + dup.on('data', function(data) { + t.same(data, readExpected.shift(), 'ondata should match') + }) + dup.on('end', function() { + t.ok(true, 'should end') + }) + dup.on('finish', function() { + t.ok(true, 'should finish') + }) +}) + +tape('async', function(t) { + var dup = duplexify() + var pt = through() + + dup.pipe(concat(function(data) { + t.same(data.toString(), 'i was async', 'same in as out') + t.end() + })) + + dup.write('i') + dup.write(' was ') + dup.end('async') + + setTimeout(function() { + dup.setWritable(pt) + setTimeout(function() { + dup.setReadable(pt) + }, 50) + }, 50) +}) + +tape('destroy', 
function(t) { + t.plan(2) + + var write = through() + var read = through() + var dup = duplexify(write, read) + + write.destroy = function() { + t.ok(true, 'write destroyed') + } + + dup.on('close', function() { + t.ok(true, 'close emitted') + }) + + dup.destroy() + dup.destroy() // should only work once + dup.end() +}) + +tape('destroy both', function(t) { + t.plan(3) + + var write = through() + var read = through() + var dup = duplexify(write, read) + + write.destroy = function() { + t.ok(true, 'write destroyed') + } + + read.destroy = function() { + t.ok(true, 'read destroyed') + } + + dup.on('close', function() { + t.ok(true, 'close emitted') + }) + + dup.destroy() + dup.destroy() // should only work once +}) + +tape('bubble read errors', function(t) { + t.plan(2) + + var write = through() + var read = through() + var dup = duplexify(write, read) + + dup.on('error', function(err) { + t.same(err.message, 'read-error', 'received read error') + }) + dup.on('close', function() { + t.ok(true, 'close emitted') + }) + + read.emit('error', new Error('read-error')) + write.emit('error', new Error('write-error')) // only emit first error +}) + +tape('bubble write errors', function(t) { + t.plan(2) + + var write = through() + var read = through() + var dup = duplexify(write, read) + + dup.on('error', function(err) { + t.same(err.message, 'write-error', 'received write error') + }) + dup.on('close', function() { + t.ok(true, 'close emitted') + }) + + write.emit('error', new Error('write-error')) + read.emit('error', new Error('read-error')) // only emit first error +}) + +tape('bubble errors from write()', function(t) { + t.plan(3) + + var errored = false + var dup = duplexify(new stream.Writable({ + write: function(chunk, enc, next) { + next(new Error('write-error')) + } + })) + + dup.on('error', function(err) { + errored = true + t.same(err.message, 'write-error', 'received write error') + }) + dup.on('close', function() { + t.pass('close emitted') + t.ok(errored, 'error was emitted before close') + }) + dup.end('123') +}) + +tape('destroy while waiting for drain', function(t) { + t.plan(3) + + var errored = false + var dup = duplexify(new stream.Writable({ + highWaterMark: 0, + write: function() {} + })) + + dup.on('error', function(err) { + errored = true + t.same(err.message, 'destroy-error', 'received destroy error') + }) + dup.on('close', function() { + t.pass('close emitted') + t.ok(errored, 'error was emitted before close') + }) + dup.write('123') + dup.destroy(new Error('destroy-error')) +}) + +tape('reset writable / readable', function(t) { + t.plan(3) + + var toUpperCase = function(data, enc, cb) { + cb(null, data.toString().toUpperCase()) + } + + var passthrough = through() + var upper = through(toUpperCase) + var dup = duplexify(passthrough, passthrough) + + dup.once('data', function(data) { + t.same(data.toString(), 'hello') + dup.setWritable(upper) + dup.setReadable(upper) + dup.once('data', function(data) { + t.same(data.toString(), 'HELLO') + dup.once('data', function(data) { + t.same(data.toString(), 'HI') + t.end() + }) + }) + dup.write('hello') + dup.write('hi') + }) + dup.write('hello') +}) + +tape('cork', function(t) { + var passthrough = through() + var dup = duplexify(passthrough, passthrough) + var ok = false + + dup.on('prefinish', function() { + dup.cork() + setTimeout(function() { + ok = true + dup.uncork() + }, 100) + }) + dup.on('finish', function() { + t.ok(ok) + t.end() + }) + dup.end() +}) + +tape('prefinish not twice', function(t) { + var passthrough = through() 
+ var dup = duplexify(passthrough, passthrough) + var prefinished = false + + dup.on('prefinish', function() { + t.ok(!prefinished, 'only prefinish once') + prefinished = true + }) + + dup.on('finish', function() { + t.end() + }) + + dup.end() +}) + +tape('close', function(t) { + var passthrough = through() + var dup = duplexify(passthrough, passthrough) + + passthrough.emit('close') + dup.on('close', function() { + t.ok(true, 'should forward close') + t.end() + }) +}) + +tape('works with node native streams (net)', function(t) { + t.plan(1) + + var server = net.createServer(function(socket) { + var dup = duplexify(socket, socket) + + dup.once('data', function(chunk) { + t.same(chunk, HELLO_WORLD) + server.close() + socket.end() + t.end() + }) + }) + + server.listen(0, function () { + var socket = net.connect(server.address().port) + var dup = duplexify(socket, socket) + + dup.write(HELLO_WORLD) + }) +}) diff --git a/node_modules/ecdsa-sig-formatter/CODEOWNERS b/node_modules/ecdsa-sig-formatter/CODEOWNERS new file mode 100644 index 0000000..4451d3d --- /dev/null +++ b/node_modules/ecdsa-sig-formatter/CODEOWNERS @@ -0,0 +1 @@ +* @omsmith diff --git a/node_modules/ecdsa-sig-formatter/LICENSE b/node_modules/ecdsa-sig-formatter/LICENSE new file mode 100644 index 0000000..8754ed6 --- /dev/null +++ b/node_modules/ecdsa-sig-formatter/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2015 D2L Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/ecdsa-sig-formatter/README.md b/node_modules/ecdsa-sig-formatter/README.md new file mode 100644 index 0000000..daa95d6 --- /dev/null +++ b/node_modules/ecdsa-sig-formatter/README.md @@ -0,0 +1,65 @@ +# ecdsa-sig-formatter + +[![Build Status](https://travis-ci.org/Brightspace/node-ecdsa-sig-formatter.svg?branch=master)](https://travis-ci.org/Brightspace/node-ecdsa-sig-formatter) [![Coverage Status](https://coveralls.io/repos/Brightspace/node-ecdsa-sig-formatter/badge.svg)](https://coveralls.io/r/Brightspace/node-ecdsa-sig-formatter) + +Translate between JOSE and ASN.1/DER encodings for ECDSA signatures + +## Install +```sh +npm install ecdsa-sig-formatter --save +``` + +## Usage +```js +var format = require('ecdsa-sig-formatter'); + +var derSignature = '..'; // asn.1/DER encoded ecdsa signature + +var joseSignature = format.derToJose(derSignature); + +``` + +### API + +--- + +#### `.derToJose(Buffer|String signature, String alg)` -> `String` + +Convert the ASN.1/DER encoded signature to a JOSE-style concatenated signature. +Returns a _base64 url_ encoded `String`. + +* If _signature_ is a `String`, it should be _base64_ encoded +* _alg_ must be one of _ES256_, _ES384_ or _ES512_ + +--- + +#### `.joseToDer(Buffer|String signature, String alg)` -> `Buffer` + +Convert the JOSE-style concatenated signature to an ASN.1/DER encoded +signature. Returns a `Buffer` + +* If _signature_ is a `String`, it should be _base64 url_ encoded +* _alg_ must be one of _ES256_, _ES384_ or _ES512_ + +## Contributing + +1. **Fork** the repository. Committing directly against this repository is + highly discouraged. + +2. Make your modifications in a branch, updating and writing new unit tests + as necessary in the `spec` directory. + +3. Ensure that all tests pass with `npm test` + +4. `rebase` your changes against master. *Do not merge*. + +5. Submit a pull request to this repository. Wait for tests to run and someone + to chime in. + +### Code Style + +This repository is configured with [EditorConfig][EditorConfig] and +[ESLint][ESLint] rules. 
+ +[EditorConfig]: http://editorconfig.org/ +[ESLint]: http://eslint.org diff --git a/node_modules/ecdsa-sig-formatter/package.json b/node_modules/ecdsa-sig-formatter/package.json new file mode 100644 index 0000000..6fb5ebf --- /dev/null +++ b/node_modules/ecdsa-sig-formatter/package.json @@ -0,0 +1,46 @@ +{ + "name": "ecdsa-sig-formatter", + "version": "1.0.11", + "description": "Translate ECDSA signatures between ASN.1/DER and JOSE-style concatenation", + "main": "src/ecdsa-sig-formatter.js", + "scripts": { + "check-style": "eslint .", + "pretest": "npm run check-style", + "test": "istanbul cover --root src _mocha -- spec", + "report-cov": "cat ./coverage/lcov.info | coveralls" + }, + "typings": "./src/ecdsa-sig-formatter.d.ts", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/Brightspace/node-ecdsa-sig-formatter.git" + }, + "keywords": [ + "ecdsa", + "der", + "asn.1", + "jwt", + "jwa", + "jsonwebtoken", + "jose" + ], + "author": "D2L Corporation", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/Brightspace/node-ecdsa-sig-formatter/issues" + }, + "homepage": "https://github.com/Brightspace/node-ecdsa-sig-formatter#readme", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "bench": "^0.3.6", + "chai": "^3.5.0", + "coveralls": "^2.11.9", + "eslint": "^2.12.0", + "eslint-config-brightspace": "^0.2.1", + "istanbul": "^0.4.3", + "jwk-to-pem": "^1.2.5", + "mocha": "^2.5.3", + "native-crypto": "^1.7.0" + } +} diff --git a/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.d.ts b/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.d.ts new file mode 100644 index 0000000..9693aa0 --- /dev/null +++ b/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.d.ts @@ -0,0 +1,17 @@ +/// + +declare module "ecdsa-sig-formatter" { + /** + * Convert the ASN.1/DER encoded signature to a JOSE-style concatenated signature. Returns a base64 url encoded String. + * If signature is a String, it should be base64 encoded + * alg must be one of ES256, ES384 or ES512 + */ + export function derToJose(signature: Buffer | string, alg: string): string; + + /** + * Convert the JOSE-style concatenated signature to an ASN.1/DER encoded signature. 
Returns a Buffer + * If signature is a String, it should be base64 url encoded + * alg must be one of ES256, ES384 or ES512 + */ + export function joseToDer(signature: Buffer | string, alg: string): Buffer +} diff --git a/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js b/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js new file mode 100644 index 0000000..38eeb9b --- /dev/null +++ b/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js @@ -0,0 +1,187 @@ +'use strict'; + +var Buffer = require('safe-buffer').Buffer; + +var getParamBytesForAlg = require('./param-bytes-for-alg'); + +var MAX_OCTET = 0x80, + CLASS_UNIVERSAL = 0, + PRIMITIVE_BIT = 0x20, + TAG_SEQ = 0x10, + TAG_INT = 0x02, + ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), + ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); + +function base64Url(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function signatureAsBuffer(signature) { + if (Buffer.isBuffer(signature)) { + return signature; + } else if ('string' === typeof signature) { + return Buffer.from(signature, 'base64'); + } + + throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +} + +function derToJose(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + // the DER encoded param should at most be the param size, plus a padding + // zero, since due to being a signed integer + var maxEncodedParamLength = paramBytes + 1; + + var inputLength = signature.length; + + var offset = 0; + if (signature[offset++] !== ENCODED_TAG_SEQ) { + throw new Error('Could not find expected "seq"'); + } + + var seqLength = signature[offset++]; + if (seqLength === (MAX_OCTET | 1)) { + seqLength = signature[offset++]; + } + + if (inputLength - offset < seqLength) { + throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); + } + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "r"'); + } + + var rLength = signature[offset++]; + + if (inputLength - offset - 2 < rLength) { + throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); + } + + if (maxEncodedParamLength < rLength) { + throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var rOffset = offset; + offset += rLength; + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "s"'); + } + + var sLength = signature[offset++]; + + if (inputLength - offset !== sLength) { + throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); + } + + if (maxEncodedParamLength < sLength) { + throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var sOffset = offset; + offset += sLength; + + if (offset !== inputLength) { + throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); + } + + var rPadding = paramBytes - rLength, + sPadding = paramBytes - sLength; + + var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); + + for (offset = 0; offset < rPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); + + offset = paramBytes; + + for (var o = offset; offset 
< o + sPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); + + dst = dst.toString('base64'); + dst = base64Url(dst); + + return dst; +} + +function countPadding(buf, start, stop) { + var padding = 0; + while (start + padding < stop && buf[start + padding] === 0) { + ++padding; + } + + var needsSign = buf[start + padding] >= MAX_OCTET; + if (needsSign) { + --padding; + } + + return padding; +} + +function joseToDer(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + var signatureBytes = signature.length; + if (signatureBytes !== paramBytes * 2) { + throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); + } + + var rPadding = countPadding(signature, 0, paramBytes); + var sPadding = countPadding(signature, paramBytes, signature.length); + var rLength = paramBytes - rPadding; + var sLength = paramBytes - sPadding; + + var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; + + var shortLength = rsBytes < MAX_OCTET; + + var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); + + var offset = 0; + dst[offset++] = ENCODED_TAG_SEQ; + if (shortLength) { + // Bit 8 has value "0" + // bits 7-1 give the length. + dst[offset++] = rsBytes; + } else { + // Bit 8 of first octet has value "1" + // bits 7-1 give the number of additional length octets. + dst[offset++] = MAX_OCTET | 1; + // length, base 256 + dst[offset++] = rsBytes & 0xff; + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = rLength; + if (rPadding < 0) { + dst[offset++] = 0; + offset += signature.copy(dst, offset, 0, paramBytes); + } else { + offset += signature.copy(dst, offset, rPadding, paramBytes); + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = sLength; + if (sPadding < 0) { + dst[offset++] = 0; + signature.copy(dst, offset, paramBytes); + } else { + signature.copy(dst, offset, paramBytes + sPadding); + } + + return dst; +} + +module.exports = { + derToJose: derToJose, + joseToDer: joseToDer +}; diff --git a/node_modules/ecdsa-sig-formatter/src/param-bytes-for-alg.js b/node_modules/ecdsa-sig-formatter/src/param-bytes-for-alg.js new file mode 100644 index 0000000..9fe67ac --- /dev/null +++ b/node_modules/ecdsa-sig-formatter/src/param-bytes-for-alg.js @@ -0,0 +1,23 @@ +'use strict'; + +function getParamSize(keySize) { + var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 
0 : 1); + return result; +} + +var paramBytesForAlg = { + ES256: getParamSize(256), + ES384: getParamSize(384), + ES512: getParamSize(521) +}; + +function getParamBytesForAlg(alg) { + var paramBytes = paramBytesForAlg[alg]; + if (paramBytes) { + return paramBytes; + } + + throw new Error('Unknown algorithm "' + alg + '"'); +} + +module.exports = getParamBytesForAlg; diff --git a/node_modules/end-of-stream/LICENSE b/node_modules/end-of-stream/LICENSE new file mode 100644 index 0000000..757562e --- /dev/null +++ b/node_modules/end-of-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/end-of-stream/README.md b/node_modules/end-of-stream/README.md new file mode 100644 index 0000000..857b14b --- /dev/null +++ b/node_modules/end-of-stream/README.md @@ -0,0 +1,54 @@ +# end-of-stream + +A node module that calls a callback when a readable/writable/duplex stream has completed or failed. + + npm install end-of-stream + +[![Build status](https://travis-ci.org/mafintosh/end-of-stream.svg?branch=master)](https://travis-ci.org/mafintosh/end-of-stream) + +## Usage + +Simply pass a stream and a callback to the `eos`. +Both legacy streams, streams2 and stream3 are supported. 
+ +``` js +var eos = require('end-of-stream'); + +eos(readableStream, function(err) { + // this will be set to the stream instance + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended', this === readableStream); +}); + +eos(writableStream, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has finished', this === writableStream); +}); + +eos(duplexStream, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended and finished', this === duplexStream); +}); + +eos(duplexStream, {readable:false}, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has finished but might still be readable'); +}); + +eos(duplexStream, {writable:false}, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended but might still be writable'); +}); + +eos(readableStream, {error:false}, function(err) { + // do not treat emit('error', err) as a end-of-stream +}); +``` + +## License + +MIT + +## Related + +`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. diff --git a/node_modules/end-of-stream/index.js b/node_modules/end-of-stream/index.js new file mode 100644 index 0000000..c77f0d5 --- /dev/null +++ b/node_modules/end-of-stream/index.js @@ -0,0 +1,94 @@ +var once = require('once'); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; diff --git a/node_modules/end-of-stream/package.json b/node_modules/end-of-stream/package.json new file mode 100644 index 0000000..b75bbf0 --- /dev/null +++ b/node_modules/end-of-stream/package.json @@ -0,0 +1,37 @@ +{ + "name": "end-of-stream", + "version": "1.4.4", + "description": "Call a callback when a readable/writable/duplex stream has completed or failed.", + "repository": { + "type": "git", + "url": "git://github.com/mafintosh/end-of-stream.git" + }, + "dependencies": { + "once": "^1.4.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "stream", + "streams", + "callback", + "finish", + "close", + "end", + "wait" + ], + "bugs": { + "url": "https://github.com/mafintosh/end-of-stream/issues" + }, + "homepage": "https://github.com/mafintosh/end-of-stream", + "main": "index.js", + "author": "Mathias Buus ", + "license": "MIT", + "devDependencies": { + "tape": "^4.11.0" + } +} diff --git a/node_modules/ent/.npmignore b/node_modules/ent/.npmignore new file mode 100644 index 0000000..378eac2 --- /dev/null +++ b/node_modules/ent/.npmignore @@ -0,0 +1 @@ +build diff --git a/node_modules/ent/.travis.yml b/node_modules/ent/.travis.yml new file mode 100644 index 0000000..895dbd3 --- /dev/null +++ b/node_modules/ent/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.6 + - 0.8 diff --git a/node_modules/ent/LICENSE b/node_modules/ent/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/ent/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/ent/decode.js b/node_modules/ent/decode.js new file mode 100644 index 0000000..fee2e90 --- /dev/null +++ b/node_modules/ent/decode.js @@ -0,0 +1,32 @@ +var punycode = require('punycode'); +var entities = require('./entities.json'); + +module.exports = decode; + +function decode (str) { + if (typeof str !== 'string') { + throw new TypeError('Expected a String'); + } + + return str.replace(/&(#?[^;\W]+;?)/g, function (_, match) { + var m; + if (m = /^#(\d+);?$/.exec(match)) { + return punycode.ucs2.encode([ parseInt(m[1], 10) ]); + } else if (m = /^#[Xx]([A-Fa-f0-9]+);?/.exec(match)) { + return punycode.ucs2.encode([ parseInt(m[1], 16) ]); + } else { + // named entity + var hasSemi = /;$/.test(match); + var withoutSemi = hasSemi ? match.replace(/;$/, '') : match; + var target = entities[withoutSemi] || (hasSemi && entities[match]); + + if (typeof target === 'number') { + return punycode.ucs2.encode([ target ]); + } else if (typeof target === 'string') { + return target; + } else { + return '&' + match; + } + } + }); +} diff --git a/node_modules/ent/encode.js b/node_modules/ent/encode.js new file mode 100644 index 0000000..f48764c --- /dev/null +++ b/node_modules/ent/encode.js @@ -0,0 +1,39 @@ +var punycode = require('punycode'); +var revEntities = require('./reversed.json'); + +module.exports = encode; + +function encode (str, opts) { + if (typeof str !== 'string') { + throw new TypeError('Expected a String'); + } + if (!opts) opts = {}; + + var numeric = true; + if (opts.named) numeric = false; + if (opts.numeric !== undefined) numeric = opts.numeric; + + var special = opts.special || { + '"': true, "'": true, + '<': true, '>': true, + '&': true + }; + + var codePoints = punycode.ucs2.decode(str); + var chars = []; + for (var i = 0; i < codePoints.length; i++) { + var cc = codePoints[i]; + var c = punycode.ucs2.encode([ cc ]); + var e = revEntities[cc]; + if (e && (cc >= 127 || special[c]) && !numeric) { + chars.push('&' + (/;$/.test(e) ? 
e : e + ';')); + } + else if (cc < 32 || cc >= 127 || special[c]) { + chars.push('&#' + cc + ';'); + } + else { + chars.push(c); + } + } + return chars.join(''); +} diff --git a/node_modules/ent/entities.json b/node_modules/ent/entities.json new file mode 100644 index 0000000..69e4cf0 --- /dev/null +++ b/node_modules/ent/entities.json @@ -0,0 +1,2233 @@ +{ + "Aacute;": "\u00C1", + "Aacute": "\u00C1", + "aacute;": "\u00E1", + "aacute": "\u00E1", + "Abreve;": "\u0102", + "abreve;": "\u0103", + "ac;": "\u223E", + "acd;": "\u223F", + "acE;": "\u223E\u0333", + "Acirc;": "\u00C2", + "Acirc": "\u00C2", + "acirc;": "\u00E2", + "acirc": "\u00E2", + "acute;": "\u00B4", + "acute": "\u00B4", + "Acy;": "\u0410", + "acy;": "\u0430", + "AElig;": "\u00C6", + "AElig": "\u00C6", + "aelig;": "\u00E6", + "aelig": "\u00E6", + "af;": "\u2061", + "Afr;": "\uD835\uDD04", + "afr;": "\uD835\uDD1E", + "Agrave;": "\u00C0", + "Agrave": "\u00C0", + "agrave;": "\u00E0", + "agrave": "\u00E0", + "alefsym;": "\u2135", + "aleph;": "\u2135", + "Alpha;": "\u0391", + "alpha;": "\u03B1", + "Amacr;": "\u0100", + "amacr;": "\u0101", + "amalg;": "\u2A3F", + "AMP;": "&", + "AMP": "&", + "amp;": "&", + "amp": "&", + "And;": "\u2A53", + "and;": "\u2227", + "andand;": "\u2A55", + "andd;": "\u2A5C", + "andslope;": "\u2A58", + "andv;": "\u2A5A", + "ang;": "\u2220", + "ange;": "\u29A4", + "angle;": "\u2220", + "angmsd;": "\u2221", + "angmsdaa;": "\u29A8", + "angmsdab;": "\u29A9", + "angmsdac;": "\u29AA", + "angmsdad;": "\u29AB", + "angmsdae;": "\u29AC", + "angmsdaf;": "\u29AD", + "angmsdag;": "\u29AE", + "angmsdah;": "\u29AF", + "angrt;": "\u221F", + "angrtvb;": "\u22BE", + "angrtvbd;": "\u299D", + "angsph;": "\u2222", + "angst;": "\u00C5", + "angzarr;": "\u237C", + "Aogon;": "\u0104", + "aogon;": "\u0105", + "Aopf;": "\uD835\uDD38", + "aopf;": "\uD835\uDD52", + "ap;": "\u2248", + "apacir;": "\u2A6F", + "apE;": "\u2A70", + "ape;": "\u224A", + "apid;": "\u224B", + "apos;": "'", + "ApplyFunction;": "\u2061", + "approx;": "\u2248", + "approxeq;": "\u224A", + "Aring;": "\u00C5", + "Aring": "\u00C5", + "aring;": "\u00E5", + "aring": "\u00E5", + "Ascr;": "\uD835\uDC9C", + "ascr;": "\uD835\uDCB6", + "Assign;": "\u2254", + "ast;": "*", + "asymp;": "\u2248", + "asympeq;": "\u224D", + "Atilde;": "\u00C3", + "Atilde": "\u00C3", + "atilde;": "\u00E3", + "atilde": "\u00E3", + "Auml;": "\u00C4", + "Auml": "\u00C4", + "auml;": "\u00E4", + "auml": "\u00E4", + "awconint;": "\u2233", + "awint;": "\u2A11", + "backcong;": "\u224C", + "backepsilon;": "\u03F6", + "backprime;": "\u2035", + "backsim;": "\u223D", + "backsimeq;": "\u22CD", + "Backslash;": "\u2216", + "Barv;": "\u2AE7", + "barvee;": "\u22BD", + "Barwed;": "\u2306", + "barwed;": "\u2305", + "barwedge;": "\u2305", + "bbrk;": "\u23B5", + "bbrktbrk;": "\u23B6", + "bcong;": "\u224C", + "Bcy;": "\u0411", + "bcy;": "\u0431", + "bdquo;": "\u201E", + "becaus;": "\u2235", + "Because;": "\u2235", + "because;": "\u2235", + "bemptyv;": "\u29B0", + "bepsi;": "\u03F6", + "bernou;": "\u212C", + "Bernoullis;": "\u212C", + "Beta;": "\u0392", + "beta;": "\u03B2", + "beth;": "\u2136", + "between;": "\u226C", + "Bfr;": "\uD835\uDD05", + "bfr;": "\uD835\uDD1F", + "bigcap;": "\u22C2", + "bigcirc;": "\u25EF", + "bigcup;": "\u22C3", + "bigodot;": "\u2A00", + "bigoplus;": "\u2A01", + "bigotimes;": "\u2A02", + "bigsqcup;": "\u2A06", + "bigstar;": "\u2605", + "bigtriangledown;": "\u25BD", + "bigtriangleup;": "\u25B3", + "biguplus;": "\u2A04", + "bigvee;": "\u22C1", + "bigwedge;": "\u22C0", + "bkarow;": "\u290D", + 
"blacklozenge;": "\u29EB", + "blacksquare;": "\u25AA", + "blacktriangle;": "\u25B4", + "blacktriangledown;": "\u25BE", + "blacktriangleleft;": "\u25C2", + "blacktriangleright;": "\u25B8", + "blank;": "\u2423", + "blk12;": "\u2592", + "blk14;": "\u2591", + "blk34;": "\u2593", + "block;": "\u2588", + "bne;": "=\u20E5", + "bnequiv;": "\u2261\u20E5", + "bNot;": "\u2AED", + "bnot;": "\u2310", + "Bopf;": "\uD835\uDD39", + "bopf;": "\uD835\uDD53", + "bot;": "\u22A5", + "bottom;": "\u22A5", + "bowtie;": "\u22C8", + "boxbox;": "\u29C9", + "boxDL;": "\u2557", + "boxDl;": "\u2556", + "boxdL;": "\u2555", + "boxdl;": "\u2510", + "boxDR;": "\u2554", + "boxDr;": "\u2553", + "boxdR;": "\u2552", + "boxdr;": "\u250C", + "boxH;": "\u2550", + "boxh;": "\u2500", + "boxHD;": "\u2566", + "boxHd;": "\u2564", + "boxhD;": "\u2565", + "boxhd;": "\u252C", + "boxHU;": "\u2569", + "boxHu;": "\u2567", + "boxhU;": "\u2568", + "boxhu;": "\u2534", + "boxminus;": "\u229F", + "boxplus;": "\u229E", + "boxtimes;": "\u22A0", + "boxUL;": "\u255D", + "boxUl;": "\u255C", + "boxuL;": "\u255B", + "boxul;": "\u2518", + "boxUR;": "\u255A", + "boxUr;": "\u2559", + "boxuR;": "\u2558", + "boxur;": "\u2514", + "boxV;": "\u2551", + "boxv;": "\u2502", + "boxVH;": "\u256C", + "boxVh;": "\u256B", + "boxvH;": "\u256A", + "boxvh;": "\u253C", + "boxVL;": "\u2563", + "boxVl;": "\u2562", + "boxvL;": "\u2561", + "boxvl;": "\u2524", + "boxVR;": "\u2560", + "boxVr;": "\u255F", + "boxvR;": "\u255E", + "boxvr;": "\u251C", + "bprime;": "\u2035", + "Breve;": "\u02D8", + "breve;": "\u02D8", + "brvbar;": "\u00A6", + "brvbar": "\u00A6", + "Bscr;": "\u212C", + "bscr;": "\uD835\uDCB7", + "bsemi;": "\u204F", + "bsim;": "\u223D", + "bsime;": "\u22CD", + "bsol;": "\\", + "bsolb;": "\u29C5", + "bsolhsub;": "\u27C8", + "bull;": "\u2022", + "bullet;": "\u2022", + "bump;": "\u224E", + "bumpE;": "\u2AAE", + "bumpe;": "\u224F", + "Bumpeq;": "\u224E", + "bumpeq;": "\u224F", + "Cacute;": "\u0106", + "cacute;": "\u0107", + "Cap;": "\u22D2", + "cap;": "\u2229", + "capand;": "\u2A44", + "capbrcup;": "\u2A49", + "capcap;": "\u2A4B", + "capcup;": "\u2A47", + "capdot;": "\u2A40", + "CapitalDifferentialD;": "\u2145", + "caps;": "\u2229\uFE00", + "caret;": "\u2041", + "caron;": "\u02C7", + "Cayleys;": "\u212D", + "ccaps;": "\u2A4D", + "Ccaron;": "\u010C", + "ccaron;": "\u010D", + "Ccedil;": "\u00C7", + "Ccedil": "\u00C7", + "ccedil;": "\u00E7", + "ccedil": "\u00E7", + "Ccirc;": "\u0108", + "ccirc;": "\u0109", + "Cconint;": "\u2230", + "ccups;": "\u2A4C", + "ccupssm;": "\u2A50", + "Cdot;": "\u010A", + "cdot;": "\u010B", + "cedil;": "\u00B8", + "cedil": "\u00B8", + "Cedilla;": "\u00B8", + "cemptyv;": "\u29B2", + "cent;": "\u00A2", + "cent": "\u00A2", + "CenterDot;": "\u00B7", + "centerdot;": "\u00B7", + "Cfr;": "\u212D", + "cfr;": "\uD835\uDD20", + "CHcy;": "\u0427", + "chcy;": "\u0447", + "check;": "\u2713", + "checkmark;": "\u2713", + "Chi;": "\u03A7", + "chi;": "\u03C7", + "cir;": "\u25CB", + "circ;": "\u02C6", + "circeq;": "\u2257", + "circlearrowleft;": "\u21BA", + "circlearrowright;": "\u21BB", + "circledast;": "\u229B", + "circledcirc;": "\u229A", + "circleddash;": "\u229D", + "CircleDot;": "\u2299", + "circledR;": "\u00AE", + "circledS;": "\u24C8", + "CircleMinus;": "\u2296", + "CirclePlus;": "\u2295", + "CircleTimes;": "\u2297", + "cirE;": "\u29C3", + "cire;": "\u2257", + "cirfnint;": "\u2A10", + "cirmid;": "\u2AEF", + "cirscir;": "\u29C2", + "ClockwiseContourIntegral;": "\u2232", + "CloseCurlyDoubleQuote;": "\u201D", + "CloseCurlyQuote;": "\u2019", + "clubs;": "\u2663", 
+ "clubsuit;": "\u2663", + "Colon;": "\u2237", + "colon;": ":", + "Colone;": "\u2A74", + "colone;": "\u2254", + "coloneq;": "\u2254", + "comma;": ",", + "commat;": "@", + "comp;": "\u2201", + "compfn;": "\u2218", + "complement;": "\u2201", + "complexes;": "\u2102", + "cong;": "\u2245", + "congdot;": "\u2A6D", + "Congruent;": "\u2261", + "Conint;": "\u222F", + "conint;": "\u222E", + "ContourIntegral;": "\u222E", + "Copf;": "\u2102", + "copf;": "\uD835\uDD54", + "coprod;": "\u2210", + "Coproduct;": "\u2210", + "COPY;": "\u00A9", + "COPY": "\u00A9", + "copy;": "\u00A9", + "copy": "\u00A9", + "copysr;": "\u2117", + "CounterClockwiseContourIntegral;": "\u2233", + "crarr;": "\u21B5", + "Cross;": "\u2A2F", + "cross;": "\u2717", + "Cscr;": "\uD835\uDC9E", + "cscr;": "\uD835\uDCB8", + "csub;": "\u2ACF", + "csube;": "\u2AD1", + "csup;": "\u2AD0", + "csupe;": "\u2AD2", + "ctdot;": "\u22EF", + "cudarrl;": "\u2938", + "cudarrr;": "\u2935", + "cuepr;": "\u22DE", + "cuesc;": "\u22DF", + "cularr;": "\u21B6", + "cularrp;": "\u293D", + "Cup;": "\u22D3", + "cup;": "\u222A", + "cupbrcap;": "\u2A48", + "CupCap;": "\u224D", + "cupcap;": "\u2A46", + "cupcup;": "\u2A4A", + "cupdot;": "\u228D", + "cupor;": "\u2A45", + "cups;": "\u222A\uFE00", + "curarr;": "\u21B7", + "curarrm;": "\u293C", + "curlyeqprec;": "\u22DE", + "curlyeqsucc;": "\u22DF", + "curlyvee;": "\u22CE", + "curlywedge;": "\u22CF", + "curren;": "\u00A4", + "curren": "\u00A4", + "curvearrowleft;": "\u21B6", + "curvearrowright;": "\u21B7", + "cuvee;": "\u22CE", + "cuwed;": "\u22CF", + "cwconint;": "\u2232", + "cwint;": "\u2231", + "cylcty;": "\u232D", + "Dagger;": "\u2021", + "dagger;": "\u2020", + "daleth;": "\u2138", + "Darr;": "\u21A1", + "dArr;": "\u21D3", + "darr;": "\u2193", + "dash;": "\u2010", + "Dashv;": "\u2AE4", + "dashv;": "\u22A3", + "dbkarow;": "\u290F", + "dblac;": "\u02DD", + "Dcaron;": "\u010E", + "dcaron;": "\u010F", + "Dcy;": "\u0414", + "dcy;": "\u0434", + "DD;": "\u2145", + "dd;": "\u2146", + "ddagger;": "\u2021", + "ddarr;": "\u21CA", + "DDotrahd;": "\u2911", + "ddotseq;": "\u2A77", + "deg;": "\u00B0", + "deg": "\u00B0", + "Del;": "\u2207", + "Delta;": "\u0394", + "delta;": "\u03B4", + "demptyv;": "\u29B1", + "dfisht;": "\u297F", + "Dfr;": "\uD835\uDD07", + "dfr;": "\uD835\uDD21", + "dHar;": "\u2965", + "dharl;": "\u21C3", + "dharr;": "\u21C2", + "DiacriticalAcute;": "\u00B4", + "DiacriticalDot;": "\u02D9", + "DiacriticalDoubleAcute;": "\u02DD", + "DiacriticalGrave;": "`", + "DiacriticalTilde;": "\u02DC", + "diam;": "\u22C4", + "Diamond;": "\u22C4", + "diamond;": "\u22C4", + "diamondsuit;": "\u2666", + "diams;": "\u2666", + "die;": "\u00A8", + "DifferentialD;": "\u2146", + "digamma;": "\u03DD", + "disin;": "\u22F2", + "div;": "\u00F7", + "divide;": "\u00F7", + "divide": "\u00F7", + "divideontimes;": "\u22C7", + "divonx;": "\u22C7", + "DJcy;": "\u0402", + "djcy;": "\u0452", + "dlcorn;": "\u231E", + "dlcrop;": "\u230D", + "dollar;": "$", + "Dopf;": "\uD835\uDD3B", + "dopf;": "\uD835\uDD55", + "Dot;": "\u00A8", + "dot;": "\u02D9", + "DotDot;": "\u20DC", + "doteq;": "\u2250", + "doteqdot;": "\u2251", + "DotEqual;": "\u2250", + "dotminus;": "\u2238", + "dotplus;": "\u2214", + "dotsquare;": "\u22A1", + "doublebarwedge;": "\u2306", + "DoubleContourIntegral;": "\u222F", + "DoubleDot;": "\u00A8", + "DoubleDownArrow;": "\u21D3", + "DoubleLeftArrow;": "\u21D0", + "DoubleLeftRightArrow;": "\u21D4", + "DoubleLeftTee;": "\u2AE4", + "DoubleLongLeftArrow;": "\u27F8", + "DoubleLongLeftRightArrow;": "\u27FA", + "DoubleLongRightArrow;": "\u27F9", + 
"DoubleRightArrow;": "\u21D2", + "DoubleRightTee;": "\u22A8", + "DoubleUpArrow;": "\u21D1", + "DoubleUpDownArrow;": "\u21D5", + "DoubleVerticalBar;": "\u2225", + "DownArrow;": "\u2193", + "Downarrow;": "\u21D3", + "downarrow;": "\u2193", + "DownArrowBar;": "\u2913", + "DownArrowUpArrow;": "\u21F5", + "DownBreve;": "\u0311", + "downdownarrows;": "\u21CA", + "downharpoonleft;": "\u21C3", + "downharpoonright;": "\u21C2", + "DownLeftRightVector;": "\u2950", + "DownLeftTeeVector;": "\u295E", + "DownLeftVector;": "\u21BD", + "DownLeftVectorBar;": "\u2956", + "DownRightTeeVector;": "\u295F", + "DownRightVector;": "\u21C1", + "DownRightVectorBar;": "\u2957", + "DownTee;": "\u22A4", + "DownTeeArrow;": "\u21A7", + "drbkarow;": "\u2910", + "drcorn;": "\u231F", + "drcrop;": "\u230C", + "Dscr;": "\uD835\uDC9F", + "dscr;": "\uD835\uDCB9", + "DScy;": "\u0405", + "dscy;": "\u0455", + "dsol;": "\u29F6", + "Dstrok;": "\u0110", + "dstrok;": "\u0111", + "dtdot;": "\u22F1", + "dtri;": "\u25BF", + "dtrif;": "\u25BE", + "duarr;": "\u21F5", + "duhar;": "\u296F", + "dwangle;": "\u29A6", + "DZcy;": "\u040F", + "dzcy;": "\u045F", + "dzigrarr;": "\u27FF", + "Eacute;": "\u00C9", + "Eacute": "\u00C9", + "eacute;": "\u00E9", + "eacute": "\u00E9", + "easter;": "\u2A6E", + "Ecaron;": "\u011A", + "ecaron;": "\u011B", + "ecir;": "\u2256", + "Ecirc;": "\u00CA", + "Ecirc": "\u00CA", + "ecirc;": "\u00EA", + "ecirc": "\u00EA", + "ecolon;": "\u2255", + "Ecy;": "\u042D", + "ecy;": "\u044D", + "eDDot;": "\u2A77", + "Edot;": "\u0116", + "eDot;": "\u2251", + "edot;": "\u0117", + "ee;": "\u2147", + "efDot;": "\u2252", + "Efr;": "\uD835\uDD08", + "efr;": "\uD835\uDD22", + "eg;": "\u2A9A", + "Egrave;": "\u00C8", + "Egrave": "\u00C8", + "egrave;": "\u00E8", + "egrave": "\u00E8", + "egs;": "\u2A96", + "egsdot;": "\u2A98", + "el;": "\u2A99", + "Element;": "\u2208", + "elinters;": "\u23E7", + "ell;": "\u2113", + "els;": "\u2A95", + "elsdot;": "\u2A97", + "Emacr;": "\u0112", + "emacr;": "\u0113", + "empty;": "\u2205", + "emptyset;": "\u2205", + "EmptySmallSquare;": "\u25FB", + "emptyv;": "\u2205", + "EmptyVerySmallSquare;": "\u25AB", + "emsp;": "\u2003", + "emsp13;": "\u2004", + "emsp14;": "\u2005", + "ENG;": "\u014A", + "eng;": "\u014B", + "ensp;": "\u2002", + "Eogon;": "\u0118", + "eogon;": "\u0119", + "Eopf;": "\uD835\uDD3C", + "eopf;": "\uD835\uDD56", + "epar;": "\u22D5", + "eparsl;": "\u29E3", + "eplus;": "\u2A71", + "epsi;": "\u03B5", + "Epsilon;": "\u0395", + "epsilon;": "\u03B5", + "epsiv;": "\u03F5", + "eqcirc;": "\u2256", + "eqcolon;": "\u2255", + "eqsim;": "\u2242", + "eqslantgtr;": "\u2A96", + "eqslantless;": "\u2A95", + "Equal;": "\u2A75", + "equals;": "=", + "EqualTilde;": "\u2242", + "equest;": "\u225F", + "Equilibrium;": "\u21CC", + "equiv;": "\u2261", + "equivDD;": "\u2A78", + "eqvparsl;": "\u29E5", + "erarr;": "\u2971", + "erDot;": "\u2253", + "Escr;": "\u2130", + "escr;": "\u212F", + "esdot;": "\u2250", + "Esim;": "\u2A73", + "esim;": "\u2242", + "Eta;": "\u0397", + "eta;": "\u03B7", + "ETH;": "\u00D0", + "ETH": "\u00D0", + "eth;": "\u00F0", + "eth": "\u00F0", + "Euml;": "\u00CB", + "Euml": "\u00CB", + "euml;": "\u00EB", + "euml": "\u00EB", + "euro;": "\u20AC", + "excl;": "!", + "exist;": "\u2203", + "Exists;": "\u2203", + "expectation;": "\u2130", + "ExponentialE;": "\u2147", + "exponentiale;": "\u2147", + "fallingdotseq;": "\u2252", + "Fcy;": "\u0424", + "fcy;": "\u0444", + "female;": "\u2640", + "ffilig;": "\uFB03", + "fflig;": "\uFB00", + "ffllig;": "\uFB04", + "Ffr;": "\uD835\uDD09", + "ffr;": "\uD835\uDD23", + 
"filig;": "\uFB01", + "FilledSmallSquare;": "\u25FC", + "FilledVerySmallSquare;": "\u25AA", + "fjlig;": "fj", + "flat;": "\u266D", + "fllig;": "\uFB02", + "fltns;": "\u25B1", + "fnof;": "\u0192", + "Fopf;": "\uD835\uDD3D", + "fopf;": "\uD835\uDD57", + "ForAll;": "\u2200", + "forall;": "\u2200", + "fork;": "\u22D4", + "forkv;": "\u2AD9", + "Fouriertrf;": "\u2131", + "fpartint;": "\u2A0D", + "frac12;": "\u00BD", + "frac12": "\u00BD", + "frac13;": "\u2153", + "frac14;": "\u00BC", + "frac14": "\u00BC", + "frac15;": "\u2155", + "frac16;": "\u2159", + "frac18;": "\u215B", + "frac23;": "\u2154", + "frac25;": "\u2156", + "frac34;": "\u00BE", + "frac34": "\u00BE", + "frac35;": "\u2157", + "frac38;": "\u215C", + "frac45;": "\u2158", + "frac56;": "\u215A", + "frac58;": "\u215D", + "frac78;": "\u215E", + "frasl;": "\u2044", + "frown;": "\u2322", + "Fscr;": "\u2131", + "fscr;": "\uD835\uDCBB", + "gacute;": "\u01F5", + "Gamma;": "\u0393", + "gamma;": "\u03B3", + "Gammad;": "\u03DC", + "gammad;": "\u03DD", + "gap;": "\u2A86", + "Gbreve;": "\u011E", + "gbreve;": "\u011F", + "Gcedil;": "\u0122", + "Gcirc;": "\u011C", + "gcirc;": "\u011D", + "Gcy;": "\u0413", + "gcy;": "\u0433", + "Gdot;": "\u0120", + "gdot;": "\u0121", + "gE;": "\u2267", + "ge;": "\u2265", + "gEl;": "\u2A8C", + "gel;": "\u22DB", + "geq;": "\u2265", + "geqq;": "\u2267", + "geqslant;": "\u2A7E", + "ges;": "\u2A7E", + "gescc;": "\u2AA9", + "gesdot;": "\u2A80", + "gesdoto;": "\u2A82", + "gesdotol;": "\u2A84", + "gesl;": "\u22DB\uFE00", + "gesles;": "\u2A94", + "Gfr;": "\uD835\uDD0A", + "gfr;": "\uD835\uDD24", + "Gg;": "\u22D9", + "gg;": "\u226B", + "ggg;": "\u22D9", + "gimel;": "\u2137", + "GJcy;": "\u0403", + "gjcy;": "\u0453", + "gl;": "\u2277", + "gla;": "\u2AA5", + "glE;": "\u2A92", + "glj;": "\u2AA4", + "gnap;": "\u2A8A", + "gnapprox;": "\u2A8A", + "gnE;": "\u2269", + "gne;": "\u2A88", + "gneq;": "\u2A88", + "gneqq;": "\u2269", + "gnsim;": "\u22E7", + "Gopf;": "\uD835\uDD3E", + "gopf;": "\uD835\uDD58", + "grave;": "`", + "GreaterEqual;": "\u2265", + "GreaterEqualLess;": "\u22DB", + "GreaterFullEqual;": "\u2267", + "GreaterGreater;": "\u2AA2", + "GreaterLess;": "\u2277", + "GreaterSlantEqual;": "\u2A7E", + "GreaterTilde;": "\u2273", + "Gscr;": "\uD835\uDCA2", + "gscr;": "\u210A", + "gsim;": "\u2273", + "gsime;": "\u2A8E", + "gsiml;": "\u2A90", + "GT;": ">", + "GT": ">", + "Gt;": "\u226B", + "gt;": ">", + "gt": ">", + "gtcc;": "\u2AA7", + "gtcir;": "\u2A7A", + "gtdot;": "\u22D7", + "gtlPar;": "\u2995", + "gtquest;": "\u2A7C", + "gtrapprox;": "\u2A86", + "gtrarr;": "\u2978", + "gtrdot;": "\u22D7", + "gtreqless;": "\u22DB", + "gtreqqless;": "\u2A8C", + "gtrless;": "\u2277", + "gtrsim;": "\u2273", + "gvertneqq;": "\u2269\uFE00", + "gvnE;": "\u2269\uFE00", + "Hacek;": "\u02C7", + "hairsp;": "\u200A", + "half;": "\u00BD", + "hamilt;": "\u210B", + "HARDcy;": "\u042A", + "hardcy;": "\u044A", + "hArr;": "\u21D4", + "harr;": "\u2194", + "harrcir;": "\u2948", + "harrw;": "\u21AD", + "Hat;": "^", + "hbar;": "\u210F", + "Hcirc;": "\u0124", + "hcirc;": "\u0125", + "hearts;": "\u2665", + "heartsuit;": "\u2665", + "hellip;": "\u2026", + "hercon;": "\u22B9", + "Hfr;": "\u210C", + "hfr;": "\uD835\uDD25", + "HilbertSpace;": "\u210B", + "hksearow;": "\u2925", + "hkswarow;": "\u2926", + "hoarr;": "\u21FF", + "homtht;": "\u223B", + "hookleftarrow;": "\u21A9", + "hookrightarrow;": "\u21AA", + "Hopf;": "\u210D", + "hopf;": "\uD835\uDD59", + "horbar;": "\u2015", + "HorizontalLine;": "\u2500", + "Hscr;": "\u210B", + "hscr;": "\uD835\uDCBD", + "hslash;": "\u210F", + 
"Hstrok;": "\u0126", + "hstrok;": "\u0127", + "HumpDownHump;": "\u224E", + "HumpEqual;": "\u224F", + "hybull;": "\u2043", + "hyphen;": "\u2010", + "Iacute;": "\u00CD", + "Iacute": "\u00CD", + "iacute;": "\u00ED", + "iacute": "\u00ED", + "ic;": "\u2063", + "Icirc;": "\u00CE", + "Icirc": "\u00CE", + "icirc;": "\u00EE", + "icirc": "\u00EE", + "Icy;": "\u0418", + "icy;": "\u0438", + "Idot;": "\u0130", + "IEcy;": "\u0415", + "iecy;": "\u0435", + "iexcl;": "\u00A1", + "iexcl": "\u00A1", + "iff;": "\u21D4", + "Ifr;": "\u2111", + "ifr;": "\uD835\uDD26", + "Igrave;": "\u00CC", + "Igrave": "\u00CC", + "igrave;": "\u00EC", + "igrave": "\u00EC", + "ii;": "\u2148", + "iiiint;": "\u2A0C", + "iiint;": "\u222D", + "iinfin;": "\u29DC", + "iiota;": "\u2129", + "IJlig;": "\u0132", + "ijlig;": "\u0133", + "Im;": "\u2111", + "Imacr;": "\u012A", + "imacr;": "\u012B", + "image;": "\u2111", + "ImaginaryI;": "\u2148", + "imagline;": "\u2110", + "imagpart;": "\u2111", + "imath;": "\u0131", + "imof;": "\u22B7", + "imped;": "\u01B5", + "Implies;": "\u21D2", + "in;": "\u2208", + "incare;": "\u2105", + "infin;": "\u221E", + "infintie;": "\u29DD", + "inodot;": "\u0131", + "Int;": "\u222C", + "int;": "\u222B", + "intcal;": "\u22BA", + "integers;": "\u2124", + "Integral;": "\u222B", + "intercal;": "\u22BA", + "Intersection;": "\u22C2", + "intlarhk;": "\u2A17", + "intprod;": "\u2A3C", + "InvisibleComma;": "\u2063", + "InvisibleTimes;": "\u2062", + "IOcy;": "\u0401", + "iocy;": "\u0451", + "Iogon;": "\u012E", + "iogon;": "\u012F", + "Iopf;": "\uD835\uDD40", + "iopf;": "\uD835\uDD5A", + "Iota;": "\u0399", + "iota;": "\u03B9", + "iprod;": "\u2A3C", + "iquest;": "\u00BF", + "iquest": "\u00BF", + "Iscr;": "\u2110", + "iscr;": "\uD835\uDCBE", + "isin;": "\u2208", + "isindot;": "\u22F5", + "isinE;": "\u22F9", + "isins;": "\u22F4", + "isinsv;": "\u22F3", + "isinv;": "\u2208", + "it;": "\u2062", + "Itilde;": "\u0128", + "itilde;": "\u0129", + "Iukcy;": "\u0406", + "iukcy;": "\u0456", + "Iuml;": "\u00CF", + "Iuml": "\u00CF", + "iuml;": "\u00EF", + "iuml": "\u00EF", + "Jcirc;": "\u0134", + "jcirc;": "\u0135", + "Jcy;": "\u0419", + "jcy;": "\u0439", + "Jfr;": "\uD835\uDD0D", + "jfr;": "\uD835\uDD27", + "jmath;": "\u0237", + "Jopf;": "\uD835\uDD41", + "jopf;": "\uD835\uDD5B", + "Jscr;": "\uD835\uDCA5", + "jscr;": "\uD835\uDCBF", + "Jsercy;": "\u0408", + "jsercy;": "\u0458", + "Jukcy;": "\u0404", + "jukcy;": "\u0454", + "Kappa;": "\u039A", + "kappa;": "\u03BA", + "kappav;": "\u03F0", + "Kcedil;": "\u0136", + "kcedil;": "\u0137", + "Kcy;": "\u041A", + "kcy;": "\u043A", + "Kfr;": "\uD835\uDD0E", + "kfr;": "\uD835\uDD28", + "kgreen;": "\u0138", + "KHcy;": "\u0425", + "khcy;": "\u0445", + "KJcy;": "\u040C", + "kjcy;": "\u045C", + "Kopf;": "\uD835\uDD42", + "kopf;": "\uD835\uDD5C", + "Kscr;": "\uD835\uDCA6", + "kscr;": "\uD835\uDCC0", + "lAarr;": "\u21DA", + "Lacute;": "\u0139", + "lacute;": "\u013A", + "laemptyv;": "\u29B4", + "lagran;": "\u2112", + "Lambda;": "\u039B", + "lambda;": "\u03BB", + "Lang;": "\u27EA", + "lang;": "\u27E8", + "langd;": "\u2991", + "langle;": "\u27E8", + "lap;": "\u2A85", + "Laplacetrf;": "\u2112", + "laquo;": "\u00AB", + "laquo": "\u00AB", + "Larr;": "\u219E", + "lArr;": "\u21D0", + "larr;": "\u2190", + "larrb;": "\u21E4", + "larrbfs;": "\u291F", + "larrfs;": "\u291D", + "larrhk;": "\u21A9", + "larrlp;": "\u21AB", + "larrpl;": "\u2939", + "larrsim;": "\u2973", + "larrtl;": "\u21A2", + "lat;": "\u2AAB", + "lAtail;": "\u291B", + "latail;": "\u2919", + "late;": "\u2AAD", + "lates;": "\u2AAD\uFE00", + "lBarr;": 
"\u290E", + "lbarr;": "\u290C", + "lbbrk;": "\u2772", + "lbrace;": "{", + "lbrack;": "[", + "lbrke;": "\u298B", + "lbrksld;": "\u298F", + "lbrkslu;": "\u298D", + "Lcaron;": "\u013D", + "lcaron;": "\u013E", + "Lcedil;": "\u013B", + "lcedil;": "\u013C", + "lceil;": "\u2308", + "lcub;": "{", + "Lcy;": "\u041B", + "lcy;": "\u043B", + "ldca;": "\u2936", + "ldquo;": "\u201C", + "ldquor;": "\u201E", + "ldrdhar;": "\u2967", + "ldrushar;": "\u294B", + "ldsh;": "\u21B2", + "lE;": "\u2266", + "le;": "\u2264", + "LeftAngleBracket;": "\u27E8", + "LeftArrow;": "\u2190", + "Leftarrow;": "\u21D0", + "leftarrow;": "\u2190", + "LeftArrowBar;": "\u21E4", + "LeftArrowRightArrow;": "\u21C6", + "leftarrowtail;": "\u21A2", + "LeftCeiling;": "\u2308", + "LeftDoubleBracket;": "\u27E6", + "LeftDownTeeVector;": "\u2961", + "LeftDownVector;": "\u21C3", + "LeftDownVectorBar;": "\u2959", + "LeftFloor;": "\u230A", + "leftharpoondown;": "\u21BD", + "leftharpoonup;": "\u21BC", + "leftleftarrows;": "\u21C7", + "LeftRightArrow;": "\u2194", + "Leftrightarrow;": "\u21D4", + "leftrightarrow;": "\u2194", + "leftrightarrows;": "\u21C6", + "leftrightharpoons;": "\u21CB", + "leftrightsquigarrow;": "\u21AD", + "LeftRightVector;": "\u294E", + "LeftTee;": "\u22A3", + "LeftTeeArrow;": "\u21A4", + "LeftTeeVector;": "\u295A", + "leftthreetimes;": "\u22CB", + "LeftTriangle;": "\u22B2", + "LeftTriangleBar;": "\u29CF", + "LeftTriangleEqual;": "\u22B4", + "LeftUpDownVector;": "\u2951", + "LeftUpTeeVector;": "\u2960", + "LeftUpVector;": "\u21BF", + "LeftUpVectorBar;": "\u2958", + "LeftVector;": "\u21BC", + "LeftVectorBar;": "\u2952", + "lEg;": "\u2A8B", + "leg;": "\u22DA", + "leq;": "\u2264", + "leqq;": "\u2266", + "leqslant;": "\u2A7D", + "les;": "\u2A7D", + "lescc;": "\u2AA8", + "lesdot;": "\u2A7F", + "lesdoto;": "\u2A81", + "lesdotor;": "\u2A83", + "lesg;": "\u22DA\uFE00", + "lesges;": "\u2A93", + "lessapprox;": "\u2A85", + "lessdot;": "\u22D6", + "lesseqgtr;": "\u22DA", + "lesseqqgtr;": "\u2A8B", + "LessEqualGreater;": "\u22DA", + "LessFullEqual;": "\u2266", + "LessGreater;": "\u2276", + "lessgtr;": "\u2276", + "LessLess;": "\u2AA1", + "lesssim;": "\u2272", + "LessSlantEqual;": "\u2A7D", + "LessTilde;": "\u2272", + "lfisht;": "\u297C", + "lfloor;": "\u230A", + "Lfr;": "\uD835\uDD0F", + "lfr;": "\uD835\uDD29", + "lg;": "\u2276", + "lgE;": "\u2A91", + "lHar;": "\u2962", + "lhard;": "\u21BD", + "lharu;": "\u21BC", + "lharul;": "\u296A", + "lhblk;": "\u2584", + "LJcy;": "\u0409", + "ljcy;": "\u0459", + "Ll;": "\u22D8", + "ll;": "\u226A", + "llarr;": "\u21C7", + "llcorner;": "\u231E", + "Lleftarrow;": "\u21DA", + "llhard;": "\u296B", + "lltri;": "\u25FA", + "Lmidot;": "\u013F", + "lmidot;": "\u0140", + "lmoust;": "\u23B0", + "lmoustache;": "\u23B0", + "lnap;": "\u2A89", + "lnapprox;": "\u2A89", + "lnE;": "\u2268", + "lne;": "\u2A87", + "lneq;": "\u2A87", + "lneqq;": "\u2268", + "lnsim;": "\u22E6", + "loang;": "\u27EC", + "loarr;": "\u21FD", + "lobrk;": "\u27E6", + "LongLeftArrow;": "\u27F5", + "Longleftarrow;": "\u27F8", + "longleftarrow;": "\u27F5", + "LongLeftRightArrow;": "\u27F7", + "Longleftrightarrow;": "\u27FA", + "longleftrightarrow;": "\u27F7", + "longmapsto;": "\u27FC", + "LongRightArrow;": "\u27F6", + "Longrightarrow;": "\u27F9", + "longrightarrow;": "\u27F6", + "looparrowleft;": "\u21AB", + "looparrowright;": "\u21AC", + "lopar;": "\u2985", + "Lopf;": "\uD835\uDD43", + "lopf;": "\uD835\uDD5D", + "loplus;": "\u2A2D", + "lotimes;": "\u2A34", + "lowast;": "\u2217", + "lowbar;": "_", + "LowerLeftArrow;": "\u2199", + 
"LowerRightArrow;": "\u2198", + "loz;": "\u25CA", + "lozenge;": "\u25CA", + "lozf;": "\u29EB", + "lpar;": "(", + "lparlt;": "\u2993", + "lrarr;": "\u21C6", + "lrcorner;": "\u231F", + "lrhar;": "\u21CB", + "lrhard;": "\u296D", + "lrm;": "\u200E", + "lrtri;": "\u22BF", + "lsaquo;": "\u2039", + "Lscr;": "\u2112", + "lscr;": "\uD835\uDCC1", + "Lsh;": "\u21B0", + "lsh;": "\u21B0", + "lsim;": "\u2272", + "lsime;": "\u2A8D", + "lsimg;": "\u2A8F", + "lsqb;": "[", + "lsquo;": "\u2018", + "lsquor;": "\u201A", + "Lstrok;": "\u0141", + "lstrok;": "\u0142", + "LT;": "<", + "LT": "<", + "Lt;": "\u226A", + "lt;": "<", + "lt": "<", + "ltcc;": "\u2AA6", + "ltcir;": "\u2A79", + "ltdot;": "\u22D6", + "lthree;": "\u22CB", + "ltimes;": "\u22C9", + "ltlarr;": "\u2976", + "ltquest;": "\u2A7B", + "ltri;": "\u25C3", + "ltrie;": "\u22B4", + "ltrif;": "\u25C2", + "ltrPar;": "\u2996", + "lurdshar;": "\u294A", + "luruhar;": "\u2966", + "lvertneqq;": "\u2268\uFE00", + "lvnE;": "\u2268\uFE00", + "macr;": "\u00AF", + "macr": "\u00AF", + "male;": "\u2642", + "malt;": "\u2720", + "maltese;": "\u2720", + "Map;": "\u2905", + "map;": "\u21A6", + "mapsto;": "\u21A6", + "mapstodown;": "\u21A7", + "mapstoleft;": "\u21A4", + "mapstoup;": "\u21A5", + "marker;": "\u25AE", + "mcomma;": "\u2A29", + "Mcy;": "\u041C", + "mcy;": "\u043C", + "mdash;": "\u2014", + "mDDot;": "\u223A", + "measuredangle;": "\u2221", + "MediumSpace;": "\u205F", + "Mellintrf;": "\u2133", + "Mfr;": "\uD835\uDD10", + "mfr;": "\uD835\uDD2A", + "mho;": "\u2127", + "micro;": "\u00B5", + "micro": "\u00B5", + "mid;": "\u2223", + "midast;": "*", + "midcir;": "\u2AF0", + "middot;": "\u00B7", + "middot": "\u00B7", + "minus;": "\u2212", + "minusb;": "\u229F", + "minusd;": "\u2238", + "minusdu;": "\u2A2A", + "MinusPlus;": "\u2213", + "mlcp;": "\u2ADB", + "mldr;": "\u2026", + "mnplus;": "\u2213", + "models;": "\u22A7", + "Mopf;": "\uD835\uDD44", + "mopf;": "\uD835\uDD5E", + "mp;": "\u2213", + "Mscr;": "\u2133", + "mscr;": "\uD835\uDCC2", + "mstpos;": "\u223E", + "Mu;": "\u039C", + "mu;": "\u03BC", + "multimap;": "\u22B8", + "mumap;": "\u22B8", + "nabla;": "\u2207", + "Nacute;": "\u0143", + "nacute;": "\u0144", + "nang;": "\u2220\u20D2", + "nap;": "\u2249", + "napE;": "\u2A70\u0338", + "napid;": "\u224B\u0338", + "napos;": "\u0149", + "napprox;": "\u2249", + "natur;": "\u266E", + "natural;": "\u266E", + "naturals;": "\u2115", + "nbsp;": "\u00A0", + "nbsp": "\u00A0", + "nbump;": "\u224E\u0338", + "nbumpe;": "\u224F\u0338", + "ncap;": "\u2A43", + "Ncaron;": "\u0147", + "ncaron;": "\u0148", + "Ncedil;": "\u0145", + "ncedil;": "\u0146", + "ncong;": "\u2247", + "ncongdot;": "\u2A6D\u0338", + "ncup;": "\u2A42", + "Ncy;": "\u041D", + "ncy;": "\u043D", + "ndash;": "\u2013", + "ne;": "\u2260", + "nearhk;": "\u2924", + "neArr;": "\u21D7", + "nearr;": "\u2197", + "nearrow;": "\u2197", + "nedot;": "\u2250\u0338", + "NegativeMediumSpace;": "\u200B", + "NegativeThickSpace;": "\u200B", + "NegativeThinSpace;": "\u200B", + "NegativeVeryThinSpace;": "\u200B", + "nequiv;": "\u2262", + "nesear;": "\u2928", + "nesim;": "\u2242\u0338", + "NestedGreaterGreater;": "\u226B", + "NestedLessLess;": "\u226A", + "NewLine;": "\n", + "nexist;": "\u2204", + "nexists;": "\u2204", + "Nfr;": "\uD835\uDD11", + "nfr;": "\uD835\uDD2B", + "ngE;": "\u2267\u0338", + "nge;": "\u2271", + "ngeq;": "\u2271", + "ngeqq;": "\u2267\u0338", + "ngeqslant;": "\u2A7E\u0338", + "nges;": "\u2A7E\u0338", + "nGg;": "\u22D9\u0338", + "ngsim;": "\u2275", + "nGt;": "\u226B\u20D2", + "ngt;": "\u226F", + "ngtr;": "\u226F", + "nGtv;": 
"\u226B\u0338", + "nhArr;": "\u21CE", + "nharr;": "\u21AE", + "nhpar;": "\u2AF2", + "ni;": "\u220B", + "nis;": "\u22FC", + "nisd;": "\u22FA", + "niv;": "\u220B", + "NJcy;": "\u040A", + "njcy;": "\u045A", + "nlArr;": "\u21CD", + "nlarr;": "\u219A", + "nldr;": "\u2025", + "nlE;": "\u2266\u0338", + "nle;": "\u2270", + "nLeftarrow;": "\u21CD", + "nleftarrow;": "\u219A", + "nLeftrightarrow;": "\u21CE", + "nleftrightarrow;": "\u21AE", + "nleq;": "\u2270", + "nleqq;": "\u2266\u0338", + "nleqslant;": "\u2A7D\u0338", + "nles;": "\u2A7D\u0338", + "nless;": "\u226E", + "nLl;": "\u22D8\u0338", + "nlsim;": "\u2274", + "nLt;": "\u226A\u20D2", + "nlt;": "\u226E", + "nltri;": "\u22EA", + "nltrie;": "\u22EC", + "nLtv;": "\u226A\u0338", + "nmid;": "\u2224", + "NoBreak;": "\u2060", + "NonBreakingSpace;": "\u00A0", + "Nopf;": "\u2115", + "nopf;": "\uD835\uDD5F", + "Not;": "\u2AEC", + "not;": "\u00AC", + "not": "\u00AC", + "NotCongruent;": "\u2262", + "NotCupCap;": "\u226D", + "NotDoubleVerticalBar;": "\u2226", + "NotElement;": "\u2209", + "NotEqual;": "\u2260", + "NotEqualTilde;": "\u2242\u0338", + "NotExists;": "\u2204", + "NotGreater;": "\u226F", + "NotGreaterEqual;": "\u2271", + "NotGreaterFullEqual;": "\u2267\u0338", + "NotGreaterGreater;": "\u226B\u0338", + "NotGreaterLess;": "\u2279", + "NotGreaterSlantEqual;": "\u2A7E\u0338", + "NotGreaterTilde;": "\u2275", + "NotHumpDownHump;": "\u224E\u0338", + "NotHumpEqual;": "\u224F\u0338", + "notin;": "\u2209", + "notindot;": "\u22F5\u0338", + "notinE;": "\u22F9\u0338", + "notinva;": "\u2209", + "notinvb;": "\u22F7", + "notinvc;": "\u22F6", + "NotLeftTriangle;": "\u22EA", + "NotLeftTriangleBar;": "\u29CF\u0338", + "NotLeftTriangleEqual;": "\u22EC", + "NotLess;": "\u226E", + "NotLessEqual;": "\u2270", + "NotLessGreater;": "\u2278", + "NotLessLess;": "\u226A\u0338", + "NotLessSlantEqual;": "\u2A7D\u0338", + "NotLessTilde;": "\u2274", + "NotNestedGreaterGreater;": "\u2AA2\u0338", + "NotNestedLessLess;": "\u2AA1\u0338", + "notni;": "\u220C", + "notniva;": "\u220C", + "notnivb;": "\u22FE", + "notnivc;": "\u22FD", + "NotPrecedes;": "\u2280", + "NotPrecedesEqual;": "\u2AAF\u0338", + "NotPrecedesSlantEqual;": "\u22E0", + "NotReverseElement;": "\u220C", + "NotRightTriangle;": "\u22EB", + "NotRightTriangleBar;": "\u29D0\u0338", + "NotRightTriangleEqual;": "\u22ED", + "NotSquareSubset;": "\u228F\u0338", + "NotSquareSubsetEqual;": "\u22E2", + "NotSquareSuperset;": "\u2290\u0338", + "NotSquareSupersetEqual;": "\u22E3", + "NotSubset;": "\u2282\u20D2", + "NotSubsetEqual;": "\u2288", + "NotSucceeds;": "\u2281", + "NotSucceedsEqual;": "\u2AB0\u0338", + "NotSucceedsSlantEqual;": "\u22E1", + "NotSucceedsTilde;": "\u227F\u0338", + "NotSuperset;": "\u2283\u20D2", + "NotSupersetEqual;": "\u2289", + "NotTilde;": "\u2241", + "NotTildeEqual;": "\u2244", + "NotTildeFullEqual;": "\u2247", + "NotTildeTilde;": "\u2249", + "NotVerticalBar;": "\u2224", + "npar;": "\u2226", + "nparallel;": "\u2226", + "nparsl;": "\u2AFD\u20E5", + "npart;": "\u2202\u0338", + "npolint;": "\u2A14", + "npr;": "\u2280", + "nprcue;": "\u22E0", + "npre;": "\u2AAF\u0338", + "nprec;": "\u2280", + "npreceq;": "\u2AAF\u0338", + "nrArr;": "\u21CF", + "nrarr;": "\u219B", + "nrarrc;": "\u2933\u0338", + "nrarrw;": "\u219D\u0338", + "nRightarrow;": "\u21CF", + "nrightarrow;": "\u219B", + "nrtri;": "\u22EB", + "nrtrie;": "\u22ED", + "nsc;": "\u2281", + "nsccue;": "\u22E1", + "nsce;": "\u2AB0\u0338", + "Nscr;": "\uD835\uDCA9", + "nscr;": "\uD835\uDCC3", + "nshortmid;": "\u2224", + "nshortparallel;": "\u2226", + "nsim;": 
"\u2241", + "nsime;": "\u2244", + "nsimeq;": "\u2244", + "nsmid;": "\u2224", + "nspar;": "\u2226", + "nsqsube;": "\u22E2", + "nsqsupe;": "\u22E3", + "nsub;": "\u2284", + "nsubE;": "\u2AC5\u0338", + "nsube;": "\u2288", + "nsubset;": "\u2282\u20D2", + "nsubseteq;": "\u2288", + "nsubseteqq;": "\u2AC5\u0338", + "nsucc;": "\u2281", + "nsucceq;": "\u2AB0\u0338", + "nsup;": "\u2285", + "nsupE;": "\u2AC6\u0338", + "nsupe;": "\u2289", + "nsupset;": "\u2283\u20D2", + "nsupseteq;": "\u2289", + "nsupseteqq;": "\u2AC6\u0338", + "ntgl;": "\u2279", + "Ntilde;": "\u00D1", + "Ntilde": "\u00D1", + "ntilde;": "\u00F1", + "ntilde": "\u00F1", + "ntlg;": "\u2278", + "ntriangleleft;": "\u22EA", + "ntrianglelefteq;": "\u22EC", + "ntriangleright;": "\u22EB", + "ntrianglerighteq;": "\u22ED", + "Nu;": "\u039D", + "nu;": "\u03BD", + "num;": "#", + "numero;": "\u2116", + "numsp;": "\u2007", + "nvap;": "\u224D\u20D2", + "nVDash;": "\u22AF", + "nVdash;": "\u22AE", + "nvDash;": "\u22AD", + "nvdash;": "\u22AC", + "nvge;": "\u2265\u20D2", + "nvgt;": ">\u20D2", + "nvHarr;": "\u2904", + "nvinfin;": "\u29DE", + "nvlArr;": "\u2902", + "nvle;": "\u2264\u20D2", + "nvlt;": "<\u20D2", + "nvltrie;": "\u22B4\u20D2", + "nvrArr;": "\u2903", + "nvrtrie;": "\u22B5\u20D2", + "nvsim;": "\u223C\u20D2", + "nwarhk;": "\u2923", + "nwArr;": "\u21D6", + "nwarr;": "\u2196", + "nwarrow;": "\u2196", + "nwnear;": "\u2927", + "Oacute;": "\u00D3", + "Oacute": "\u00D3", + "oacute;": "\u00F3", + "oacute": "\u00F3", + "oast;": "\u229B", + "ocir;": "\u229A", + "Ocirc;": "\u00D4", + "Ocirc": "\u00D4", + "ocirc;": "\u00F4", + "ocirc": "\u00F4", + "Ocy;": "\u041E", + "ocy;": "\u043E", + "odash;": "\u229D", + "Odblac;": "\u0150", + "odblac;": "\u0151", + "odiv;": "\u2A38", + "odot;": "\u2299", + "odsold;": "\u29BC", + "OElig;": "\u0152", + "oelig;": "\u0153", + "ofcir;": "\u29BF", + "Ofr;": "\uD835\uDD12", + "ofr;": "\uD835\uDD2C", + "ogon;": "\u02DB", + "Ograve;": "\u00D2", + "Ograve": "\u00D2", + "ograve;": "\u00F2", + "ograve": "\u00F2", + "ogt;": "\u29C1", + "ohbar;": "\u29B5", + "ohm;": "\u03A9", + "oint;": "\u222E", + "olarr;": "\u21BA", + "olcir;": "\u29BE", + "olcross;": "\u29BB", + "oline;": "\u203E", + "olt;": "\u29C0", + "Omacr;": "\u014C", + "omacr;": "\u014D", + "Omega;": "\u03A9", + "omega;": "\u03C9", + "Omicron;": "\u039F", + "omicron;": "\u03BF", + "omid;": "\u29B6", + "ominus;": "\u2296", + "Oopf;": "\uD835\uDD46", + "oopf;": "\uD835\uDD60", + "opar;": "\u29B7", + "OpenCurlyDoubleQuote;": "\u201C", + "OpenCurlyQuote;": "\u2018", + "operp;": "\u29B9", + "oplus;": "\u2295", + "Or;": "\u2A54", + "or;": "\u2228", + "orarr;": "\u21BB", + "ord;": "\u2A5D", + "order;": "\u2134", + "orderof;": "\u2134", + "ordf;": "\u00AA", + "ordf": "\u00AA", + "ordm;": "\u00BA", + "ordm": "\u00BA", + "origof;": "\u22B6", + "oror;": "\u2A56", + "orslope;": "\u2A57", + "orv;": "\u2A5B", + "oS;": "\u24C8", + "Oscr;": "\uD835\uDCAA", + "oscr;": "\u2134", + "Oslash;": "\u00D8", + "Oslash": "\u00D8", + "oslash;": "\u00F8", + "oslash": "\u00F8", + "osol;": "\u2298", + "Otilde;": "\u00D5", + "Otilde": "\u00D5", + "otilde;": "\u00F5", + "otilde": "\u00F5", + "Otimes;": "\u2A37", + "otimes;": "\u2297", + "otimesas;": "\u2A36", + "Ouml;": "\u00D6", + "Ouml": "\u00D6", + "ouml;": "\u00F6", + "ouml": "\u00F6", + "ovbar;": "\u233D", + "OverBar;": "\u203E", + "OverBrace;": "\u23DE", + "OverBracket;": "\u23B4", + "OverParenthesis;": "\u23DC", + "par;": "\u2225", + "para;": "\u00B6", + "para": "\u00B6", + "parallel;": "\u2225", + "parsim;": "\u2AF3", + "parsl;": "\u2AFD", + 
"part;": "\u2202", + "PartialD;": "\u2202", + "Pcy;": "\u041F", + "pcy;": "\u043F", + "percnt;": "%", + "period;": ".", + "permil;": "\u2030", + "perp;": "\u22A5", + "pertenk;": "\u2031", + "Pfr;": "\uD835\uDD13", + "pfr;": "\uD835\uDD2D", + "Phi;": "\u03A6", + "phi;": "\u03C6", + "phiv;": "\u03D5", + "phmmat;": "\u2133", + "phone;": "\u260E", + "Pi;": "\u03A0", + "pi;": "\u03C0", + "pitchfork;": "\u22D4", + "piv;": "\u03D6", + "planck;": "\u210F", + "planckh;": "\u210E", + "plankv;": "\u210F", + "plus;": "+", + "plusacir;": "\u2A23", + "plusb;": "\u229E", + "pluscir;": "\u2A22", + "plusdo;": "\u2214", + "plusdu;": "\u2A25", + "pluse;": "\u2A72", + "PlusMinus;": "\u00B1", + "plusmn;": "\u00B1", + "plusmn": "\u00B1", + "plussim;": "\u2A26", + "plustwo;": "\u2A27", + "pm;": "\u00B1", + "Poincareplane;": "\u210C", + "pointint;": "\u2A15", + "Popf;": "\u2119", + "popf;": "\uD835\uDD61", + "pound;": "\u00A3", + "pound": "\u00A3", + "Pr;": "\u2ABB", + "pr;": "\u227A", + "prap;": "\u2AB7", + "prcue;": "\u227C", + "prE;": "\u2AB3", + "pre;": "\u2AAF", + "prec;": "\u227A", + "precapprox;": "\u2AB7", + "preccurlyeq;": "\u227C", + "Precedes;": "\u227A", + "PrecedesEqual;": "\u2AAF", + "PrecedesSlantEqual;": "\u227C", + "PrecedesTilde;": "\u227E", + "preceq;": "\u2AAF", + "precnapprox;": "\u2AB9", + "precneqq;": "\u2AB5", + "precnsim;": "\u22E8", + "precsim;": "\u227E", + "Prime;": "\u2033", + "prime;": "\u2032", + "primes;": "\u2119", + "prnap;": "\u2AB9", + "prnE;": "\u2AB5", + "prnsim;": "\u22E8", + "prod;": "\u220F", + "Product;": "\u220F", + "profalar;": "\u232E", + "profline;": "\u2312", + "profsurf;": "\u2313", + "prop;": "\u221D", + "Proportion;": "\u2237", + "Proportional;": "\u221D", + "propto;": "\u221D", + "prsim;": "\u227E", + "prurel;": "\u22B0", + "Pscr;": "\uD835\uDCAB", + "pscr;": "\uD835\uDCC5", + "Psi;": "\u03A8", + "psi;": "\u03C8", + "puncsp;": "\u2008", + "Qfr;": "\uD835\uDD14", + "qfr;": "\uD835\uDD2E", + "qint;": "\u2A0C", + "Qopf;": "\u211A", + "qopf;": "\uD835\uDD62", + "qprime;": "\u2057", + "Qscr;": "\uD835\uDCAC", + "qscr;": "\uD835\uDCC6", + "quaternions;": "\u210D", + "quatint;": "\u2A16", + "quest;": "?", + "questeq;": "\u225F", + "QUOT;": "\"", + "QUOT": "\"", + "quot;": "\"", + "quot": "\"", + "rAarr;": "\u21DB", + "race;": "\u223D\u0331", + "Racute;": "\u0154", + "racute;": "\u0155", + "radic;": "\u221A", + "raemptyv;": "\u29B3", + "Rang;": "\u27EB", + "rang;": "\u27E9", + "rangd;": "\u2992", + "range;": "\u29A5", + "rangle;": "\u27E9", + "raquo;": "\u00BB", + "raquo": "\u00BB", + "Rarr;": "\u21A0", + "rArr;": "\u21D2", + "rarr;": "\u2192", + "rarrap;": "\u2975", + "rarrb;": "\u21E5", + "rarrbfs;": "\u2920", + "rarrc;": "\u2933", + "rarrfs;": "\u291E", + "rarrhk;": "\u21AA", + "rarrlp;": "\u21AC", + "rarrpl;": "\u2945", + "rarrsim;": "\u2974", + "Rarrtl;": "\u2916", + "rarrtl;": "\u21A3", + "rarrw;": "\u219D", + "rAtail;": "\u291C", + "ratail;": "\u291A", + "ratio;": "\u2236", + "rationals;": "\u211A", + "RBarr;": "\u2910", + "rBarr;": "\u290F", + "rbarr;": "\u290D", + "rbbrk;": "\u2773", + "rbrace;": "}", + "rbrack;": "]", + "rbrke;": "\u298C", + "rbrksld;": "\u298E", + "rbrkslu;": "\u2990", + "Rcaron;": "\u0158", + "rcaron;": "\u0159", + "Rcedil;": "\u0156", + "rcedil;": "\u0157", + "rceil;": "\u2309", + "rcub;": "}", + "Rcy;": "\u0420", + "rcy;": "\u0440", + "rdca;": "\u2937", + "rdldhar;": "\u2969", + "rdquo;": "\u201D", + "rdquor;": "\u201D", + "rdsh;": "\u21B3", + "Re;": "\u211C", + "real;": "\u211C", + "realine;": "\u211B", + "realpart;": "\u211C", + "reals;": 
"\u211D", + "rect;": "\u25AD", + "REG;": "\u00AE", + "REG": "\u00AE", + "reg;": "\u00AE", + "reg": "\u00AE", + "ReverseElement;": "\u220B", + "ReverseEquilibrium;": "\u21CB", + "ReverseUpEquilibrium;": "\u296F", + "rfisht;": "\u297D", + "rfloor;": "\u230B", + "Rfr;": "\u211C", + "rfr;": "\uD835\uDD2F", + "rHar;": "\u2964", + "rhard;": "\u21C1", + "rharu;": "\u21C0", + "rharul;": "\u296C", + "Rho;": "\u03A1", + "rho;": "\u03C1", + "rhov;": "\u03F1", + "RightAngleBracket;": "\u27E9", + "RightArrow;": "\u2192", + "Rightarrow;": "\u21D2", + "rightarrow;": "\u2192", + "RightArrowBar;": "\u21E5", + "RightArrowLeftArrow;": "\u21C4", + "rightarrowtail;": "\u21A3", + "RightCeiling;": "\u2309", + "RightDoubleBracket;": "\u27E7", + "RightDownTeeVector;": "\u295D", + "RightDownVector;": "\u21C2", + "RightDownVectorBar;": "\u2955", + "RightFloor;": "\u230B", + "rightharpoondown;": "\u21C1", + "rightharpoonup;": "\u21C0", + "rightleftarrows;": "\u21C4", + "rightleftharpoons;": "\u21CC", + "rightrightarrows;": "\u21C9", + "rightsquigarrow;": "\u219D", + "RightTee;": "\u22A2", + "RightTeeArrow;": "\u21A6", + "RightTeeVector;": "\u295B", + "rightthreetimes;": "\u22CC", + "RightTriangle;": "\u22B3", + "RightTriangleBar;": "\u29D0", + "RightTriangleEqual;": "\u22B5", + "RightUpDownVector;": "\u294F", + "RightUpTeeVector;": "\u295C", + "RightUpVector;": "\u21BE", + "RightUpVectorBar;": "\u2954", + "RightVector;": "\u21C0", + "RightVectorBar;": "\u2953", + "ring;": "\u02DA", + "risingdotseq;": "\u2253", + "rlarr;": "\u21C4", + "rlhar;": "\u21CC", + "rlm;": "\u200F", + "rmoust;": "\u23B1", + "rmoustache;": "\u23B1", + "rnmid;": "\u2AEE", + "roang;": "\u27ED", + "roarr;": "\u21FE", + "robrk;": "\u27E7", + "ropar;": "\u2986", + "Ropf;": "\u211D", + "ropf;": "\uD835\uDD63", + "roplus;": "\u2A2E", + "rotimes;": "\u2A35", + "RoundImplies;": "\u2970", + "rpar;": ")", + "rpargt;": "\u2994", + "rppolint;": "\u2A12", + "rrarr;": "\u21C9", + "Rrightarrow;": "\u21DB", + "rsaquo;": "\u203A", + "Rscr;": "\u211B", + "rscr;": "\uD835\uDCC7", + "Rsh;": "\u21B1", + "rsh;": "\u21B1", + "rsqb;": "]", + "rsquo;": "\u2019", + "rsquor;": "\u2019", + "rthree;": "\u22CC", + "rtimes;": "\u22CA", + "rtri;": "\u25B9", + "rtrie;": "\u22B5", + "rtrif;": "\u25B8", + "rtriltri;": "\u29CE", + "RuleDelayed;": "\u29F4", + "ruluhar;": "\u2968", + "rx;": "\u211E", + "Sacute;": "\u015A", + "sacute;": "\u015B", + "sbquo;": "\u201A", + "Sc;": "\u2ABC", + "sc;": "\u227B", + "scap;": "\u2AB8", + "Scaron;": "\u0160", + "scaron;": "\u0161", + "sccue;": "\u227D", + "scE;": "\u2AB4", + "sce;": "\u2AB0", + "Scedil;": "\u015E", + "scedil;": "\u015F", + "Scirc;": "\u015C", + "scirc;": "\u015D", + "scnap;": "\u2ABA", + "scnE;": "\u2AB6", + "scnsim;": "\u22E9", + "scpolint;": "\u2A13", + "scsim;": "\u227F", + "Scy;": "\u0421", + "scy;": "\u0441", + "sdot;": "\u22C5", + "sdotb;": "\u22A1", + "sdote;": "\u2A66", + "searhk;": "\u2925", + "seArr;": "\u21D8", + "searr;": "\u2198", + "searrow;": "\u2198", + "sect;": "\u00A7", + "sect": "\u00A7", + "semi;": ";", + "seswar;": "\u2929", + "setminus;": "\u2216", + "setmn;": "\u2216", + "sext;": "\u2736", + "Sfr;": "\uD835\uDD16", + "sfr;": "\uD835\uDD30", + "sfrown;": "\u2322", + "sharp;": "\u266F", + "SHCHcy;": "\u0429", + "shchcy;": "\u0449", + "SHcy;": "\u0428", + "shcy;": "\u0448", + "ShortDownArrow;": "\u2193", + "ShortLeftArrow;": "\u2190", + "shortmid;": "\u2223", + "shortparallel;": "\u2225", + "ShortRightArrow;": "\u2192", + "ShortUpArrow;": "\u2191", + "shy;": "\u00AD", + "shy": "\u00AD", + "Sigma;": "\u03A3", 
+ "sigma;": "\u03C3", + "sigmaf;": "\u03C2", + "sigmav;": "\u03C2", + "sim;": "\u223C", + "simdot;": "\u2A6A", + "sime;": "\u2243", + "simeq;": "\u2243", + "simg;": "\u2A9E", + "simgE;": "\u2AA0", + "siml;": "\u2A9D", + "simlE;": "\u2A9F", + "simne;": "\u2246", + "simplus;": "\u2A24", + "simrarr;": "\u2972", + "slarr;": "\u2190", + "SmallCircle;": "\u2218", + "smallsetminus;": "\u2216", + "smashp;": "\u2A33", + "smeparsl;": "\u29E4", + "smid;": "\u2223", + "smile;": "\u2323", + "smt;": "\u2AAA", + "smte;": "\u2AAC", + "smtes;": "\u2AAC\uFE00", + "SOFTcy;": "\u042C", + "softcy;": "\u044C", + "sol;": "/", + "solb;": "\u29C4", + "solbar;": "\u233F", + "Sopf;": "\uD835\uDD4A", + "sopf;": "\uD835\uDD64", + "spades;": "\u2660", + "spadesuit;": "\u2660", + "spar;": "\u2225", + "sqcap;": "\u2293", + "sqcaps;": "\u2293\uFE00", + "sqcup;": "\u2294", + "sqcups;": "\u2294\uFE00", + "Sqrt;": "\u221A", + "sqsub;": "\u228F", + "sqsube;": "\u2291", + "sqsubset;": "\u228F", + "sqsubseteq;": "\u2291", + "sqsup;": "\u2290", + "sqsupe;": "\u2292", + "sqsupset;": "\u2290", + "sqsupseteq;": "\u2292", + "squ;": "\u25A1", + "Square;": "\u25A1", + "square;": "\u25A1", + "SquareIntersection;": "\u2293", + "SquareSubset;": "\u228F", + "SquareSubsetEqual;": "\u2291", + "SquareSuperset;": "\u2290", + "SquareSupersetEqual;": "\u2292", + "SquareUnion;": "\u2294", + "squarf;": "\u25AA", + "squf;": "\u25AA", + "srarr;": "\u2192", + "Sscr;": "\uD835\uDCAE", + "sscr;": "\uD835\uDCC8", + "ssetmn;": "\u2216", + "ssmile;": "\u2323", + "sstarf;": "\u22C6", + "Star;": "\u22C6", + "star;": "\u2606", + "starf;": "\u2605", + "straightepsilon;": "\u03F5", + "straightphi;": "\u03D5", + "strns;": "\u00AF", + "Sub;": "\u22D0", + "sub;": "\u2282", + "subdot;": "\u2ABD", + "subE;": "\u2AC5", + "sube;": "\u2286", + "subedot;": "\u2AC3", + "submult;": "\u2AC1", + "subnE;": "\u2ACB", + "subne;": "\u228A", + "subplus;": "\u2ABF", + "subrarr;": "\u2979", + "Subset;": "\u22D0", + "subset;": "\u2282", + "subseteq;": "\u2286", + "subseteqq;": "\u2AC5", + "SubsetEqual;": "\u2286", + "subsetneq;": "\u228A", + "subsetneqq;": "\u2ACB", + "subsim;": "\u2AC7", + "subsub;": "\u2AD5", + "subsup;": "\u2AD3", + "succ;": "\u227B", + "succapprox;": "\u2AB8", + "succcurlyeq;": "\u227D", + "Succeeds;": "\u227B", + "SucceedsEqual;": "\u2AB0", + "SucceedsSlantEqual;": "\u227D", + "SucceedsTilde;": "\u227F", + "succeq;": "\u2AB0", + "succnapprox;": "\u2ABA", + "succneqq;": "\u2AB6", + "succnsim;": "\u22E9", + "succsim;": "\u227F", + "SuchThat;": "\u220B", + "Sum;": "\u2211", + "sum;": "\u2211", + "sung;": "\u266A", + "Sup;": "\u22D1", + "sup;": "\u2283", + "sup1;": "\u00B9", + "sup1": "\u00B9", + "sup2;": "\u00B2", + "sup2": "\u00B2", + "sup3;": "\u00B3", + "sup3": "\u00B3", + "supdot;": "\u2ABE", + "supdsub;": "\u2AD8", + "supE;": "\u2AC6", + "supe;": "\u2287", + "supedot;": "\u2AC4", + "Superset;": "\u2283", + "SupersetEqual;": "\u2287", + "suphsol;": "\u27C9", + "suphsub;": "\u2AD7", + "suplarr;": "\u297B", + "supmult;": "\u2AC2", + "supnE;": "\u2ACC", + "supne;": "\u228B", + "supplus;": "\u2AC0", + "Supset;": "\u22D1", + "supset;": "\u2283", + "supseteq;": "\u2287", + "supseteqq;": "\u2AC6", + "supsetneq;": "\u228B", + "supsetneqq;": "\u2ACC", + "supsim;": "\u2AC8", + "supsub;": "\u2AD4", + "supsup;": "\u2AD6", + "swarhk;": "\u2926", + "swArr;": "\u21D9", + "swarr;": "\u2199", + "swarrow;": "\u2199", + "swnwar;": "\u292A", + "szlig;": "\u00DF", + "szlig": "\u00DF", + "Tab;": "\t", + "target;": "\u2316", + "Tau;": "\u03A4", + "tau;": "\u03C4", + "tbrk;": 
"\u23B4", + "Tcaron;": "\u0164", + "tcaron;": "\u0165", + "Tcedil;": "\u0162", + "tcedil;": "\u0163", + "Tcy;": "\u0422", + "tcy;": "\u0442", + "tdot;": "\u20DB", + "telrec;": "\u2315", + "Tfr;": "\uD835\uDD17", + "tfr;": "\uD835\uDD31", + "there4;": "\u2234", + "Therefore;": "\u2234", + "therefore;": "\u2234", + "Theta;": "\u0398", + "theta;": "\u03B8", + "thetasym;": "\u03D1", + "thetav;": "\u03D1", + "thickapprox;": "\u2248", + "thicksim;": "\u223C", + "ThickSpace;": "\u205F\u200A", + "thinsp;": "\u2009", + "ThinSpace;": "\u2009", + "thkap;": "\u2248", + "thksim;": "\u223C", + "THORN;": "\u00DE", + "THORN": "\u00DE", + "thorn;": "\u00FE", + "thorn": "\u00FE", + "Tilde;": "\u223C", + "tilde;": "\u02DC", + "TildeEqual;": "\u2243", + "TildeFullEqual;": "\u2245", + "TildeTilde;": "\u2248", + "times;": "\u00D7", + "times": "\u00D7", + "timesb;": "\u22A0", + "timesbar;": "\u2A31", + "timesd;": "\u2A30", + "tint;": "\u222D", + "toea;": "\u2928", + "top;": "\u22A4", + "topbot;": "\u2336", + "topcir;": "\u2AF1", + "Topf;": "\uD835\uDD4B", + "topf;": "\uD835\uDD65", + "topfork;": "\u2ADA", + "tosa;": "\u2929", + "tprime;": "\u2034", + "TRADE;": "\u2122", + "trade;": "\u2122", + "triangle;": "\u25B5", + "triangledown;": "\u25BF", + "triangleleft;": "\u25C3", + "trianglelefteq;": "\u22B4", + "triangleq;": "\u225C", + "triangleright;": "\u25B9", + "trianglerighteq;": "\u22B5", + "tridot;": "\u25EC", + "trie;": "\u225C", + "triminus;": "\u2A3A", + "TripleDot;": "\u20DB", + "triplus;": "\u2A39", + "trisb;": "\u29CD", + "tritime;": "\u2A3B", + "trpezium;": "\u23E2", + "Tscr;": "\uD835\uDCAF", + "tscr;": "\uD835\uDCC9", + "TScy;": "\u0426", + "tscy;": "\u0446", + "TSHcy;": "\u040B", + "tshcy;": "\u045B", + "Tstrok;": "\u0166", + "tstrok;": "\u0167", + "twixt;": "\u226C", + "twoheadleftarrow;": "\u219E", + "twoheadrightarrow;": "\u21A0", + "Uacute;": "\u00DA", + "Uacute": "\u00DA", + "uacute;": "\u00FA", + "uacute": "\u00FA", + "Uarr;": "\u219F", + "uArr;": "\u21D1", + "uarr;": "\u2191", + "Uarrocir;": "\u2949", + "Ubrcy;": "\u040E", + "ubrcy;": "\u045E", + "Ubreve;": "\u016C", + "ubreve;": "\u016D", + "Ucirc;": "\u00DB", + "Ucirc": "\u00DB", + "ucirc;": "\u00FB", + "ucirc": "\u00FB", + "Ucy;": "\u0423", + "ucy;": "\u0443", + "udarr;": "\u21C5", + "Udblac;": "\u0170", + "udblac;": "\u0171", + "udhar;": "\u296E", + "ufisht;": "\u297E", + "Ufr;": "\uD835\uDD18", + "ufr;": "\uD835\uDD32", + "Ugrave;": "\u00D9", + "Ugrave": "\u00D9", + "ugrave;": "\u00F9", + "ugrave": "\u00F9", + "uHar;": "\u2963", + "uharl;": "\u21BF", + "uharr;": "\u21BE", + "uhblk;": "\u2580", + "ulcorn;": "\u231C", + "ulcorner;": "\u231C", + "ulcrop;": "\u230F", + "ultri;": "\u25F8", + "Umacr;": "\u016A", + "umacr;": "\u016B", + "uml;": "\u00A8", + "uml": "\u00A8", + "UnderBar;": "_", + "UnderBrace;": "\u23DF", + "UnderBracket;": "\u23B5", + "UnderParenthesis;": "\u23DD", + "Union;": "\u22C3", + "UnionPlus;": "\u228E", + "Uogon;": "\u0172", + "uogon;": "\u0173", + "Uopf;": "\uD835\uDD4C", + "uopf;": "\uD835\uDD66", + "UpArrow;": "\u2191", + "Uparrow;": "\u21D1", + "uparrow;": "\u2191", + "UpArrowBar;": "\u2912", + "UpArrowDownArrow;": "\u21C5", + "UpDownArrow;": "\u2195", + "Updownarrow;": "\u21D5", + "updownarrow;": "\u2195", + "UpEquilibrium;": "\u296E", + "upharpoonleft;": "\u21BF", + "upharpoonright;": "\u21BE", + "uplus;": "\u228E", + "UpperLeftArrow;": "\u2196", + "UpperRightArrow;": "\u2197", + "Upsi;": "\u03D2", + "upsi;": "\u03C5", + "upsih;": "\u03D2", + "Upsilon;": "\u03A5", + "upsilon;": "\u03C5", + "UpTee;": "\u22A5", + 
"UpTeeArrow;": "\u21A5", + "upuparrows;": "\u21C8", + "urcorn;": "\u231D", + "urcorner;": "\u231D", + "urcrop;": "\u230E", + "Uring;": "\u016E", + "uring;": "\u016F", + "urtri;": "\u25F9", + "Uscr;": "\uD835\uDCB0", + "uscr;": "\uD835\uDCCA", + "utdot;": "\u22F0", + "Utilde;": "\u0168", + "utilde;": "\u0169", + "utri;": "\u25B5", + "utrif;": "\u25B4", + "uuarr;": "\u21C8", + "Uuml;": "\u00DC", + "Uuml": "\u00DC", + "uuml;": "\u00FC", + "uuml": "\u00FC", + "uwangle;": "\u29A7", + "vangrt;": "\u299C", + "varepsilon;": "\u03F5", + "varkappa;": "\u03F0", + "varnothing;": "\u2205", + "varphi;": "\u03D5", + "varpi;": "\u03D6", + "varpropto;": "\u221D", + "vArr;": "\u21D5", + "varr;": "\u2195", + "varrho;": "\u03F1", + "varsigma;": "\u03C2", + "varsubsetneq;": "\u228A\uFE00", + "varsubsetneqq;": "\u2ACB\uFE00", + "varsupsetneq;": "\u228B\uFE00", + "varsupsetneqq;": "\u2ACC\uFE00", + "vartheta;": "\u03D1", + "vartriangleleft;": "\u22B2", + "vartriangleright;": "\u22B3", + "Vbar;": "\u2AEB", + "vBar;": "\u2AE8", + "vBarv;": "\u2AE9", + "Vcy;": "\u0412", + "vcy;": "\u0432", + "VDash;": "\u22AB", + "Vdash;": "\u22A9", + "vDash;": "\u22A8", + "vdash;": "\u22A2", + "Vdashl;": "\u2AE6", + "Vee;": "\u22C1", + "vee;": "\u2228", + "veebar;": "\u22BB", + "veeeq;": "\u225A", + "vellip;": "\u22EE", + "Verbar;": "\u2016", + "verbar;": "|", + "Vert;": "\u2016", + "vert;": "|", + "VerticalBar;": "\u2223", + "VerticalLine;": "|", + "VerticalSeparator;": "\u2758", + "VerticalTilde;": "\u2240", + "VeryThinSpace;": "\u200A", + "Vfr;": "\uD835\uDD19", + "vfr;": "\uD835\uDD33", + "vltri;": "\u22B2", + "vnsub;": "\u2282\u20D2", + "vnsup;": "\u2283\u20D2", + "Vopf;": "\uD835\uDD4D", + "vopf;": "\uD835\uDD67", + "vprop;": "\u221D", + "vrtri;": "\u22B3", + "Vscr;": "\uD835\uDCB1", + "vscr;": "\uD835\uDCCB", + "vsubnE;": "\u2ACB\uFE00", + "vsubne;": "\u228A\uFE00", + "vsupnE;": "\u2ACC\uFE00", + "vsupne;": "\u228B\uFE00", + "Vvdash;": "\u22AA", + "vzigzag;": "\u299A", + "Wcirc;": "\u0174", + "wcirc;": "\u0175", + "wedbar;": "\u2A5F", + "Wedge;": "\u22C0", + "wedge;": "\u2227", + "wedgeq;": "\u2259", + "weierp;": "\u2118", + "Wfr;": "\uD835\uDD1A", + "wfr;": "\uD835\uDD34", + "Wopf;": "\uD835\uDD4E", + "wopf;": "\uD835\uDD68", + "wp;": "\u2118", + "wr;": "\u2240", + "wreath;": "\u2240", + "Wscr;": "\uD835\uDCB2", + "wscr;": "\uD835\uDCCC", + "xcap;": "\u22C2", + "xcirc;": "\u25EF", + "xcup;": "\u22C3", + "xdtri;": "\u25BD", + "Xfr;": "\uD835\uDD1B", + "xfr;": "\uD835\uDD35", + "xhArr;": "\u27FA", + "xharr;": "\u27F7", + "Xi;": "\u039E", + "xi;": "\u03BE", + "xlArr;": "\u27F8", + "xlarr;": "\u27F5", + "xmap;": "\u27FC", + "xnis;": "\u22FB", + "xodot;": "\u2A00", + "Xopf;": "\uD835\uDD4F", + "xopf;": "\uD835\uDD69", + "xoplus;": "\u2A01", + "xotime;": "\u2A02", + "xrArr;": "\u27F9", + "xrarr;": "\u27F6", + "Xscr;": "\uD835\uDCB3", + "xscr;": "\uD835\uDCCD", + "xsqcup;": "\u2A06", + "xuplus;": "\u2A04", + "xutri;": "\u25B3", + "xvee;": "\u22C1", + "xwedge;": "\u22C0", + "Yacute;": "\u00DD", + "Yacute": "\u00DD", + "yacute;": "\u00FD", + "yacute": "\u00FD", + "YAcy;": "\u042F", + "yacy;": "\u044F", + "Ycirc;": "\u0176", + "ycirc;": "\u0177", + "Ycy;": "\u042B", + "ycy;": "\u044B", + "yen;": "\u00A5", + "yen": "\u00A5", + "Yfr;": "\uD835\uDD1C", + "yfr;": "\uD835\uDD36", + "YIcy;": "\u0407", + "yicy;": "\u0457", + "Yopf;": "\uD835\uDD50", + "yopf;": "\uD835\uDD6A", + "Yscr;": "\uD835\uDCB4", + "yscr;": "\uD835\uDCCE", + "YUcy;": "\u042E", + "yucy;": "\u044E", + "Yuml;": "\u0178", + "yuml;": "\u00FF", + "yuml": "\u00FF", + 
"Zacute;": "\u0179", + "zacute;": "\u017A", + "Zcaron;": "\u017D", + "zcaron;": "\u017E", + "Zcy;": "\u0417", + "zcy;": "\u0437", + "Zdot;": "\u017B", + "zdot;": "\u017C", + "zeetrf;": "\u2128", + "ZeroWidthSpace;": "\u200B", + "Zeta;": "\u0396", + "zeta;": "\u03B6", + "Zfr;": "\u2128", + "zfr;": "\uD835\uDD37", + "ZHcy;": "\u0416", + "zhcy;": "\u0436", + "zigrarr;": "\u21DD", + "Zopf;": "\u2124", + "zopf;": "\uD835\uDD6B", + "Zscr;": "\uD835\uDCB5", + "zscr;": "\uD835\uDCCF", + "zwj;": "\u200D", + "zwnj;": "\u200C" +} \ No newline at end of file diff --git a/node_modules/ent/examples/simple.js b/node_modules/ent/examples/simple.js new file mode 100644 index 0000000..48c4e6f --- /dev/null +++ b/node_modules/ent/examples/simple.js @@ -0,0 +1,3 @@ +var ent = require('ent'); +console.log(ent.encode('©moo')) +console.log(ent.decode('π & ρ')); diff --git a/node_modules/ent/index.js b/node_modules/ent/index.js new file mode 100644 index 0000000..8dae629 --- /dev/null +++ b/node_modules/ent/index.js @@ -0,0 +1,2 @@ +exports.encode = require('./encode'); +exports.decode = require('./decode'); diff --git a/node_modules/ent/package.json b/node_modules/ent/package.json new file mode 100644 index 0000000..e90fcd0 --- /dev/null +++ b/node_modules/ent/package.json @@ -0,0 +1,34 @@ +{ + "name" : "ent", + "description" : "Encode and decode HTML entities", + "version" : "2.2.0", + "repository" : "https://github.com/substack/node-ent.git", + "author" : "James Halliday (http://substack.net)", + "main" : "./index.js", + "keywords" : [ + "entities", + "entitify", + "entity", + "html", + "encode", + "decode" + ], + "license" : "MIT", + "scripts" : { + "test" : "tape test/*.js", + "prepublish" : "node build/index.js" + }, + "testling" : { + "files" : "test/*.js", + "browsers" : [ + "ie/6..latest", + "ff/3.5", "ff/latest", + "chrome/10", "chrome/latest", + "safari/latest", + "opera/latest" + ] + }, + "devDependencies" : { + "tape" : "~2.3.2" + } +} diff --git a/node_modules/ent/readme.markdown b/node_modules/ent/readme.markdown new file mode 100644 index 0000000..a09b480 --- /dev/null +++ b/node_modules/ent/readme.markdown @@ -0,0 +1,71 @@ +# ent + +Encode and decode HTML entities + +[![browser support](http://ci.testling.com/substack/node-ent.png)](http://ci.testling.com/substack/node-ent) + +[![build status](https://secure.travis-ci.org/substack/node-ent.png)](http://travis-ci.org/substack/node-ent) + +# example + +``` js +var ent = require('ent'); +console.log(ent.encode('©moo')) +console.log(ent.decode('π & ρ')); +``` + +``` +<span>©moo</span> +π & ρ +``` + +![ent](http://substack.net/images/ent.png) + +# methods + +``` js +var ent = require('ent'); +var encode = require('ent/encode'); +var decode = require('ent/decode'); +``` + +## encode(str, opts={}) + +Escape unsafe characters in `str` with html entities. + +By default, entities are encoded with numeric decimal codes. + +If `opts.numeric` is false or `opts.named` is true, encoding will used named +codes like `π`. + +If `opts.special` is set to an Object, the key names will be forced +to be encoded (defaults to forcing: `<>'"&`). For example: + +``` js +console.log(encode('hello', { special: { l: true } })); +``` + +``` +hello +``` + +## decode(str) + +Convert html entities in `str` back to raw text. 
+ +# credits + +HTML entity tables shamelessly lifted from perl's +[HTML::Entities](http://cpansearch.perl.org/src/GAAS/HTML-Parser-3.68/lib/HTML/Entities.pm) + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install ent +``` + +# license + +MIT diff --git a/node_modules/ent/reversed.json b/node_modules/ent/reversed.json new file mode 100644 index 0000000..d9a53a5 --- /dev/null +++ b/node_modules/ent/reversed.json @@ -0,0 +1,1315 @@ +{ + "9": "Tab;", + "10": "NewLine;", + "33": "excl;", + "34": "quot;", + "35": "num;", + "36": "dollar;", + "37": "percnt;", + "38": "amp;", + "39": "apos;", + "40": "lpar;", + "41": "rpar;", + "42": "midast;", + "43": "plus;", + "44": "comma;", + "46": "period;", + "47": "sol;", + "58": "colon;", + "59": "semi;", + "60": "lt;", + "61": "equals;", + "62": "gt;", + "63": "quest;", + "64": "commat;", + "91": "lsqb;", + "92": "bsol;", + "93": "rsqb;", + "94": "Hat;", + "95": "UnderBar;", + "96": "grave;", + "123": "lcub;", + "124": "VerticalLine;", + "125": "rcub;", + "160": "NonBreakingSpace;", + "161": "iexcl;", + "162": "cent;", + "163": "pound;", + "164": "curren;", + "165": "yen;", + "166": "brvbar;", + "167": "sect;", + "168": "uml;", + "169": "copy;", + "170": "ordf;", + "171": "laquo;", + "172": "not;", + "173": "shy;", + "174": "reg;", + "175": "strns;", + "176": "deg;", + "177": "pm;", + "178": "sup2;", + "179": "sup3;", + "180": "DiacriticalAcute;", + "181": "micro;", + "182": "para;", + "183": "middot;", + "184": "Cedilla;", + "185": "sup1;", + "186": "ordm;", + "187": "raquo;", + "188": "frac14;", + "189": "half;", + "190": "frac34;", + "191": "iquest;", + "192": "Agrave;", + "193": "Aacute;", + "194": "Acirc;", + "195": "Atilde;", + "196": "Auml;", + "197": "Aring;", + "198": "AElig;", + "199": "Ccedil;", + "200": "Egrave;", + "201": "Eacute;", + "202": "Ecirc;", + "203": "Euml;", + "204": "Igrave;", + "205": "Iacute;", + "206": "Icirc;", + "207": "Iuml;", + "208": "ETH;", + "209": "Ntilde;", + "210": "Ograve;", + "211": "Oacute;", + "212": "Ocirc;", + "213": "Otilde;", + "214": "Ouml;", + "215": "times;", + "216": "Oslash;", + "217": "Ugrave;", + "218": "Uacute;", + "219": "Ucirc;", + "220": "Uuml;", + "221": "Yacute;", + "222": "THORN;", + "223": "szlig;", + "224": "agrave;", + "225": "aacute;", + "226": "acirc;", + "227": "atilde;", + "228": "auml;", + "229": "aring;", + "230": "aelig;", + "231": "ccedil;", + "232": "egrave;", + "233": "eacute;", + "234": "ecirc;", + "235": "euml;", + "236": "igrave;", + "237": "iacute;", + "238": "icirc;", + "239": "iuml;", + "240": "eth;", + "241": "ntilde;", + "242": "ograve;", + "243": "oacute;", + "244": "ocirc;", + "245": "otilde;", + "246": "ouml;", + "247": "divide;", + "248": "oslash;", + "249": "ugrave;", + "250": "uacute;", + "251": "ucirc;", + "252": "uuml;", + "253": "yacute;", + "254": "thorn;", + "255": "yuml;", + "256": "Amacr;", + "257": "amacr;", + "258": "Abreve;", + "259": "abreve;", + "260": "Aogon;", + "261": "aogon;", + "262": "Cacute;", + "263": "cacute;", + "264": "Ccirc;", + "265": "ccirc;", + "266": "Cdot;", + "267": "cdot;", + "268": "Ccaron;", + "269": "ccaron;", + "270": "Dcaron;", + "271": "dcaron;", + "272": "Dstrok;", + "273": "dstrok;", + "274": "Emacr;", + "275": "emacr;", + "278": "Edot;", + "279": "edot;", + "280": "Eogon;", + "281": "eogon;", + "282": "Ecaron;", + "283": "ecaron;", + "284": "Gcirc;", + "285": "gcirc;", + "286": "Gbreve;", + "287": "gbreve;", + "288": "Gdot;", + "289": "gdot;", + "290": "Gcedil;", + "292": "Hcirc;", + "293": "hcirc;", + "294": 
"Hstrok;", + "295": "hstrok;", + "296": "Itilde;", + "297": "itilde;", + "298": "Imacr;", + "299": "imacr;", + "302": "Iogon;", + "303": "iogon;", + "304": "Idot;", + "305": "inodot;", + "306": "IJlig;", + "307": "ijlig;", + "308": "Jcirc;", + "309": "jcirc;", + "310": "Kcedil;", + "311": "kcedil;", + "312": "kgreen;", + "313": "Lacute;", + "314": "lacute;", + "315": "Lcedil;", + "316": "lcedil;", + "317": "Lcaron;", + "318": "lcaron;", + "319": "Lmidot;", + "320": "lmidot;", + "321": "Lstrok;", + "322": "lstrok;", + "323": "Nacute;", + "324": "nacute;", + "325": "Ncedil;", + "326": "ncedil;", + "327": "Ncaron;", + "328": "ncaron;", + "329": "napos;", + "330": "ENG;", + "331": "eng;", + "332": "Omacr;", + "333": "omacr;", + "336": "Odblac;", + "337": "odblac;", + "338": "OElig;", + "339": "oelig;", + "340": "Racute;", + "341": "racute;", + "342": "Rcedil;", + "343": "rcedil;", + "344": "Rcaron;", + "345": "rcaron;", + "346": "Sacute;", + "347": "sacute;", + "348": "Scirc;", + "349": "scirc;", + "350": "Scedil;", + "351": "scedil;", + "352": "Scaron;", + "353": "scaron;", + "354": "Tcedil;", + "355": "tcedil;", + "356": "Tcaron;", + "357": "tcaron;", + "358": "Tstrok;", + "359": "tstrok;", + "360": "Utilde;", + "361": "utilde;", + "362": "Umacr;", + "363": "umacr;", + "364": "Ubreve;", + "365": "ubreve;", + "366": "Uring;", + "367": "uring;", + "368": "Udblac;", + "369": "udblac;", + "370": "Uogon;", + "371": "uogon;", + "372": "Wcirc;", + "373": "wcirc;", + "374": "Ycirc;", + "375": "ycirc;", + "376": "Yuml;", + "377": "Zacute;", + "378": "zacute;", + "379": "Zdot;", + "380": "zdot;", + "381": "Zcaron;", + "382": "zcaron;", + "402": "fnof;", + "437": "imped;", + "501": "gacute;", + "567": "jmath;", + "710": "circ;", + "711": "Hacek;", + "728": "breve;", + "729": "dot;", + "730": "ring;", + "731": "ogon;", + "732": "tilde;", + "733": "DiacriticalDoubleAcute;", + "785": "DownBreve;", + "913": "Alpha;", + "914": "Beta;", + "915": "Gamma;", + "916": "Delta;", + "917": "Epsilon;", + "918": "Zeta;", + "919": "Eta;", + "920": "Theta;", + "921": "Iota;", + "922": "Kappa;", + "923": "Lambda;", + "924": "Mu;", + "925": "Nu;", + "926": "Xi;", + "927": "Omicron;", + "928": "Pi;", + "929": "Rho;", + "931": "Sigma;", + "932": "Tau;", + "933": "Upsilon;", + "934": "Phi;", + "935": "Chi;", + "936": "Psi;", + "937": "Omega;", + "945": "alpha;", + "946": "beta;", + "947": "gamma;", + "948": "delta;", + "949": "epsilon;", + "950": "zeta;", + "951": "eta;", + "952": "theta;", + "953": "iota;", + "954": "kappa;", + "955": "lambda;", + "956": "mu;", + "957": "nu;", + "958": "xi;", + "959": "omicron;", + "960": "pi;", + "961": "rho;", + "962": "varsigma;", + "963": "sigma;", + "964": "tau;", + "965": "upsilon;", + "966": "phi;", + "967": "chi;", + "968": "psi;", + "969": "omega;", + "977": "vartheta;", + "978": "upsih;", + "981": "varphi;", + "982": "varpi;", + "988": "Gammad;", + "989": "gammad;", + "1008": "varkappa;", + "1009": "varrho;", + "1013": "varepsilon;", + "1014": "bepsi;", + "1025": "IOcy;", + "1026": "DJcy;", + "1027": "GJcy;", + "1028": "Jukcy;", + "1029": "DScy;", + "1030": "Iukcy;", + "1031": "YIcy;", + "1032": "Jsercy;", + "1033": "LJcy;", + "1034": "NJcy;", + "1035": "TSHcy;", + "1036": "KJcy;", + "1038": "Ubrcy;", + "1039": "DZcy;", + "1040": "Acy;", + "1041": "Bcy;", + "1042": "Vcy;", + "1043": "Gcy;", + "1044": "Dcy;", + "1045": "IEcy;", + "1046": "ZHcy;", + "1047": "Zcy;", + "1048": "Icy;", + "1049": "Jcy;", + "1050": "Kcy;", + "1051": "Lcy;", + "1052": "Mcy;", + "1053": "Ncy;", + "1054": 
"Ocy;", + "1055": "Pcy;", + "1056": "Rcy;", + "1057": "Scy;", + "1058": "Tcy;", + "1059": "Ucy;", + "1060": "Fcy;", + "1061": "KHcy;", + "1062": "TScy;", + "1063": "CHcy;", + "1064": "SHcy;", + "1065": "SHCHcy;", + "1066": "HARDcy;", + "1067": "Ycy;", + "1068": "SOFTcy;", + "1069": "Ecy;", + "1070": "YUcy;", + "1071": "YAcy;", + "1072": "acy;", + "1073": "bcy;", + "1074": "vcy;", + "1075": "gcy;", + "1076": "dcy;", + "1077": "iecy;", + "1078": "zhcy;", + "1079": "zcy;", + "1080": "icy;", + "1081": "jcy;", + "1082": "kcy;", + "1083": "lcy;", + "1084": "mcy;", + "1085": "ncy;", + "1086": "ocy;", + "1087": "pcy;", + "1088": "rcy;", + "1089": "scy;", + "1090": "tcy;", + "1091": "ucy;", + "1092": "fcy;", + "1093": "khcy;", + "1094": "tscy;", + "1095": "chcy;", + "1096": "shcy;", + "1097": "shchcy;", + "1098": "hardcy;", + "1099": "ycy;", + "1100": "softcy;", + "1101": "ecy;", + "1102": "yucy;", + "1103": "yacy;", + "1105": "iocy;", + "1106": "djcy;", + "1107": "gjcy;", + "1108": "jukcy;", + "1109": "dscy;", + "1110": "iukcy;", + "1111": "yicy;", + "1112": "jsercy;", + "1113": "ljcy;", + "1114": "njcy;", + "1115": "tshcy;", + "1116": "kjcy;", + "1118": "ubrcy;", + "1119": "dzcy;", + "8194": "ensp;", + "8195": "emsp;", + "8196": "emsp13;", + "8197": "emsp14;", + "8199": "numsp;", + "8200": "puncsp;", + "8201": "ThinSpace;", + "8202": "VeryThinSpace;", + "8203": "ZeroWidthSpace;", + "8204": "zwnj;", + "8205": "zwj;", + "8206": "lrm;", + "8207": "rlm;", + "8208": "hyphen;", + "8211": "ndash;", + "8212": "mdash;", + "8213": "horbar;", + "8214": "Vert;", + "8216": "OpenCurlyQuote;", + "8217": "rsquor;", + "8218": "sbquo;", + "8220": "OpenCurlyDoubleQuote;", + "8221": "rdquor;", + "8222": "ldquor;", + "8224": "dagger;", + "8225": "ddagger;", + "8226": "bullet;", + "8229": "nldr;", + "8230": "mldr;", + "8240": "permil;", + "8241": "pertenk;", + "8242": "prime;", + "8243": "Prime;", + "8244": "tprime;", + "8245": "bprime;", + "8249": "lsaquo;", + "8250": "rsaquo;", + "8254": "OverBar;", + "8257": "caret;", + "8259": "hybull;", + "8260": "frasl;", + "8271": "bsemi;", + "8279": "qprime;", + "8287": "MediumSpace;", + "8288": "NoBreak;", + "8289": "ApplyFunction;", + "8290": "it;", + "8291": "InvisibleComma;", + "8364": "euro;", + "8411": "TripleDot;", + "8412": "DotDot;", + "8450": "Copf;", + "8453": "incare;", + "8458": "gscr;", + "8459": "Hscr;", + "8460": "Poincareplane;", + "8461": "quaternions;", + "8462": "planckh;", + "8463": "plankv;", + "8464": "Iscr;", + "8465": "imagpart;", + "8466": "Lscr;", + "8467": "ell;", + "8469": "Nopf;", + "8470": "numero;", + "8471": "copysr;", + "8472": "wp;", + "8473": "primes;", + "8474": "rationals;", + "8475": "Rscr;", + "8476": "Rfr;", + "8477": "Ropf;", + "8478": "rx;", + "8482": "trade;", + "8484": "Zopf;", + "8487": "mho;", + "8488": "Zfr;", + "8489": "iiota;", + "8492": "Bscr;", + "8493": "Cfr;", + "8495": "escr;", + "8496": "expectation;", + "8497": "Fscr;", + "8499": "phmmat;", + "8500": "oscr;", + "8501": "aleph;", + "8502": "beth;", + "8503": "gimel;", + "8504": "daleth;", + "8517": "DD;", + "8518": "DifferentialD;", + "8519": "exponentiale;", + "8520": "ImaginaryI;", + "8531": "frac13;", + "8532": "frac23;", + "8533": "frac15;", + "8534": "frac25;", + "8535": "frac35;", + "8536": "frac45;", + "8537": "frac16;", + "8538": "frac56;", + "8539": "frac18;", + "8540": "frac38;", + "8541": "frac58;", + "8542": "frac78;", + "8592": "slarr;", + "8593": "uparrow;", + "8594": "srarr;", + "8595": "ShortDownArrow;", + "8596": "leftrightarrow;", + "8597": "varr;", + 
"8598": "UpperLeftArrow;", + "8599": "UpperRightArrow;", + "8600": "searrow;", + "8601": "swarrow;", + "8602": "nleftarrow;", + "8603": "nrightarrow;", + "8605": "rightsquigarrow;", + "8606": "twoheadleftarrow;", + "8607": "Uarr;", + "8608": "twoheadrightarrow;", + "8609": "Darr;", + "8610": "leftarrowtail;", + "8611": "rightarrowtail;", + "8612": "mapstoleft;", + "8613": "UpTeeArrow;", + "8614": "RightTeeArrow;", + "8615": "mapstodown;", + "8617": "larrhk;", + "8618": "rarrhk;", + "8619": "looparrowleft;", + "8620": "rarrlp;", + "8621": "leftrightsquigarrow;", + "8622": "nleftrightarrow;", + "8624": "lsh;", + "8625": "rsh;", + "8626": "ldsh;", + "8627": "rdsh;", + "8629": "crarr;", + "8630": "curvearrowleft;", + "8631": "curvearrowright;", + "8634": "olarr;", + "8635": "orarr;", + "8636": "lharu;", + "8637": "lhard;", + "8638": "upharpoonright;", + "8639": "upharpoonleft;", + "8640": "RightVector;", + "8641": "rightharpoondown;", + "8642": "RightDownVector;", + "8643": "LeftDownVector;", + "8644": "rlarr;", + "8645": "UpArrowDownArrow;", + "8646": "lrarr;", + "8647": "llarr;", + "8648": "uuarr;", + "8649": "rrarr;", + "8650": "downdownarrows;", + "8651": "ReverseEquilibrium;", + "8652": "rlhar;", + "8653": "nLeftarrow;", + "8654": "nLeftrightarrow;", + "8655": "nRightarrow;", + "8656": "Leftarrow;", + "8657": "Uparrow;", + "8658": "Rightarrow;", + "8659": "Downarrow;", + "8660": "Leftrightarrow;", + "8661": "vArr;", + "8662": "nwArr;", + "8663": "neArr;", + "8664": "seArr;", + "8665": "swArr;", + "8666": "Lleftarrow;", + "8667": "Rrightarrow;", + "8669": "zigrarr;", + "8676": "LeftArrowBar;", + "8677": "RightArrowBar;", + "8693": "duarr;", + "8701": "loarr;", + "8702": "roarr;", + "8703": "hoarr;", + "8704": "forall;", + "8705": "complement;", + "8706": "PartialD;", + "8707": "Exists;", + "8708": "NotExists;", + "8709": "varnothing;", + "8711": "nabla;", + "8712": "isinv;", + "8713": "notinva;", + "8715": "SuchThat;", + "8716": "NotReverseElement;", + "8719": "Product;", + "8720": "Coproduct;", + "8721": "sum;", + "8722": "minus;", + "8723": "mp;", + "8724": "plusdo;", + "8726": "ssetmn;", + "8727": "lowast;", + "8728": "SmallCircle;", + "8730": "Sqrt;", + "8733": "vprop;", + "8734": "infin;", + "8735": "angrt;", + "8736": "angle;", + "8737": "measuredangle;", + "8738": "angsph;", + "8739": "VerticalBar;", + "8740": "nsmid;", + "8741": "spar;", + "8742": "nspar;", + "8743": "wedge;", + "8744": "vee;", + "8745": "cap;", + "8746": "cup;", + "8747": "Integral;", + "8748": "Int;", + "8749": "tint;", + "8750": "oint;", + "8751": "DoubleContourIntegral;", + "8752": "Cconint;", + "8753": "cwint;", + "8754": "cwconint;", + "8755": "CounterClockwiseContourIntegral;", + "8756": "therefore;", + "8757": "because;", + "8758": "ratio;", + "8759": "Proportion;", + "8760": "minusd;", + "8762": "mDDot;", + "8763": "homtht;", + "8764": "Tilde;", + "8765": "bsim;", + "8766": "mstpos;", + "8767": "acd;", + "8768": "wreath;", + "8769": "nsim;", + "8770": "esim;", + "8771": "TildeEqual;", + "8772": "nsimeq;", + "8773": "TildeFullEqual;", + "8774": "simne;", + "8775": "NotTildeFullEqual;", + "8776": "TildeTilde;", + "8777": "NotTildeTilde;", + "8778": "approxeq;", + "8779": "apid;", + "8780": "bcong;", + "8781": "CupCap;", + "8782": "HumpDownHump;", + "8783": "HumpEqual;", + "8784": "esdot;", + "8785": "eDot;", + "8786": "fallingdotseq;", + "8787": "risingdotseq;", + "8788": "coloneq;", + "8789": "eqcolon;", + "8790": "eqcirc;", + "8791": "cire;", + "8793": "wedgeq;", + "8794": "veeeq;", + "8796": "trie;", + 
"8799": "questeq;", + "8800": "NotEqual;", + "8801": "equiv;", + "8802": "NotCongruent;", + "8804": "leq;", + "8805": "GreaterEqual;", + "8806": "LessFullEqual;", + "8807": "GreaterFullEqual;", + "8808": "lneqq;", + "8809": "gneqq;", + "8810": "NestedLessLess;", + "8811": "NestedGreaterGreater;", + "8812": "twixt;", + "8813": "NotCupCap;", + "8814": "NotLess;", + "8815": "NotGreater;", + "8816": "NotLessEqual;", + "8817": "NotGreaterEqual;", + "8818": "lsim;", + "8819": "gtrsim;", + "8820": "NotLessTilde;", + "8821": "NotGreaterTilde;", + "8822": "lg;", + "8823": "gtrless;", + "8824": "ntlg;", + "8825": "ntgl;", + "8826": "Precedes;", + "8827": "Succeeds;", + "8828": "PrecedesSlantEqual;", + "8829": "SucceedsSlantEqual;", + "8830": "prsim;", + "8831": "succsim;", + "8832": "nprec;", + "8833": "nsucc;", + "8834": "subset;", + "8835": "supset;", + "8836": "nsub;", + "8837": "nsup;", + "8838": "SubsetEqual;", + "8839": "supseteq;", + "8840": "nsubseteq;", + "8841": "nsupseteq;", + "8842": "subsetneq;", + "8843": "supsetneq;", + "8845": "cupdot;", + "8846": "uplus;", + "8847": "SquareSubset;", + "8848": "SquareSuperset;", + "8849": "SquareSubsetEqual;", + "8850": "SquareSupersetEqual;", + "8851": "SquareIntersection;", + "8852": "SquareUnion;", + "8853": "oplus;", + "8854": "ominus;", + "8855": "otimes;", + "8856": "osol;", + "8857": "odot;", + "8858": "ocir;", + "8859": "oast;", + "8861": "odash;", + "8862": "plusb;", + "8863": "minusb;", + "8864": "timesb;", + "8865": "sdotb;", + "8866": "vdash;", + "8867": "LeftTee;", + "8868": "top;", + "8869": "UpTee;", + "8871": "models;", + "8872": "vDash;", + "8873": "Vdash;", + "8874": "Vvdash;", + "8875": "VDash;", + "8876": "nvdash;", + "8877": "nvDash;", + "8878": "nVdash;", + "8879": "nVDash;", + "8880": "prurel;", + "8882": "vltri;", + "8883": "vrtri;", + "8884": "trianglelefteq;", + "8885": "trianglerighteq;", + "8886": "origof;", + "8887": "imof;", + "8888": "mumap;", + "8889": "hercon;", + "8890": "intercal;", + "8891": "veebar;", + "8893": "barvee;", + "8894": "angrtvb;", + "8895": "lrtri;", + "8896": "xwedge;", + "8897": "xvee;", + "8898": "xcap;", + "8899": "xcup;", + "8900": "diamond;", + "8901": "sdot;", + "8902": "Star;", + "8903": "divonx;", + "8904": "bowtie;", + "8905": "ltimes;", + "8906": "rtimes;", + "8907": "lthree;", + "8908": "rthree;", + "8909": "bsime;", + "8910": "cuvee;", + "8911": "cuwed;", + "8912": "Subset;", + "8913": "Supset;", + "8914": "Cap;", + "8915": "Cup;", + "8916": "pitchfork;", + "8917": "epar;", + "8918": "ltdot;", + "8919": "gtrdot;", + "8920": "Ll;", + "8921": "ggg;", + "8922": "LessEqualGreater;", + "8923": "gtreqless;", + "8926": "curlyeqprec;", + "8927": "curlyeqsucc;", + "8928": "nprcue;", + "8929": "nsccue;", + "8930": "nsqsube;", + "8931": "nsqsupe;", + "8934": "lnsim;", + "8935": "gnsim;", + "8936": "prnsim;", + "8937": "succnsim;", + "8938": "ntriangleleft;", + "8939": "ntriangleright;", + "8940": "ntrianglelefteq;", + "8941": "ntrianglerighteq;", + "8942": "vellip;", + "8943": "ctdot;", + "8944": "utdot;", + "8945": "dtdot;", + "8946": "disin;", + "8947": "isinsv;", + "8948": "isins;", + "8949": "isindot;", + "8950": "notinvc;", + "8951": "notinvb;", + "8953": "isinE;", + "8954": "nisd;", + "8955": "xnis;", + "8956": "nis;", + "8957": "notnivc;", + "8958": "notnivb;", + "8965": "barwedge;", + "8966": "doublebarwedge;", + "8968": "LeftCeiling;", + "8969": "RightCeiling;", + "8970": "lfloor;", + "8971": "RightFloor;", + "8972": "drcrop;", + "8973": "dlcrop;", + "8974": "urcrop;", + "8975": "ulcrop;", 
+ "8976": "bnot;", + "8978": "profline;", + "8979": "profsurf;", + "8981": "telrec;", + "8982": "target;", + "8988": "ulcorner;", + "8989": "urcorner;", + "8990": "llcorner;", + "8991": "lrcorner;", + "8994": "sfrown;", + "8995": "ssmile;", + "9005": "cylcty;", + "9006": "profalar;", + "9014": "topbot;", + "9021": "ovbar;", + "9023": "solbar;", + "9084": "angzarr;", + "9136": "lmoustache;", + "9137": "rmoustache;", + "9140": "tbrk;", + "9141": "UnderBracket;", + "9142": "bbrktbrk;", + "9180": "OverParenthesis;", + "9181": "UnderParenthesis;", + "9182": "OverBrace;", + "9183": "UnderBrace;", + "9186": "trpezium;", + "9191": "elinters;", + "9251": "blank;", + "9416": "oS;", + "9472": "HorizontalLine;", + "9474": "boxv;", + "9484": "boxdr;", + "9488": "boxdl;", + "9492": "boxur;", + "9496": "boxul;", + "9500": "boxvr;", + "9508": "boxvl;", + "9516": "boxhd;", + "9524": "boxhu;", + "9532": "boxvh;", + "9552": "boxH;", + "9553": "boxV;", + "9554": "boxdR;", + "9555": "boxDr;", + "9556": "boxDR;", + "9557": "boxdL;", + "9558": "boxDl;", + "9559": "boxDL;", + "9560": "boxuR;", + "9561": "boxUr;", + "9562": "boxUR;", + "9563": "boxuL;", + "9564": "boxUl;", + "9565": "boxUL;", + "9566": "boxvR;", + "9567": "boxVr;", + "9568": "boxVR;", + "9569": "boxvL;", + "9570": "boxVl;", + "9571": "boxVL;", + "9572": "boxHd;", + "9573": "boxhD;", + "9574": "boxHD;", + "9575": "boxHu;", + "9576": "boxhU;", + "9577": "boxHU;", + "9578": "boxvH;", + "9579": "boxVh;", + "9580": "boxVH;", + "9600": "uhblk;", + "9604": "lhblk;", + "9608": "block;", + "9617": "blk14;", + "9618": "blk12;", + "9619": "blk34;", + "9633": "square;", + "9642": "squf;", + "9643": "EmptyVerySmallSquare;", + "9645": "rect;", + "9646": "marker;", + "9649": "fltns;", + "9651": "xutri;", + "9652": "utrif;", + "9653": "utri;", + "9656": "rtrif;", + "9657": "triangleright;", + "9661": "xdtri;", + "9662": "dtrif;", + "9663": "triangledown;", + "9666": "ltrif;", + "9667": "triangleleft;", + "9674": "lozenge;", + "9675": "cir;", + "9708": "tridot;", + "9711": "xcirc;", + "9720": "ultri;", + "9721": "urtri;", + "9722": "lltri;", + "9723": "EmptySmallSquare;", + "9724": "FilledSmallSquare;", + "9733": "starf;", + "9734": "star;", + "9742": "phone;", + "9792": "female;", + "9794": "male;", + "9824": "spadesuit;", + "9827": "clubsuit;", + "9829": "heartsuit;", + "9830": "diams;", + "9834": "sung;", + "9837": "flat;", + "9838": "natural;", + "9839": "sharp;", + "10003": "checkmark;", + "10007": "cross;", + "10016": "maltese;", + "10038": "sext;", + "10072": "VerticalSeparator;", + "10098": "lbbrk;", + "10099": "rbbrk;", + "10184": "bsolhsub;", + "10185": "suphsol;", + "10214": "lobrk;", + "10215": "robrk;", + "10216": "LeftAngleBracket;", + "10217": "RightAngleBracket;", + "10218": "Lang;", + "10219": "Rang;", + "10220": "loang;", + "10221": "roang;", + "10229": "xlarr;", + "10230": "xrarr;", + "10231": "xharr;", + "10232": "xlArr;", + "10233": "xrArr;", + "10234": "xhArr;", + "10236": "xmap;", + "10239": "dzigrarr;", + "10498": "nvlArr;", + "10499": "nvrArr;", + "10500": "nvHarr;", + "10501": "Map;", + "10508": "lbarr;", + "10509": "rbarr;", + "10510": "lBarr;", + "10511": "rBarr;", + "10512": "RBarr;", + "10513": "DDotrahd;", + "10514": "UpArrowBar;", + "10515": "DownArrowBar;", + "10518": "Rarrtl;", + "10521": "latail;", + "10522": "ratail;", + "10523": "lAtail;", + "10524": "rAtail;", + "10525": "larrfs;", + "10526": "rarrfs;", + "10527": "larrbfs;", + "10528": "rarrbfs;", + "10531": "nwarhk;", + "10532": "nearhk;", + "10533": "searhk;", + "10534": 
"swarhk;", + "10535": "nwnear;", + "10536": "toea;", + "10537": "tosa;", + "10538": "swnwar;", + "10547": "rarrc;", + "10549": "cudarrr;", + "10550": "ldca;", + "10551": "rdca;", + "10552": "cudarrl;", + "10553": "larrpl;", + "10556": "curarrm;", + "10557": "cularrp;", + "10565": "rarrpl;", + "10568": "harrcir;", + "10569": "Uarrocir;", + "10570": "lurdshar;", + "10571": "ldrushar;", + "10574": "LeftRightVector;", + "10575": "RightUpDownVector;", + "10576": "DownLeftRightVector;", + "10577": "LeftUpDownVector;", + "10578": "LeftVectorBar;", + "10579": "RightVectorBar;", + "10580": "RightUpVectorBar;", + "10581": "RightDownVectorBar;", + "10582": "DownLeftVectorBar;", + "10583": "DownRightVectorBar;", + "10584": "LeftUpVectorBar;", + "10585": "LeftDownVectorBar;", + "10586": "LeftTeeVector;", + "10587": "RightTeeVector;", + "10588": "RightUpTeeVector;", + "10589": "RightDownTeeVector;", + "10590": "DownLeftTeeVector;", + "10591": "DownRightTeeVector;", + "10592": "LeftUpTeeVector;", + "10593": "LeftDownTeeVector;", + "10594": "lHar;", + "10595": "uHar;", + "10596": "rHar;", + "10597": "dHar;", + "10598": "luruhar;", + "10599": "ldrdhar;", + "10600": "ruluhar;", + "10601": "rdldhar;", + "10602": "lharul;", + "10603": "llhard;", + "10604": "rharul;", + "10605": "lrhard;", + "10606": "UpEquilibrium;", + "10607": "ReverseUpEquilibrium;", + "10608": "RoundImplies;", + "10609": "erarr;", + "10610": "simrarr;", + "10611": "larrsim;", + "10612": "rarrsim;", + "10613": "rarrap;", + "10614": "ltlarr;", + "10616": "gtrarr;", + "10617": "subrarr;", + "10619": "suplarr;", + "10620": "lfisht;", + "10621": "rfisht;", + "10622": "ufisht;", + "10623": "dfisht;", + "10629": "lopar;", + "10630": "ropar;", + "10635": "lbrke;", + "10636": "rbrke;", + "10637": "lbrkslu;", + "10638": "rbrksld;", + "10639": "lbrksld;", + "10640": "rbrkslu;", + "10641": "langd;", + "10642": "rangd;", + "10643": "lparlt;", + "10644": "rpargt;", + "10645": "gtlPar;", + "10646": "ltrPar;", + "10650": "vzigzag;", + "10652": "vangrt;", + "10653": "angrtvbd;", + "10660": "ange;", + "10661": "range;", + "10662": "dwangle;", + "10663": "uwangle;", + "10664": "angmsdaa;", + "10665": "angmsdab;", + "10666": "angmsdac;", + "10667": "angmsdad;", + "10668": "angmsdae;", + "10669": "angmsdaf;", + "10670": "angmsdag;", + "10671": "angmsdah;", + "10672": "bemptyv;", + "10673": "demptyv;", + "10674": "cemptyv;", + "10675": "raemptyv;", + "10676": "laemptyv;", + "10677": "ohbar;", + "10678": "omid;", + "10679": "opar;", + "10681": "operp;", + "10683": "olcross;", + "10684": "odsold;", + "10686": "olcir;", + "10687": "ofcir;", + "10688": "olt;", + "10689": "ogt;", + "10690": "cirscir;", + "10691": "cirE;", + "10692": "solb;", + "10693": "bsolb;", + "10697": "boxbox;", + "10701": "trisb;", + "10702": "rtriltri;", + "10703": "LeftTriangleBar;", + "10704": "RightTriangleBar;", + "10716": "iinfin;", + "10717": "infintie;", + "10718": "nvinfin;", + "10723": "eparsl;", + "10724": "smeparsl;", + "10725": "eqvparsl;", + "10731": "lozf;", + "10740": "RuleDelayed;", + "10742": "dsol;", + "10752": "xodot;", + "10753": "xoplus;", + "10754": "xotime;", + "10756": "xuplus;", + "10758": "xsqcup;", + "10764": "qint;", + "10765": "fpartint;", + "10768": "cirfnint;", + "10769": "awint;", + "10770": "rppolint;", + "10771": "scpolint;", + "10772": "npolint;", + "10773": "pointint;", + "10774": "quatint;", + "10775": "intlarhk;", + "10786": "pluscir;", + "10787": "plusacir;", + "10788": "simplus;", + "10789": "plusdu;", + "10790": "plussim;", + "10791": "plustwo;", + 
"10793": "mcomma;", + "10794": "minusdu;", + "10797": "loplus;", + "10798": "roplus;", + "10799": "Cross;", + "10800": "timesd;", + "10801": "timesbar;", + "10803": "smashp;", + "10804": "lotimes;", + "10805": "rotimes;", + "10806": "otimesas;", + "10807": "Otimes;", + "10808": "odiv;", + "10809": "triplus;", + "10810": "triminus;", + "10811": "tritime;", + "10812": "iprod;", + "10815": "amalg;", + "10816": "capdot;", + "10818": "ncup;", + "10819": "ncap;", + "10820": "capand;", + "10821": "cupor;", + "10822": "cupcap;", + "10823": "capcup;", + "10824": "cupbrcap;", + "10825": "capbrcup;", + "10826": "cupcup;", + "10827": "capcap;", + "10828": "ccups;", + "10829": "ccaps;", + "10832": "ccupssm;", + "10835": "And;", + "10836": "Or;", + "10837": "andand;", + "10838": "oror;", + "10839": "orslope;", + "10840": "andslope;", + "10842": "andv;", + "10843": "orv;", + "10844": "andd;", + "10845": "ord;", + "10847": "wedbar;", + "10854": "sdote;", + "10858": "simdot;", + "10861": "congdot;", + "10862": "easter;", + "10863": "apacir;", + "10864": "apE;", + "10865": "eplus;", + "10866": "pluse;", + "10867": "Esim;", + "10868": "Colone;", + "10869": "Equal;", + "10871": "eDDot;", + "10872": "equivDD;", + "10873": "ltcir;", + "10874": "gtcir;", + "10875": "ltquest;", + "10876": "gtquest;", + "10877": "LessSlantEqual;", + "10878": "GreaterSlantEqual;", + "10879": "lesdot;", + "10880": "gesdot;", + "10881": "lesdoto;", + "10882": "gesdoto;", + "10883": "lesdotor;", + "10884": "gesdotol;", + "10885": "lessapprox;", + "10886": "gtrapprox;", + "10887": "lneq;", + "10888": "gneq;", + "10889": "lnapprox;", + "10890": "gnapprox;", + "10891": "lesseqqgtr;", + "10892": "gtreqqless;", + "10893": "lsime;", + "10894": "gsime;", + "10895": "lsimg;", + "10896": "gsiml;", + "10897": "lgE;", + "10898": "glE;", + "10899": "lesges;", + "10900": "gesles;", + "10901": "eqslantless;", + "10902": "eqslantgtr;", + "10903": "elsdot;", + "10904": "egsdot;", + "10905": "el;", + "10906": "eg;", + "10909": "siml;", + "10910": "simg;", + "10911": "simlE;", + "10912": "simgE;", + "10913": "LessLess;", + "10914": "GreaterGreater;", + "10916": "glj;", + "10917": "gla;", + "10918": "ltcc;", + "10919": "gtcc;", + "10920": "lescc;", + "10921": "gescc;", + "10922": "smt;", + "10923": "lat;", + "10924": "smte;", + "10925": "late;", + "10926": "bumpE;", + "10927": "preceq;", + "10928": "succeq;", + "10931": "prE;", + "10932": "scE;", + "10933": "prnE;", + "10934": "succneqq;", + "10935": "precapprox;", + "10936": "succapprox;", + "10937": "prnap;", + "10938": "succnapprox;", + "10939": "Pr;", + "10940": "Sc;", + "10941": "subdot;", + "10942": "supdot;", + "10943": "subplus;", + "10944": "supplus;", + "10945": "submult;", + "10946": "supmult;", + "10947": "subedot;", + "10948": "supedot;", + "10949": "subseteqq;", + "10950": "supseteqq;", + "10951": "subsim;", + "10952": "supsim;", + "10955": "subsetneqq;", + "10956": "supsetneqq;", + "10959": "csub;", + "10960": "csup;", + "10961": "csube;", + "10962": "csupe;", + "10963": "subsup;", + "10964": "supsub;", + "10965": "subsub;", + "10966": "supsup;", + "10967": "suphsub;", + "10968": "supdsub;", + "10969": "forkv;", + "10970": "topfork;", + "10971": "mlcp;", + "10980": "DoubleLeftTee;", + "10982": "Vdashl;", + "10983": "Barv;", + "10984": "vBar;", + "10985": "vBarv;", + "10987": "Vbar;", + "10988": "Not;", + "10989": "bNot;", + "10990": "rnmid;", + "10991": "cirmid;", + "10992": "midcir;", + "10993": "topcir;", + "10994": "nhpar;", + "10995": "parsim;", + "11005": "parsl;", + "64256": 
"fflig;", + "64257": "filig;", + "64258": "fllig;", + "64259": "ffilig;", + "64260": "ffllig;" +} \ No newline at end of file diff --git a/node_modules/ent/test/codes.js b/node_modules/ent/test/codes.js new file mode 100644 index 0000000..d993400 --- /dev/null +++ b/node_modules/ent/test/codes.js @@ -0,0 +1,101 @@ +var test = require('tape'); +var punycode = require('punycode'); +var ent = require('../'); + +test('amp', function (t) { + var a = 'a & b & c'; + var b = 'a & b & c'; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + t.end(); +}); + +test('html', function (t) { + var a = ' © π " \''; + var b = '<html> © π " ''; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + t.end(); +}); + +test('html named', function (t) { + var a = ' © π " \' ∴ Β β'; + var b = '<html> © π " ' ∴ Β β'; + t.equal(ent.encode(a, { named: true }), b); + t.equal(ent.decode(b), a); + t.end(); +}); + +test('ambiguous ampersands', function (t) { + var a = '• &bullet'; + var b = '• &bullet'; + var c = '• &bullet'; + t.equal(ent.encode(a, { named: true }), c); + t.equal(ent.decode(b), a); + t.equal(ent.decode(c), a); + t.end(); +}); + +test('entities', function (t) { + var a = '\u2124'; + var b = 'ℤ'; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + t.end(); +}); + +test('entities named', function (t) { + var a = '\u2124'; + var b = 'ℤ'; + t.equal(ent.encode(a, { named: true }), b); + t.equal(ent.decode(b), a); + t.end(); +}); + +test('num', function (t) { + var a = String.fromCharCode(1337); + var b = 'Թ'; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + + t.equal(ent.encode(a + a), b + b); + t.equal(ent.decode(b + b), a + a); + t.end(); +}); + +test('astral num', function (t) { + var a = punycode.ucs2.encode([0x1d306]); + var b = '𝌆'; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + + t.equal(ent.encode(a + a), b + b); + t.equal(ent.decode(b + b), a + a); + t.end(); +}); + +test('nested escapes', function (t) { + var a = '&'; + var b = '&amp;'; + t.equal(ent.encode(a), b); + t.equal(ent.decode(b), a); + + t.equal(ent.encode(a + a), b + b); + t.equal(ent.decode(b + b), a + a); + t.end(); +}); + +test('encode `special` option', function (t) { + var a = '<>\'"&'; + var b = '<>\'"&'; + t.equal(ent.encode(a, { + named: true, + special: { + '<': true, + '>': true, + '&': true + } + }), b); + + t.end(); +}); diff --git a/node_modules/ent/test/hex.js b/node_modules/ent/test/hex.js new file mode 100644 index 0000000..bad2a9a --- /dev/null +++ b/node_modules/ent/test/hex.js @@ -0,0 +1,35 @@ +var test = require('tape'); +var punycode = require('punycode'); +var ent = require('../'); + +test('hex', function (t) { + for (var i = 0; i < 32; i++) { + var a = String.fromCharCode(i); + if (a.match(/\s/)) { + t.equal(ent.decode(a), a); + } + else { + var b = '&#x' + i.toString(16) + ';'; + t.equal(ent.decode(b), a); + t.equal(ent.encode(a), '&#' + i + ';'); + } + } + + for (var i = 127; i < 2000; i++) { + var a = String.fromCharCode(i); + var b = '&#x' + i.toString(16) + ';'; + var c = '&#X' + i.toString(16) + ';'; + + t.equal(ent.decode(b), a); + t.equal(ent.decode(c), a); + + var encoded = ent.encode(a); + var encoded2 = ent.encode(a + a); + if (!encoded.match(/^&\w+;/)) { + t.equal(encoded, '&#' + i + ';'); + t.equal(encoded2, '&#' + i + ';&#' + i + ';'); + } + } + t.end(); +}); + diff --git a/node_modules/ent/test/num.js b/node_modules/ent/test/num.js new file mode 100644 index 0000000..399d28c --- /dev/null +++ b/node_modules/ent/test/num.js @@ -0,0 +1,13 @@ +var test = 
require('tape'); +var ent = require('../'); + +test('opts.numeric', function (t) { + var a = 'a & b & c'; + var ax = 'a & b & c'; + var b = ' © π " \''; + var bx = '<html> © π " ''; + + t.equal(ent.encode(a), ax); + t.equal(ent.encode(b), bx); + t.end(); +}); diff --git a/node_modules/event-target-shim/LICENSE b/node_modules/event-target-shim/LICENSE new file mode 100644 index 0000000..c39e694 --- /dev/null +++ b/node_modules/event-target-shim/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Toru Nagashima + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/event-target-shim/README.md b/node_modules/event-target-shim/README.md new file mode 100644 index 0000000..a4f9c1b --- /dev/null +++ b/node_modules/event-target-shim/README.md @@ -0,0 +1,293 @@ +# event-target-shim + +[![npm version](https://img.shields.io/npm/v/event-target-shim.svg)](https://www.npmjs.com/package/event-target-shim) +[![Downloads/month](https://img.shields.io/npm/dm/event-target-shim.svg)](http://www.npmtrends.com/event-target-shim) +[![Build Status](https://travis-ci.org/mysticatea/event-target-shim.svg?branch=master)](https://travis-ci.org/mysticatea/event-target-shim) +[![Coverage Status](https://codecov.io/gh/mysticatea/event-target-shim/branch/master/graph/badge.svg)](https://codecov.io/gh/mysticatea/event-target-shim) +[![Dependency Status](https://david-dm.org/mysticatea/event-target-shim.svg)](https://david-dm.org/mysticatea/event-target-shim) + +An implementation of [WHATWG EventTarget interface](https://dom.spec.whatwg.org/#interface-eventtarget), plus few extensions. + +- This provides `EventTarget` constructor that can inherit for your custom object. +- This provides an utility that defines properties of attribute listeners (e.g. `obj.onclick`). + +```js +import {EventTarget, defineEventAttribute} from "event-target-shim" + +class Foo extends EventTarget { + // ... +} + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Use +const foo = new Foo() +foo.addEventListener("hello", e => console.log("hello", e)) +foo.onhello = e => console.log("onhello:", e) +foo.dispatchEvent(new CustomEvent("hello")) +``` + +## 💿 Installation + +Use [npm](https://www.npmjs.com/) to install then use a bundler. + +``` +npm install event-target-shim +``` + +Or download from [`dist` directory](./dist). + +- [dist/event-target-shim.mjs](dist/event-target-shim.mjs) ... ES modules version. +- [dist/event-target-shim.js](dist/event-target-shim.js) ... Common JS version. 
+- [dist/event-target-shim.umd.js](dist/event-target-shim.umd.js) ... UMD (Universal Module Definition) version. This is transpiled by [Babel](https://babeljs.io/) for IE 11. + +## 📖 Usage + +```js +import {EventTarget, defineEventAttribute} from "event-target-shim" +// or +const {EventTarget, defineEventAttribute} = require("event-target-shim") + +// or UMD version defines a global variable: +const {EventTarget, defineEventAttribute} = window.EventTargetShim +``` + +### EventTarget + +> https://dom.spec.whatwg.org/#interface-eventtarget + +#### eventTarget.addEventListener(type, callback, options) + +Register an event listener. + +- `type` is a string. This is the event name to register. +- `callback` is a function. This is the event listener to register. +- `options` is a boolean or an object `{ capture?: boolean, passive?: boolean, once?: boolean }`. If this is a boolean, it's same meaning as `{ capture: options }`. + - `capture` is the flag to register the event listener for capture phase. + - `passive` is the flag to ignore `event.preventDefault()` method in the event listener. + - `once` is the flag to remove the event listener automatically after the first call. + +#### eventTarget.removeEventListener(type, callback, options) + +Unregister an event listener. + +- `type` is a string. This is the event name to unregister. +- `callback` is a function. This is the event listener to unregister. +- `options` is a boolean or an object `{ capture?: boolean }`. If this is a boolean, it's same meaning as `{ capture: options }`. + - `capture` is the flag to register the event listener for capture phase. + +#### eventTarget.dispatchEvent(event) + +Dispatch an event. + +- `event` is a [Event](https://dom.spec.whatwg.org/#event) object or an object `{ type: string, [key: string]: any }`. The latter is non-standard but useful. In both cases, listeners receive the event as implementing [Event](https://dom.spec.whatwg.org/#event) interface. + +### defineEventAttribute(proto, type) + +Define an event attribute (e.g. `onclick`) to `proto`. This is non-standard. + +- `proto` is an object (assuming it's a prototype object). This function defines a getter/setter pair for the event attribute. +- `type` is a string. This is the event name to define. + +For example: + +```js +class AbortSignal extends EventTarget { + constructor() { + this.aborted = false + } +} +// Define `onabort` property. +defineEventAttribute(AbortSignal.prototype, "abort") +``` + +### EventTarget(types) + +Define a custom `EventTarget` class with event attributes. This is non-standard. + +- `types` is a string or an array of strings. This is the event name to define. + +For example: + +```js +// This has `onabort` property. +class AbortSignal extends EventTarget("abort") { + constructor() { + this.aborted = false + } +} +``` + +## 📚 Examples + +### ES2015 and later + +> https://jsfiddle.net/636vea92/ + +```js +const {EventTarget, defineEventAttribute} = EventTargetShim + +// Define a derived class. +class Foo extends EventTarget { + // ... +} + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. 
+const foo = new Foo() +foo.addEventListener("hello", (e) => { + console.log("hello", e) +}) +foo.onhello = (e) => { + console.log("onhello", e) +} + +// Dispatching events +foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +``` + +### Typescript + +```ts +import { EventTarget, defineEventAttribute } from "event-target-shim"; + +// Define events +type FooEvents = { + hello: CustomEvent +} +type FooEventAttributes = { + onhello: CustomEvent +} + +// Define a derived class. +class Foo extends EventTarget { + // ... +} +// Define `foo.onhello` property's implementation. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +const foo = new Foo() +foo.addEventListener("hello", (e) => { + console.log("hello", e.detail) +}) +foo.onhello = (e) => { + console.log("onhello", e.detail) +} + +// Dispatching events +foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +``` + +Unfortunately, both `FooEvents` and `FooEventAttributes` are needed because TypeScript doesn't allow the mutation of string literal types. If TypeScript allowed us to compute `"onhello"` from `"hello"` in types, `FooEventAttributes` will be optional. + +This `EventTarget` type is compatible with `EventTarget` interface of `lib.dom.d.ts`. + +#### To disallow unknown events + +By default, methods such as `addEventListener` accept unknown events. You can disallow unknown events by the third type parameter `"strict"`. + +```ts +type FooEvents = { + hello: CustomEvent +} +class Foo extends EventTarget { + // ... +} + +// OK because `hello` is defined in FooEvents. +foo.addEventListener("hello", (e) => { +}) +// Error because `unknown` is not defined in FooEvents. +foo.addEventListener("unknown", (e) => { +}) +``` + +However, if you use `"strict"` parameter, it loses compatibility with `EventTarget` interface of `lib.dom.d.ts`. + +#### To infer the type of `dispatchEvent()` method + +TypeScript cannot infer the event type of `dispatchEvent()` method properly from the argument in most cases. You can improve this behavior with the following steps: + +1. Use the third type parameter `"strict"`. This prevents inferring to `dispatchEvent()`. +2. Make the `type` property of event definitions stricter. + +```ts +type FooEvents = { + hello: CustomEvent & { type: "hello" } + hey: Event & { type: "hey" } +} +class Foo extends EventTarget { + // ... +} + +// Error because `detail` property is lacking. +foo.dispatchEvent({ type: "hello" }) +``` + +### ES5 + +> https://jsfiddle.net/522zc9de/ + +```js +// Define a derived class. +function Foo() { + EventTarget.call(this) +} +Foo.prototype = Object.create(EventTarget.prototype, { + constructor: { value: Foo, configurable: true, writable: true } + // ... +}) + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +var foo = new Foo() +foo.addEventListener("hello", function(e) { + console.log("hello", e) +}) +foo.onhello = function(e) { + console.log("onhello", e) +} + +// Dispatching events +function isSupportEventConstrucor() { // IE does not support. + try { + new CusomEvent("hello") + return true + } catch (_err) { + return false + } +} +if (isSupportEventConstrucor()) { + foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +} else { + var e = document.createEvent("CustomEvent") + e.initCustomEvent("hello", false, false, "detail") + foo.dispatchEvent(e) +} +``` + +## 📰 Changelog + +- See [GitHub releases](https://github.com/mysticatea/event-target-shim/releases). 
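
For a quick end-to-end illustration of the API summarized above (the `EventTarget` constructor, `defineEventAttribute`, the listener `options` object, and the non-standard plain-object form of `dispatchEvent`), here is a minimal sketch. It assumes only that the `event-target-shim` package is installed; the `Job` class, the `"ready"` event name, and the `detail` field are illustrative choices, not part of the package.

```js
// Minimal sketch: a custom EventTarget subclass with an event attribute,
// a `once` listener, and a plain-object dispatch.
// `Job`, "ready", and `detail` are illustrative names only.
const { EventTarget, defineEventAttribute } = require("event-target-shim")

class Job extends EventTarget {}
defineEventAttribute(Job.prototype, "ready") // defines the `job.onready` property

const job = new Job()

// Removed automatically after the first call because of `once: true`.
job.addEventListener("ready", e => console.log("listener:", e.detail), { once: true })
job.onready = e => console.log("attribute:", e.detail)

// A plain `{ type, ...rest }` object is wrapped into an Event-like object,
// so listeners can read `e.detail` here.
job.dispatchEvent({ type: "ready", detail: 1 }) // logs "listener: 1" then "attribute: 1"
job.dispatchEvent({ type: "ready", detail: 2 }) // logs only "attribute: 2"
```

The second dispatch reaches only the `onready` attribute listener, since the `once` listener was unregistered after its first invocation.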
+ +## 🍻 Contributing + +Contributing is welcome ❤️ + +Please use GitHub issues/PRs. + +### Development tools + +- `npm install` installs dependencies for development. +- `npm test` runs tests and measures code coverage. +- `npm run clean` removes temporary files of tests. +- `npm run coverage` opens code coverage of the previous test with your default browser. +- `npm run lint` runs ESLint. +- `npm run build` generates `dist` codes. +- `npm run watch` runs tests on each file change. diff --git a/node_modules/event-target-shim/dist/event-target-shim.js b/node_modules/event-target-shim/dist/event-target-shim.js new file mode 100644 index 0000000..53ce220 --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.js @@ -0,0 +1,871 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. 
+ */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. 
+ * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. 
+ * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. 
+ * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. 
+ * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? 
node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports.default = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map diff --git a/node_modules/event-target-shim/dist/event-target-shim.js.map b/node_modules/event-target-shim/dist/event-target-shim.js.map new file mode 100644 index 0000000..83c5f62 --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.js.map @@ -0,0 +1 @@ +{"version":3,"file":"event-target-shim.js","sources":["../src/event.mjs","../src/event-target.mjs"],"sourcesContent":["/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. 
Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event 
bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns 
{Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * 
@returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. 
`eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? 
CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":[],"mappings":";;;;;;;;;AAAA;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB
,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASD,AAAO,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQD,A
AAO,SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASD,AAAO,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASD,AAAO,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASD,AAAO,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAA
G,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/event-target-shim/dist/event-target-shim.mjs b/node_modules/event-target-shim/dist/event-target-shim.mjs new file mode 100644 index 0000000..114f3a1 --- /dev/null +++ 
b/node_modules/event-target-shim/dist/event-target-shim.mjs @@ -0,0 +1,862 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. 
+ */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. 
+ * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. 
+ * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. 
+ * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? 
CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. 
+if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +export default EventTarget; +export { defineEventAttribute, EventTarget }; +//# sourceMappingURL=event-target-shim.mjs.map diff --git a/node_modules/event-target-shim/dist/event-target-shim.mjs.map b/node_modules/event-target-shim/dist/event-target-shim.mjs.map new file mode 100644 index 0000000..57b3e8f --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"event-target-shim.mjs","sources":["../src/event.mjs","../src/event-target.mjs"],"sourcesContent":["/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type 
{string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property 
descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param 
{boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":[],"mappings":";;;;;AAAA;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAA
I,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASD,AAAO,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQD,AAAO,
SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASD,AAAO,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASD,AAAO,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASD,AAAO,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAAG,YA
AY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;"} \ No newline at end of file diff --git a/node_modules/event-target-shim/dist/event-target-shim.umd.js b/node_modules/event-target-shim/dist/event-target-shim.umd.js new file mode 100644 index 0000000..e7cf5d4 --- /dev/null +++ 
b/node_modules/event-target-shim/dist/event-target-shim.umd.js @@ -0,0 +1,6 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.EventTargetShim={}))})(this,function(a){"use strict";function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a){var b=u.get(a);return console.assert(null!=b,"'this' is expected an Event object, but got",a),b}function d(a){return null==a.passiveListener?void(!a.event.cancelable||(a.canceled=!0,"function"==typeof a.event.preventDefault&&a.event.preventDefault())):void("undefined"!=typeof console&&"function"==typeof console.error&&console.error("Unable to preventDefault inside passive event listener invocation.",a.passiveListener))}function e(a,b){u.set(this,{eventTarget:a,event:b,eventPhase:2,currentTarget:a,canceled:!1,stopped:!1,immediateStopped:!1,passiveListener:null,timeStamp:b.timeStamp||Date.now()}),Object.defineProperty(this,"isTrusted",{value:!1,enumerable:!0});for(var c,d=Object.keys(b),e=0;e}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n 
/**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property 
descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param 
{boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":["pd","event","retv","privateData","get","console","assert","setCancelFlag","data","passiveListener","cancelable","canceled","preventDefault","error","Event","eventTarget","set","eventPhase","currentTarget","stopped","immediateStopped","timeStamp","Date","now","Object","defineProperty","value","enumerable","key","keys","i","length","defineRedirectDescriptor","configurable","defineCallDescriptor","apply","arguments","defineWrapper","BaseEvent","proto","CustomEvent","call","prototype","create","constructor","writable","descriptor","getOwnPropertyDescriptor","isFunc","getWrapper","wrapper","wrappers","getPrototypeOf","wrapEvent","Wrapper","isStopped","setEventPhase","setCurrentTarget","setPassiveListener","isObject","x","_typeof","getListeners","listeners","listenersMap","TypeError","defineEventAttributeDescriptor","eventName","node","listenerType","listener","next","prev","delete","newNode","passive","once","defineEventAttribute","eventTargetPrototype","defineCustomEventTarget","eventNames","CustomEventTarget","EventTarget","Map","Array","isArray","types","WeakMap","type","target","composedPath","NONE","CAPTURING_PHASE","AT_TARGET","BUBBLING_PHASE","stopPropagation","stopImmediatePropagation","bubbles","defaultPrevented","composed","srcElement","cancelBubble","returnValue","initEvent","window","setPrototypeOf","CAPTURE","BUBBLE","addEventListener","options","optionsIsObj","capture","removeEventListener","dispatchEvent","wrappedEvent","err","handleEvent"],"mappings":";;;;wbAkCA,QAASA,CAAAA,CAAT,CAAYC,CAAZ,CAAmB,IACTC,CAAAA,CAAI,CAAGC,CAAW,CAACC,GAAZD,CAAgBF,CAAhBE,QACbE,CAAAA,OAAO,CAACC,MAARD,CACY,IAARH,EAAAA,CADJG,CAEI,6CAFJA,CAGIJ,CAHJI,EAKOH,EAOX,QAASK,CAAAA,CAAT,CAAuBC,CAAvB,CAA6B,OACG,KAAxBA,EAAAA,CAAI,CAACC,eADgB,MAarB,CAACD,CAAI,CAACP,KAALO,CAAWE,UAbS,GAiBzBF,CAAI,CAACG,QAALH,GAjByB,CAkBgB,UAArC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWI,cAlBG,EAmBrBJ,CAAI,CAACP,KAALO,CAAWI,cAAXJ,EAnBqB,QAGE,WAAnB,QAAOH,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAJE,EAMjBR,OAAO,CAACQ,KAARR,CACI,oEADJA,CAEIG,CAAI,CAACC,eAFTJ,CANiB,EAiC7B,QAASS,CAAAA,CAAT,CAAeC,CAAf,CAA4Bd,CAA5B,CAAmC,CAC/BE,CAAW,CAACa,GAAZb,CAAgB,IAAhBA,CAAsB,CAClBY,WAAW,CAAXA,CADkB,CAElBd,KAAK,CAALA,CAFkB,CAGlBgB,UAAU,CAAE,CAHM,CAIlBC,aAAa,CAAEH,CAJG,CAKlBJ,QAAQ,GALU,CAMlBQ,OAAO,GANW,CAOlBC,gBAAgB,GAPE,CAQlBX,eAAe,CAAE,IARC,CASlBY,SAAS,CAAEpB,CAAK,CAACoB,S
AANpB,EAAmBqB,IAAI,CAACC,GAALD,EATZ,CAAtBnB,CAD+B,CAc/BqB,MAAM,CAACC,cAAPD,CAAsB,IAAtBA,CAA4B,WAA5BA,CAAyC,CAAEE,KAAK,GAAP,CAAgBC,UAAU,GAA1B,CAAzCH,CAd+B,QAmBrBI,CAAAA,EAFJC,CAAI,CAAGL,MAAM,CAACK,IAAPL,CAAYvB,CAAZuB,EACJM,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,EACzBF,EAAMC,CAAI,CAACC,CAAD,EACVF,CAAG,GAAI,OACTJ,MAAM,CAACC,cAAPD,CAAsB,IAAtBA,CAA4BI,CAA5BJ,CAAiCQ,CAAwB,CAACJ,CAAD,CAAzDJ,EAyOZ,QAASQ,CAAAA,CAAT,CAAkCJ,CAAlC,CAAuC,OAC5B,CACHxB,GADG,WACG,OACKJ,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAe4B,CAAf5B,CAFR,CAAA,CAIHgB,GAJG,UAICU,EAAO,CACP1B,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAe4B,CAAf5B,EAAsB0B,CALvB,CAAA,CAOHO,YAAY,GAPT,CAQHN,UAAU,GARP,EAkBX,QAASO,CAAAA,CAAT,CAA8BN,CAA9B,CAAmC,OACxB,CACHF,KADG,WACK,IACEzB,CAAAA,CAAK,CAAGD,CAAE,CAAC,IAAD,CAAFA,CAASC,YAChBA,CAAAA,CAAK,CAAC2B,CAAD,CAAL3B,CAAWkC,KAAXlC,CAAiBA,CAAjBA,CAAwBmC,SAAxBnC,CAHR,CAAA,CAKHgC,YAAY,GALT,CAMHN,UAAU,GANP,EAiBX,QAASU,CAAAA,CAAT,CAAuBC,CAAvB,CAAkCC,CAAlC,CAAyC,SAO5BC,CAAAA,EAAYzB,EAAad,EAAO,CACrCqC,CAAS,CAACG,IAAVH,CAAe,IAAfA,CAAqBvB,CAArBuB,CAAkCrC,CAAlCqC,KAPET,CAAAA,CAAI,CAAGL,MAAM,CAACK,IAAPL,CAAYe,CAAZf,KACO,CAAhBK,GAAAA,CAAI,CAACE,aACEO,CAAAA,EAQXE,CAAW,CAACE,SAAZF,CAAwBhB,MAAM,CAACmB,MAAPnB,CAAcc,CAAS,CAACI,SAAxBlB,CAAmC,CACvDoB,WAAW,CAAE,CAAElB,KAAK,CAAEc,CAAT,CAAsBP,YAAY,GAAlC,CAA0CY,QAAQ,GAAlD,CAD0C,CAAnCrB,CAXa,KAgBhC,GACKI,CAAAA,CADL,CAAIE,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,KACzBF,EAAMC,CAAI,CAACC,CAAD,EACZ,EAAEF,CAAG,GAAIU,CAAAA,CAAS,CAACI,SAAnB,EAA+B,IACzBI,CAAAA,CAAU,CAAGtB,MAAM,CAACuB,wBAAPvB,CAAgCe,CAAhCf,CAAuCI,CAAvCJ,CADY,CAEzBwB,CAAM,CAA+B,UAA5B,QAAOF,CAAAA,CAAU,CAACpB,KAFF,CAG/BF,MAAM,CAACC,cAAPD,CACIgB,CAAW,CAACE,SADhBlB,CAEII,CAFJJ,CAGIwB,CAAM,CACAd,CAAoB,CAACN,CAAD,CADpB,CAEAI,CAAwB,CAACJ,CAAD,CALlCJ,QAUDgB,CAAAA,EASX,QAASS,CAAAA,CAAT,CAAoBV,CAApB,CAA2B,IACV,IAATA,EAAAA,CAAK,EAAYA,CAAK,GAAKf,MAAM,CAACkB,gBAC3B5B,CAAAA,KAGPoC,CAAAA,CAAO,CAAGC,CAAQ,CAAC/C,GAAT+C,CAAaZ,CAAbY,QACC,KAAXD,EAAAA,IACAA,CAAO,CAAGb,CAAa,CAACY,CAAU,CAACzB,MAAM,CAAC4B,cAAP5B,CAAsBe,CAAtBf,CAAD,CAAX,CAA2Ce,CAA3C,EACvBY,CAAQ,CAACnC,GAATmC,CAAaZ,CAAbY,CAAoBD,CAApBC,GAEGD,EAUJ,QAASG,CAAAA,CAAT,CAAmBtC,CAAnB,CAAgCd,CAAhC,CAAuC,IACpCqD,CAAAA,CAAO,CAAGL,CAAU,CAACzB,MAAM,CAAC4B,cAAP5B,CAAsBvB,CAAtBuB,CAAD,QACnB,IAAI8B,CAAAA,CAAJ,CAAYvC,CAAZ,CAAyBd,CAAzB,EASJ,QAASsD,CAAAA,CAAT,CAAmBtD,CAAnB,CAA0B,OACtBD,CAAAA,CAAE,CAACC,CAAD,CAAFD,CAAUoB,iBAUd,QAASoC,CAAAA,CAAT,CAAuBvD,CAAvB,CAA8BgB,CAA9B,CAA0C,CAC7CjB,CAAE,CAACC,CAAD,CAAFD,CAAUiB,UAAVjB,CAAuBiB,EAUpB,QAASwC,CAAAA,CAAT,CAA0BxD,CAA1B,CAAiCiB,CAAjC,CAAgD,CACnDlB,CAAE,CAACC,CAAD,CAAFD,CAAUkB,aAAVlB,CAA0BkB,EAUvB,QAASwC,CAAAA,CAAT,CAA4BzD,CAA5B,CAAmCQ,CAAnC,CAAoD,CACvDT,CAAE,CAACC,CAAD,CAAFD,CAAUS,eAAVT,CAA4BS,EC3bhC,QAASkD,CAAAA,CAAT,CAAkBC,CAAlB,CAAqB,OACJ,KAANA,GAAAA,CAAC,EAA0B,QAAb,GAAAC,EAAOD,GAShC,QAASE,CAAAA,CAAT,CAAsB/C,CAAtB,CAAmC,IACzBgD,CAAAA,CAAS,CAAGC,CAAY,CAAC5D,GAAb4D,CAAiBjD,CAAjBiD,KACD,IAAbD,EAAAA,OACM,IAAIE,CAAAA,SAAJ,CACF,kEADE,QAIHF,CAAAA,EASX,QAASG,CAAAA,CAAT,CAAwCC,CAAxC,CAAmD,OACxC,CACH/D,GADG,WACG,QACI2D,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CAD5B,CAEEM,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CAFT,CAGa,IAARK,EAAAA,CAHL,EAGmB,IACbA,IAAAA,CAAI,CAACC,mBACED,CAAAA,CAAI,CAACE,SAEhBF,CAAI,CAAGA,CAAI,CAACG,WAET,KAVR,CAAA,CAaHvD,GAbG,UAaCsD,EAAU,CACc,UAApB,QAAOA,CAAAA,CAAP,EAAmCX,CAAQ,CAACW,CAAD,CADrC,GAENA,CAAQ,CAAG,IAFL,SAIJP,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CAJpB,CAONU,CAAI,CAAG,IAPD,CAQNJ,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CARD,CASK,IAARK,EAAAA,CATG,EAUFA,IAAAA,CAAI,CAACC,YAVH,CAYW,IAATG,GAAAA,CAZF,CAcuB,IAAdJ,GAAAA,CAAI,CAACG,IAdd,CAiBER,CAAS
,CAACU,MAAVV,CAAiBI,CAAjBJ,CAjBF,CAeEA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,CAfF,CAaES,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,IAbnB,CAoBFC,CAAI,CAAGJ,CApBL,CAuBNA,CAAI,CAAGA,CAAI,CAACG,IAvBN,IA2BO,IAAbD,GAAAA,EAAmB,IACbI,CAAAA,CAAO,CAAG,CACZJ,QAAQ,CAARA,CADY,CAEZD,YAAY,EAFA,CAGZM,OAAO,GAHK,CAIZC,IAAI,GAJQ,CAKZL,IAAI,CAAE,IALM,EAOH,IAATC,GAAAA,CARe,CASfT,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBW,CAAzBX,CATe,CAWfS,CAAI,CAACD,IAALC,CAAYE,EAnDrB,CAAA,CAuDHzC,YAAY,GAvDT,CAwDHN,UAAU,GAxDP,EAkEX,QAASkD,CAAAA,CAAT,CAA8BC,CAA9B,CAAoDX,CAApD,CAA+D,CAC3D3C,MAAM,CAACC,cAAPD,CACIsD,CADJtD,aAES2C,EAFT3C,CAGI0C,CAA8B,CAACC,CAAD,CAHlC3C,EAaJ,QAASuD,CAAAA,CAAT,CAAiCC,CAAjC,CAA6C,SAEhCC,CAAAA,GAAoB,CACzBC,CAAW,CAACzC,IAAZyC,CAAiB,IAAjBA,EAGJD,CAAiB,CAACvC,SAAlBuC,CAA8BzD,MAAM,CAACmB,MAAPnB,CAAc0D,CAAW,CAACxC,SAA1BlB,CAAqC,CAC/DoB,WAAW,CAAE,CACTlB,KAAK,CAAEuD,CADE,CAEThD,YAAY,GAFH,CAGTY,QAAQ,GAHC,CADkD,CAArCrB,CANW,KAcpC,GAAIM,CAAAA,CAAC,CAAG,EAAGA,CAAC,CAAGkD,CAAU,CAACjD,OAAQ,EAAED,EACrC+C,CAAoB,CAACI,CAAiB,CAACvC,SAAnB,CAA8BsC,CAAU,CAAClD,CAAD,CAAxC,CAApB+C,OAGGI,CAAAA,EAgBX,QAASC,CAAAA,CAAT,EAAuB,IAEf,eAAgBA,CAAAA,aAChBlB,CAAAA,CAAY,CAAChD,GAAbgD,CAAiB,IAAjBA,CAAuB,GAAImB,CAAAA,GAA3BnB,KAGqB,CAArB5B,GAAAA,SAAS,CAACL,MAAVK,EAA0BgD,KAAK,CAACC,OAAND,CAAchD,SAAS,CAAC,CAAD,CAAvBgD,QACnBL,CAAAA,CAAuB,CAAC3C,SAAS,CAAC,CAAD,CAAV,KAEX,CAAnBA,CAAAA,SAAS,CAACL,OAAY,QAChBuD,CAAAA,CAAK,CAAOF,KAAP,CAAahD,SAAS,CAACL,MAAvB,EACFD,CAAC,CAAG,EAAGA,CAAC,CAAGM,SAAS,CAACL,OAAQ,EAAED,EACpCwD,CAAK,CAACxD,CAAD,CAALwD,CAAWlD,SAAS,CAACN,CAAD,CAApBwD,OAEGP,CAAAA,CAAuB,CAACO,CAAD,OAE5B,IAAIrB,CAAAA,SAAJ,CAAc,mCAAd,KD5KJ9D,CAAAA,CAAW,CAAG,GAAIoF,CAAAA,QAOlBpC,CAAQ,CAAG,GAAIoC,CAAAA,QAkFrBzE,CAAK,CAAC4B,SAAN5B,CAAkB,IAKV0E,CAAAA,MAAO,OACAxF,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAewF,IANZ,CAAA,IAaVC,CAAAA,QAAS,OACFzF,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASe,WAdN,CAAA,IAqBVG,CAAAA,eAAgB,OACTlB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASkB,aAtBN,CAAA,CA4BdwE,YA5Bc,WA4BC,IACLxE,CAAAA,CAAa,CAAGlB,CAAE,CAAC,IAAD,CAAFA,CAASkB,cADpB,MAEU,KAAjBA,EAAAA,CAFO,CAGA,EAHA,CAKJ,CAACA,CAAD,CAjCG,CAAA,IAwCVyE,CAAAA,MAAO,OACA,EAzCG,CAAA,IAgDVC,CAAAA,iBAAkB,OACX,EAjDG,CAAA,IAwDVC,CAAAA,WAAY,OACL,EAzDG,CAAA,IAgEVC,CAAAA,gBAAiB,OACV,EAjEG,CAAA,IAwEV7E,CAAAA,YAAa,OACNjB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASiB,UAzEN,CAAA,CAgFd8E,eAhFc,WAgFI,IACRvF,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,GAHc,CAI4B,UAAtC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWuF,eAJR,EAKVvF,CAAI,CAACP,KAALO,CAAWuF,eAAXvF,EArFM,CAAA,CA6FdwF,wBA7Fc,WA6Fa,IACjBxF,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,GAHuB,CAIvBA,CAAI,CAACY,gBAALZ,GAJuB,CAK4B,UAA/C,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWwF,wBALC,EAMnBxF,CAAI,CAACP,KAALO,CAAWwF,wBAAXxF,EAnGM,CAAA,IA2GVyF,CAAAA,SAAU,SACKjG,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAeiG,OA5GpB,CAAA,IAmHVvF,CAAAA,YAAa,SACEV,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAeU,UApHpB,CAAA,CA2HdE,cA3Hc,WA2HG,CACbL,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA5HH,CAAA,IAmIVkG,CAAAA,kBAAmB,OACZlG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASW,QApIN,CAAA,IA2IVwF,CAAAA,UAAW,SACInG,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAemG,QA5IpB,CAAA,IAmJV9E,CAAAA,WAAY,OACLrB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASqB,SApJN,CAAA,IA4JV+E,CAAAA,YAAa,OACNpG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASe,WA7JN,CAAA,IAqKVsF,CAAAA,cAAe,OACRrG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASmB,OAtKN,CAAA,IAwKVkF,CAAAA,aAAa3E,EAAO,IACfA,MAGClB,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,IACuC,SAAnC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAW6F,eAClB7F,CAAI,CAACP,KAALO,CAAW6F,YAAX7F,KAhLM,CAAA,IAyLV8F,CAAAA,aAAc,OACP,CAACtG,CAAE,CAAC,IAAD,CAAFA,CAASW,QA1LP,CAAA,IA4LV2F,CAAAA,YAA
Y5E,EAAO,CACdA,CADc,EAEfnB,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA9LP,CAAA,CAyMduG,SAzMc,WAyMF,EAzME,EA+MlB/E,MAAM,CAACC,cAAPD,CAAsBV,CAAK,CAAC4B,SAA5BlB,CAAuC,aAAvCA,CAAsD,CAClDE,KAAK,CAAEZ,CAD2C,CAElDmB,YAAY,GAFsC,CAGlDY,QAAQ,GAH0C,CAAtDrB,EAOsB,WAAlB,QAAOgF,CAAAA,MAAP,EAAyD,WAAxB,QAAOA,CAAAA,MAAM,CAAC1F,QAC/CU,MAAM,CAACiF,cAAPjF,CAAsBV,CAAK,CAAC4B,SAA5BlB,CAAuCgF,MAAM,CAAC1F,KAAP0F,CAAa9D,SAApDlB,EAGA2B,CAAQ,CAACnC,GAATmC,CAAaqD,MAAM,CAAC1F,KAAP0F,CAAa9D,SAA1BS,CAAqCrC,CAArCqC,MChTEa,CAAAA,CAAY,CAAG,GAAIuB,CAAAA,QAGnBmB,CAAO,CAAG,EACVC,CAAM,CAAG,KA0KfzB,CAAW,CAACxC,SAAZwC,CAAwB,CAQpB0B,gBARoB,UAQHzC,EAAWG,EAAUuC,EAAS,IAC3B,IAAZvC,EAAAA,MAGoB,UAApB,QAAOA,CAAAA,CAAP,EAAkC,CAACX,CAAQ,CAACW,CAAD,OACrC,IAAIL,CAAAA,SAAJ,CAAc,+CAAd,KAGJF,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,EACxBgD,CAAY,CAAGnD,CAAQ,CAACkD,CAAD,EACvBE,CAAO,CAAGD,CAAY,GACdD,CAAO,CAACE,OADM,GAEdF,EACRxC,CAAY,CAAG0C,CAAO,CAAGL,CAAH,CAAaC,EACnCjC,CAAO,CAAG,CACZJ,QAAQ,CAARA,CADY,CAEZD,YAAY,CAAZA,CAFY,CAGZM,OAAO,CAAEmC,CAAY,IAAYD,CAAO,CAAClC,OAH7B,CAIZC,IAAI,CAAEkC,CAAY,IAAYD,CAAO,CAACjC,IAJ1B,CAKZL,IAAI,CAAE,IALM,EASZH,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,KACPK,SAAAA,aACAL,CAAAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBW,CAAzBX,SAKAS,CAAAA,CAAI,CAAG,KACI,IAARJ,EAAAA,GAAc,IAEbA,CAAI,CAACE,QAALF,GAAkBE,CAAlBF,EACAA,CAAI,CAACC,YAALD,GAAsBC,SAK1BG,CAAI,CAAGJ,CARU,CASjBA,CAAI,CAAGA,CAAI,CAACG,IAxC2B,CA4C3CC,CAAI,CAACD,IAALC,CAAYE,EApDI,CAAA,CA8DpBsC,mBA9DoB,UA8DA7C,EAAWG,EAAUuC,EAAS,IAC9B,IAAZvC,EAAAA,SAIEP,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,EACxBiD,CAAO,CAAGpD,CAAQ,CAACkD,CAAD,CAARlD,GACFkD,CAAO,CAACE,OADNpD,GAEFkD,EACRxC,CAAY,CAAG0C,CAAO,CAAGL,CAAH,CAAaC,EAErCnC,CAAI,CAAG,KACPJ,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,EACI,IAARK,EAAAA,GAAc,IAEbA,CAAI,CAACE,QAALF,GAAkBE,CAAlBF,EACAA,CAAI,CAACC,YAALD,GAAsBC,cAET,IAATG,GAAAA,EAEqB,IAAdJ,GAAAA,CAAI,CAACG,KAGZR,CAAS,CAACU,MAAVV,CAAiBI,CAAjBJ,EAFAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,EAFAS,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,MASzBC,CAAI,CAAGJ,CAfU,CAgBjBA,CAAI,CAAGA,CAAI,CAACG,KA3FA,CAAA,CAoGpB0C,aApGoB,UAoGNhH,EAAO,IACJ,IAATA,EAAAA,CAAK,EAAkC,QAAtB,QAAOA,CAAAA,CAAK,CAACuF,UACxB,IAAIvB,CAAAA,SAAJ,CAAc,oCAAd,EAFO,GAMXF,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CANb,CAOXK,CAAS,CAAGlE,CAAK,CAACuF,IAPP,CAQbpB,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CARM,IASL,IAARK,EAAAA,WATa,OAcX8C,CAAAA,CAAY,CAAG7D,CAAS,CAAC,IAAD,CAAOpD,CAAP,CAdb,CAkBbuE,CAAI,CAAG,IAlBM,CAmBF,IAARJ,EAAAA,CAnBU,EAmBI,IAEbA,CAAI,CAACQ,KACQ,IAATJ,GAAAA,EAEqB,IAAdJ,GAAAA,CAAI,CAACG,KAGZR,CAAS,CAACU,MAAVV,CAAiBI,CAAjBJ,EAFAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,EAFAS,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,KAOrBC,CAAI,CAAGJ,EAIXV,CAAkB,CACdwD,CADc,CAEd9C,CAAI,CAACO,OAALP,CAAeA,CAAI,CAACE,QAApBF,CAA+B,IAFjB,EAIW,UAAzB,QAAOA,CAAAA,CAAI,CAACE,YACR,CACAF,CAAI,CAACE,QAALF,CAAc3B,IAAd2B,CAAmB,IAAnBA,CAAyB8C,CAAzB9C,CADJ,CAEE,MAAO+C,CAAP,CAAY,CAEa,WAAnB,QAAO9G,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAHT,EAKNR,OAAO,CAACQ,KAARR,CAAc8G,CAAd9G,MAIR+D,CAAAA,CAAI,CAACC,YAALD,GA/TE,CA+TFA,EACqC,UAArC,QAAOA,CAAAA,CAAI,CAACE,QAALF,CAAcgD,aAErBhD,CAAI,CAACE,QAALF,CAAcgD,WAAdhD,CAA0B8C,CAA1B9C,KAIAb,CAAS,CAAC2D,CAAD,QAIb9C,CAAI,CAAGA,CAAI,CAACG,WAEhBb,CAAAA,CAAkB,CAACwD,CAAD,CAAe,IAAf,EAClB1D,CAAa,CAAC0D,CAAD,CAAe,CAAf,EACbzD,CAAgB,CAACyD,CAAD,CAAe,IAAf,EAET,CAACA,CAAY,CAAChB,iBAvKL,EA4KxB1E,MAAM,CAACC,cAAPD,CAAsB0D,CAAW,CAACxC,SAAlClB,CAA6C,aAA7CA,CAA4D,CACxDE,KAAK,CAAEwD,CADiD,CAExDjD,YAAY,GAF4C,CAGxDY,QAAQ,GAHgD,CAA5DrB,EAQsB,WAAlB,QAAOgF,CAAAA,MAAP,EAC8B,WAA9B,QAAOA,CAAAA,MAAM,CAACtB,aAEd1D,MAAM,CAACiF,cAAPjF,CAAsB0D,CAAW,CAACxC,S
AAlClB,CAA6CgF,MAAM,CAACtB,WAAPsB,CAAmB9D,SAAhElB"} \ No newline at end of file diff --git a/node_modules/event-target-shim/index.d.ts b/node_modules/event-target-shim/index.d.ts new file mode 100644 index 0000000..a303097 --- /dev/null +++ b/node_modules/event-target-shim/index.d.ts @@ -0,0 +1,399 @@ +export as namespace EventTargetShim + +/** + * `Event` interface. + * @see https://dom.spec.whatwg.org/#event + */ +export interface Event { + /** + * The type of this event. + */ + readonly type: string + + /** + * The target of this event. + */ + readonly target: EventTarget<{}, {}, "standard"> | null + + /** + * The current target of this event. + */ + readonly currentTarget: EventTarget<{}, {}, "standard"> | null + + /** + * The target of this event. + * @deprecated + */ + readonly srcElement: any | null + + /** + * The composed path of this event. + */ + composedPath(): EventTarget<{}, {}, "standard">[] + + /** + * Constant of NONE. + */ + readonly NONE: number + + /** + * Constant of CAPTURING_PHASE. + */ + readonly CAPTURING_PHASE: number + + /** + * Constant of BUBBLING_PHASE. + */ + readonly BUBBLING_PHASE: number + + /** + * Constant of AT_TARGET. + */ + readonly AT_TARGET: number + + /** + * Indicates which phase of the event flow is currently being evaluated. + */ + readonly eventPhase: number + + /** + * Stop event bubbling. + */ + stopPropagation(): void + + /** + * Stop event bubbling. + */ + stopImmediatePropagation(): void + + /** + * Initialize event. + * @deprecated + */ + initEvent(type: string, bubbles?: boolean, cancelable?: boolean): void + + /** + * The flag indicating bubbling. + */ + readonly bubbles: boolean + + /** + * Stop event bubbling. + * @deprecated + */ + cancelBubble: boolean + + /** + * Set or get cancellation flag. + * @deprecated + */ + returnValue: boolean + + /** + * The flag indicating whether the event can be canceled. + */ + readonly cancelable: boolean + + /** + * Cancel this event. + */ + preventDefault(): void + + /** + * The flag to indicating whether the event was canceled. + */ + readonly defaultPrevented: boolean + + /** + * The flag to indicating if event is composed. + */ + readonly composed: boolean + + /** + * Indicates whether the event was dispatched by the user agent. + */ + readonly isTrusted: boolean + + /** + * The unix time of this event. + */ + readonly timeStamp: number +} + +/** + * The constructor of `EventTarget` interface. + */ +export type EventTargetConstructor< + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" +> = { + prototype: EventTarget + new(): EventTarget +} + +/** + * `EventTarget` interface. + * @see https://dom.spec.whatwg.org/#interface-eventtarget + */ +export type EventTarget< + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" +> = EventTarget.EventAttributes & { + /** + * Add a given listener to this event target. + * @param eventName The event name to add. + * @param listener The listener to add. + * @param options The options for this listener. + */ + addEventListener>( + type: TEventType, + listener: + | EventTarget.Listener> + | null, + options?: boolean | EventTarget.AddOptions + ): void + + /** + * Remove a given listener from this event target. + * @param eventName The event name to remove. + * @param listener The listener to remove. + * @param options The options for this listener. 
+ */ + removeEventListener>( + type: TEventType, + listener: + | EventTarget.Listener> + | null, + options?: boolean | EventTarget.RemoveOptions + ): void + + /** + * Dispatch a given event. + * @param event The event to dispatch. + * @returns `false` if canceled. + */ + dispatchEvent>( + event: EventTarget.EventData + ): boolean +} + +export const EventTarget: EventTargetConstructor & { + /** + * Create an `EventTarget` instance with detailed event definition. + * + * The detailed event definition requires to use `defineEventAttribute()` + * function later. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * const signal = new EventTarget<{ abort: Event }, { onabort: Event }>() + * defineEventAttribute(signal, "abort") + */ + new < + TEvents extends EventTarget.EventDefinition, + TEventAttributes extends EventTarget.EventDefinition, + TMode extends EventTarget.Mode = "loose" + >(): EventTarget + + /** + * Define an `EventTarget` constructor with attribute events and detailed event definition. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * class AbortSignal extends EventTarget<{ abort: Event }, { onabort: Event }>("abort") { + * abort(): void {} + * } + * + * @param events Optional event attributes (e.g. passing in `"click"` adds `onclick` to prototype). + */ + < + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" + >(events: string[]): EventTargetConstructor< + TEvents, + TEventAttributes, + TMode + > + + /** + * Define an `EventTarget` constructor with attribute events and detailed event definition. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * class AbortSignal extends EventTarget<{ abort: Event }, { onabort: Event }>("abort") { + * abort(): void {} + * } + * + * @param events Optional event attributes (e.g. passing in `"click"` adds `onclick` to prototype). + */ + < + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" + >(event0: string, ...events: string[]): EventTargetConstructor< + TEvents, + TEventAttributes, + TMode + > +} + +export namespace EventTarget { + /** + * Options of `removeEventListener()` method. + */ + export interface RemoveOptions { + /** + * The flag to indicate that the listener is for the capturing phase. + */ + capture?: boolean + } + + /** + * Options of `addEventListener()` method. + */ + export interface AddOptions extends RemoveOptions { + /** + * The flag to indicate that the listener doesn't support + * `event.preventDefault()` operation. + */ + passive?: boolean + /** + * The flag to indicate that the listener will be removed on the first + * event. + */ + once?: boolean + } + + /** + * The type of regular listeners. + */ + export interface FunctionListener { + (event: TEvent): void + } + + /** + * The type of object listeners. + */ + export interface ObjectListener { + handleEvent(event: TEvent): void + } + + /** + * The type of listeners. + */ + export type Listener = + | FunctionListener + | ObjectListener + + /** + * Event definition. 
+ */ + export type EventDefinition = { + readonly [key: string]: Event + } + + /** + * Mapped type for event attributes. + */ + export type EventAttributes = { + [P in keyof TEventAttributes]: + | FunctionListener + | null + } + + /** + * The type of event data for `dispatchEvent()` method. + */ + export type EventData< + TEvents extends EventDefinition, + TEventType extends keyof TEvents | string, + TMode extends Mode + > = + TEventType extends keyof TEvents + ? ( + // Require properties which are not generated automatically. + & Pick< + TEvents[TEventType], + Exclude + > + // Properties which are generated automatically are optional. + & Partial> + ) + : ( + TMode extends "standard" + ? Event + : Event | NonStandardEvent + ) + + /** + * The string literal types of the properties which are generated + * automatically in `dispatchEvent()` method. + */ + export type OmittableEventKeys = Exclude + + /** + * The type of event data. + */ + export type NonStandardEvent = { + [key: string]: any + type: string + } + + /** + * The type of listeners. + */ + export type PickEvent< + TEvents extends EventDefinition, + TEventType extends keyof TEvents | string, + > = + TEventType extends keyof TEvents + ? TEvents[TEventType] + : Event + + /** + * Event type candidates. + */ + export type EventType< + TEvents extends EventDefinition, + TMode extends Mode + > = + TMode extends "strict" + ? keyof TEvents + : keyof TEvents | string + + /** + * - `"strict"` ..... Methods don't accept unknown events. + * `dispatchEvent()` accepts partial objects. + * - `"loose"` ...... Methods accept unknown events. + * `dispatchEvent()` accepts partial objects. + * - `"standard"` ... Methods accept unknown events. + * `dispatchEvent()` doesn't accept partial objects. + */ + export type Mode = "strict" | "standard" | "loose" +} + +/** + * Specialized `type` property. + */ +export type Type = { type: T } + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param prototype The event target prototype to define an event attribute. + * @param eventName The event name to define. 
+ */ +export function defineEventAttribute( + prototype: EventTarget, + eventName: string +): void + +export default EventTarget diff --git a/node_modules/event-target-shim/package.json b/node_modules/event-target-shim/package.json new file mode 100644 index 0000000..40326f3 --- /dev/null +++ b/node_modules/event-target-shim/package.json @@ -0,0 +1,82 @@ +{ + "name": "event-target-shim", + "version": "5.0.1", + "description": "An implementation of WHATWG EventTarget interface.", + "main": "dist/event-target-shim", + "types": "index.d.ts", + "files": [ + "dist", + "index.d.ts" + ], + "engines": { + "node": ">=6" + }, + "scripts": { + "preversion": "npm test", + "version": "npm run build && git add dist/*", + "postversion": "git push && git push --tags", + "clean": "rimraf .nyc_output coverage", + "coverage": "nyc report --reporter lcov && opener coverage/lcov-report/index.html", + "lint": "eslint src test scripts --ext .js,.mjs", + "build": "rollup -c scripts/rollup.config.js", + "pretest": "npm run lint", + "test": "run-s test:*", + "test:mocha": "nyc --require ./scripts/babel-register mocha test/*.mjs", + "test:karma": "karma start scripts/karma.conf.js --single-run", + "watch": "run-p watch:*", + "watch:mocha": "mocha test/*.mjs --require ./scripts/babel-register --watch --watch-extensions js,mjs --growl", + "watch:karma": "karma start scripts/karma.conf.js --watch", + "codecov": "codecov" + }, + "devDependencies": { + "@babel/core": "^7.2.2", + "@babel/plugin-transform-modules-commonjs": "^7.2.0", + "@babel/preset-env": "^7.2.3", + "@babel/register": "^7.0.0", + "@mysticatea/eslint-plugin": "^8.0.1", + "@mysticatea/spy": "^0.1.2", + "assert": "^1.4.1", + "codecov": "^3.1.0", + "eslint": "^5.12.1", + "karma": "^3.1.4", + "karma-chrome-launcher": "^2.2.0", + "karma-coverage": "^1.1.2", + "karma-firefox-launcher": "^1.0.0", + "karma-growl-reporter": "^1.0.0", + "karma-ie-launcher": "^1.0.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0-rc.2", + "mocha": "^5.2.0", + "npm-run-all": "^4.1.5", + "nyc": "^13.1.0", + "opener": "^1.5.1", + "rimraf": "^2.6.3", + "rollup": "^1.1.1", + "rollup-plugin-babel": "^4.3.2", + "rollup-plugin-babel-minify": "^7.0.0", + "rollup-plugin-commonjs": "^9.2.0", + "rollup-plugin-json": "^3.1.0", + "rollup-plugin-node-resolve": "^4.0.0", + "rollup-watch": "^4.3.1", + "type-tester": "^1.0.0", + "typescript": "^3.2.4" + }, + "repository": { + "type": "git", + "url": "https://github.com/mysticatea/event-target-shim.git" + }, + "keywords": [ + "w3c", + "whatwg", + "eventtarget", + "event", + "events", + "shim" + ], + "author": "Toru Nagashima", + "license": "MIT", + "bugs": { + "url": "https://github.com/mysticatea/event-target-shim/issues" + }, + "homepage": "https://github.com/mysticatea/event-target-shim" +} diff --git a/node_modules/extend/.editorconfig b/node_modules/extend/.editorconfig new file mode 100644 index 0000000..bc228f8 --- /dev/null +++ b/node_modules/extend/.editorconfig @@ -0,0 +1,20 @@ +root = true + +[*] +indent_style = tab +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 150 + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[*.json] +max_line_length = off + +[Makefile] +max_line_length = off diff --git a/node_modules/extend/.eslintrc b/node_modules/extend/.eslintrc new file mode 100644 index 0000000..a34cf28 --- /dev/null +++ b/node_modules/extend/.eslintrc @@ -0,0 +1,17 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + 
"complexity": [2, 20], + "eqeqeq": [2, "allow-null"], + "func-name-matching": [1], + "max-depth": [1, 4], + "max-statements": [2, 26], + "no-extra-parens": [1], + "no-magic-numbers": [0], + "no-restricted-syntax": [2, "BreakStatement", "ContinueStatement", "DebuggerStatement", "LabeledStatement", "WithStatement"], + "sort-keys": [0], + } +} diff --git a/node_modules/extend/.jscs.json b/node_modules/extend/.jscs.json new file mode 100644 index 0000000..3cce01d --- /dev/null +++ b/node_modules/extend/.jscs.json @@ -0,0 +1,175 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 6 + }, + + 
"requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": false, + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/extend/.travis.yml b/node_modules/extend/.travis.yml new file mode 100644 index 0000000..5ccdfc4 --- /dev/null +++ b/node_modules/extend/.travis.yml @@ -0,0 +1,230 @@ +language: node_js +os: + - linux +node_js: + - "10.7" + - "9.11" + - "8.11" + - "7.10" + - "6.14" + - "5.12" + - "4.9" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.10" + - "0.8" +before_install: + - 'case "${TRAVIS_NODE_VERSION}" in 0.*) export NPM_CONFIG_STRICT_SSL=false ;; esac' + - 'nvm install-latest-npm' +install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ] || [ "${TRAVIS_NODE_VERSION}" = "0.9" ]; then nvm install --latest-npm 0.8 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' +script: + - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "lts/*" + env: PRETEST=true + - node_js: "lts/*" + env: POSTTEST=true + - node_js: "4" + env: COVERAGE=true + - node_js: "10.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.8" + env: 
TEST=true ALLOW_FAILURE=true + - node_js: "8.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.13" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.12" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true 
ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true diff --git a/node_modules/extend/CHANGELOG.md b/node_modules/extend/CHANGELOG.md new file mode 100644 index 0000000..2cf7de6 --- /dev/null +++ b/node_modules/extend/CHANGELOG.md @@ -0,0 +1,83 @@ +3.0.2 / 2018-07-19 +================== + * [Fix] Prevent merging `__proto__` property (#48) + * [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` + * [Tests] up to `node` `v10.7`, `v9.11`, `v8.11`, `v7.10`, `v6.14`, `v4.9`; use `nvm install-latest-npm` + +3.0.1 / 2017-04-27 +================== + * [Fix] deep extending should work with a non-object (#46) + * [Dev Deps] update `tape`, `eslint`, `@ljharb/eslint-config` + * [Tests] up to `node` `v7.9`, `v6.10`, `v4.8`; improve matrix + * [Docs] Switch from vb.teelaun.ch to versionbadg.es for the npm version badge SVG. + * [Docs] Add example to readme (#34) + +3.0.0 / 2015-07-01 +================== + * [Possible breaking change] Use global "strict" directive (#32) + * [Tests] `int` is an ES3 reserved word + * [Tests] Test up to `io.js` `v2.3` + * [Tests] Add `npm run eslint` + * [Dev Deps] Update `covert`, `jscs` + +2.0.1 / 2015-04-25 +================== + * Use an inline `isArray` check, for ES3 browsers. (#27) + * Some old browsers fail when an identifier is `toString` + * Test latest `node` and `io.js` versions on `travis-ci`; speed up builds + * Add license info to package.json (#25) + * Update `tape`, `jscs` + * Adding a CHANGELOG + +2.0.0 / 2014-10-01 +================== + * Increase code coverage to 100%; run code coverage as part of tests + * Add `npm run lint`; Run linter as part of tests + * Remove nodeType and setInterval checks in isPlainObject + * Updating `tape`, `jscs`, `covert` + * General style and README cleanup + +1.3.0 / 2014-06-20 +================== + * Add component.json for browser support (#18) + * Use SVG for badges in README (#16) + * Updating `tape`, `covert` + * Updating travis-ci to work with multiple node versions + * Fix `deep === false` bug (returning target as {}) (#14) + * Fixing constructor checks in isPlainObject + * Adding additional test coverage + * Adding `npm run coverage` + * Add LICENSE (#13) + * Adding a warning about `false`, per #11 + * General style and whitespace cleanup + +1.2.1 / 2013-09-14 +================== + * Fixing hasOwnProperty bugs that would only have shown up in specific browsers. Fixes #8 + * Updating `tape` + +1.2.0 / 2013-09-02 +================== + * Updating the README: add badges + * Adding a missing variable reference. + * Using `tape` instead of `buster` for tests; add more tests (#7) + * Adding node 0.10 to Travis CI (#6) + * Enabling "npm test" and cleaning up package.json (#5) + * Add Travis CI. + +1.1.3 / 2012-12-06 +================== + * Added unit tests. + * Ensure extend function is named. (Looks nicer in a stack trace.) + * README cleanup. + +1.1.1 / 2012-11-07 +================== + * README cleanup. 
+ * Added installation instructions. + * Added a missing semicolon + +1.0.0 / 2012-04-08 +================== + * Initial commit + diff --git a/node_modules/extend/LICENSE b/node_modules/extend/LICENSE new file mode 100644 index 0000000..e16d6a5 --- /dev/null +++ b/node_modules/extend/LICENSE @@ -0,0 +1,23 @@ +The MIT License (MIT) + +Copyright (c) 2014 Stefan Thomas + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/extend/README.md b/node_modules/extend/README.md new file mode 100644 index 0000000..5b8249a --- /dev/null +++ b/node_modules/extend/README.md @@ -0,0 +1,81 @@ +[![Build Status][travis-svg]][travis-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] + +# extend() for Node.js [![Version Badge][npm-version-png]][npm-url] + +`node-extend` is a port of the classic extend() method from jQuery. It behaves as you expect. It is simple, tried and true. + +Notes: + +* Since Node.js >= 4, + [`Object.assign`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign) + now offers the same functionality natively (but without the "deep copy" option). + See [ECMAScript 2015 (ES6) in Node.js](https://nodejs.org/en/docs/es6). +* Some native implementations of `Object.assign` in both Node.js and many + browsers (since NPM modules are for the browser too) may not be fully + spec-compliant. + Check [`object.assign`](https://www.npmjs.com/package/object.assign) module for + a compliant candidate. + +## Installation + +This package is available on [npm][npm-url] as: `extend` + +``` sh +npm install extend +``` + +## Usage + +**Syntax:** extend **(** [`deep`], `target`, `object1`, [`objectN`] **)** + +*Extend one object with one or more others, returning the modified object.* + +**Example:** + +``` js +var extend = require('extend'); +extend(targetObject, object1, object2); +``` + +Keep in mind that the target object will be modified, and will be returned from extend(). + +If a boolean true is specified as the first argument, extend performs a deep copy, recursively copying any objects it finds. Otherwise, the copy will share structure with the original object(s). +Undefined properties are not copied. However, properties inherited from the object's prototype will be copied over. +Warning: passing `false` as the first argument is not supported. + +### Arguments + +* `deep` *Boolean* (optional) +If set, the merge becomes recursive (i.e. deep copy). +* `target` *Object* +The object to extend. 
+* `object1` *Object* +The object that will be merged into the first. +* `objectN` *Object* (Optional) +More objects to merge into the first. + +## License + +`node-extend` is licensed under the [MIT License][mit-license-url]. + +## Acknowledgements + +All credit to the jQuery authors for perfecting this amazing utility. + +Ported to Node.js by [Stefan Thomas][github-justmoon] with contributions by [Jonathan Buchanan][github-insin] and [Jordan Harband][github-ljharb]. + +[travis-svg]: https://travis-ci.org/justmoon/node-extend.svg +[travis-url]: https://travis-ci.org/justmoon/node-extend +[npm-url]: https://npmjs.org/package/extend +[mit-license-url]: http://opensource.org/licenses/MIT +[github-justmoon]: https://github.com/justmoon +[github-insin]: https://github.com/insin +[github-ljharb]: https://github.com/ljharb +[npm-version-png]: http://versionbadg.es/justmoon/node-extend.svg +[deps-svg]: https://david-dm.org/justmoon/node-extend.svg +[deps-url]: https://david-dm.org/justmoon/node-extend +[dev-deps-svg]: https://david-dm.org/justmoon/node-extend/dev-status.svg +[dev-deps-url]: https://david-dm.org/justmoon/node-extend#info=devDependencies + diff --git a/node_modules/extend/component.json b/node_modules/extend/component.json new file mode 100644 index 0000000..1500a2f --- /dev/null +++ b/node_modules/extend/component.json @@ -0,0 +1,32 @@ +{ + "name": "extend", + "author": "Stefan Thomas (http://www.justmoon.net)", + "version": "3.0.0", + "description": "Port of jQuery.extend for node.js and the browser.", + "scripts": [ + "index.js" + ], + "contributors": [ + { + "name": "Jordan Harband", + "url": "https://github.com/ljharb" + } + ], + "keywords": [ + "extend", + "clone", + "merge" + ], + "repository" : { + "type": "git", + "url": "https://github.com/justmoon/node-extend.git" + }, + "dependencies": { + }, + "devDependencies": { + "tape" : "~3.0.0", + "covert": "~0.4.0", + "jscs": "~1.6.2" + } +} + diff --git a/node_modules/extend/index.js b/node_modules/extend/index.js new file mode 100644 index 0000000..2aa3faa --- /dev/null +++ b/node_modules/extend/index.js @@ -0,0 +1,117 @@ +'use strict'; + +var hasOwn = Object.prototype.hasOwnProperty; +var toStr = Object.prototype.toString; +var defineProperty = Object.defineProperty; +var gOPD = Object.getOwnPropertyDescriptor; + +var isArray = function isArray(arr) { + if (typeof Array.isArray === 'function') { + return Array.isArray(arr); + } + + return toStr.call(arr) === '[object Array]'; +}; + +var isPlainObject = function isPlainObject(obj) { + if (!obj || toStr.call(obj) !== '[object Object]') { + return false; + } + + var hasOwnConstructor = hasOwn.call(obj, 'constructor'); + var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); + // Not own constructor property must be Object + if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { + return false; + } + + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. 
+ var key; + for (key in obj) { /**/ } + + return typeof key === 'undefined' || hasOwn.call(obj, key); +}; + +// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target +var setProperty = function setProperty(target, options) { + if (defineProperty && options.name === '__proto__') { + defineProperty(target, options.name, { + enumerable: true, + configurable: true, + value: options.newValue, + writable: true + }); + } else { + target[options.name] = options.newValue; + } +}; + +// Return undefined instead of __proto__ if '__proto__' is not an own property +var getProperty = function getProperty(obj, name) { + if (name === '__proto__') { + if (!hasOwn.call(obj, name)) { + return void 0; + } else if (gOPD) { + // In early versions of node, obj['__proto__'] is buggy when obj has + // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. + return gOPD(obj, name).value; + } + } + + return obj[name]; +}; + +module.exports = function extend() { + var options, name, src, copy, copyIsArray, clone; + var target = arguments[0]; + var i = 1; + var length = arguments.length; + var deep = false; + + // Handle a deep copy situation + if (typeof target === 'boolean') { + deep = target; + target = arguments[1] || {}; + // skip the boolean and the target + i = 2; + } + if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { + target = {}; + } + + for (; i < length; ++i) { + options = arguments[i]; + // Only deal with non-null/undefined values + if (options != null) { + // Extend the base object + for (name in options) { + src = getProperty(target, name); + copy = getProperty(options, name); + + // Prevent never-ending loop + if (target !== copy) { + // Recurse if we're merging plain objects or arrays + if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { + if (copyIsArray) { + copyIsArray = false; + clone = src && isArray(src) ? src : []; + } else { + clone = src && isPlainObject(src) ? 
src : {}; + } + + // Never move original objects, clone them + setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); + + // Don't bring in undefined values + } else if (typeof copy !== 'undefined') { + setProperty(target, { name: name, newValue: copy }); + } + } + } + } + } + + // Return the modified object + return target; +}; diff --git a/node_modules/extend/package.json b/node_modules/extend/package.json new file mode 100644 index 0000000..85279f7 --- /dev/null +++ b/node_modules/extend/package.json @@ -0,0 +1,42 @@ +{ + "name": "extend", + "author": "Stefan Thomas (http://www.justmoon.net)", + "version": "3.0.2", + "description": "Port of jQuery.extend for node.js and the browser", + "main": "index", + "scripts": { + "pretest": "npm run lint", + "test": "npm run tests-only", + "posttest": "npm run coverage-quiet", + "tests-only": "node test", + "coverage": "covert test/index.js", + "coverage-quiet": "covert test/index.js --quiet", + "lint": "npm run jscs && npm run eslint", + "jscs": "jscs *.js */*.js", + "eslint": "eslint *.js */*.js" + }, + "contributors": [ + { + "name": "Jordan Harband", + "url": "https://github.com/ljharb" + } + ], + "keywords": [ + "extend", + "clone", + "merge" + ], + "repository": { + "type": "git", + "url": "https://github.com/justmoon/node-extend.git" + }, + "dependencies": {}, + "devDependencies": { + "@ljharb/eslint-config": "^12.2.1", + "covert": "^1.1.0", + "eslint": "^4.19.1", + "jscs": "^3.0.7", + "tape": "^4.9.1" + }, + "license": "MIT" +} diff --git a/node_modules/fast-xml-parser/CHANGELOG.md b/node_modules/fast-xml-parser/CHANGELOG.md new file mode 100644 index 0000000..3925c32 --- /dev/null +++ b/node_modules/fast-xml-parser/CHANGELOG.md @@ -0,0 +1,584 @@ +Note: If you find missing information about particular minor version, that version must have been changed without any functional change in this library. + +**4.3.6 / 2024-03-16** +* Add support for parsing HTML numeric entities (#645) (By [Jonas Schade ](https://github.com/DerZade)) + +**4.3.5 / 2024-02-24** +* code for v5 is added for experimental use + +**4.3.4 / 2024-01-10** +* fix: Don't escape entities in CDATA sections (#633) (By [wackbyte](https://github.com/wackbyte)) + +**4.3.3 / 2024-01-10** +* Remove unnecessary regex + +**4.3.2 / 2023-10-02** +* fix `jObj.hasOwnProperty` when give input is null (By [Arda TANRIKULU](https://github.com/ardatan)) + +**4.3.1 / 2023-09-24** +* revert back "Fix typings for builder and parser to make return type generic" to avoid failure of existing projects. Need to decide a common approach. + +**4.3.0 / 2023-09-20** +* Fix stopNodes to work with removeNSPrefix (#607) (#608) (By [Craig Andrews]https://github.com/candrews)) +* Fix #610 ignore properties set to Object.prototype +* Fix typings for builder and parser to make return type generic (By [Sarah Dayan](https://github.com/sarahdayan)) + +**4.2.7 / 2023-07-30** +* Fix: builder should set text node correctly when only textnode is present (#589) (By [qianqing](https://github.com/joneqian)) +* Fix: Fix for null and undefined attributes when building xml (#585) (#598). A null or undefined value should be ignored. 
(By [Eugenio Ceschia](https://github.com/cecia234)) + +**4.2.6 / 2023-07-17** +* Fix: Remove trailing slash from jPath for self-closing tags (#595) (By [Maciej Radzikowski](https://github.com/m-radzikowski)) + +**4.2.5 / 2023-06-22** +* change code implementation + +**4.2.4 / 2023-06-06** +* fix security bug + +**4.2.3 / 2023-06-05** +* fix security bug + +**4.2.2 / 2023-04-18** +* fix #562: fix unpaired tag when it comes in last of a nested tag. Also throw error when unpaired tag is used as closing tag + +**4.2.1 / 2023-04-18** +* fix: jpath after unpaired tags + +**4.2.0 / 2023-04-09** +* support `updateTag` parser property + +**4.1.4 / 2023-04-08** +* update typings to let user create XMLBuilder instance without options (#556) (By [Patrick](https://github.com/omggga)) +* fix: IsArray option isn't parsing tags with 0 as value correctly #490 (#557) (By [Aleksandr Murashkin](https://github.com/p-kuen)) +* feature: support `oneListGroup` to group repeated children tags udder single group + +**4.1.3 / 2023-02-26** +* fix #546: Support complex entity value + +**4.1.2 / 2023-02-12** +* Security Fix + +**4.1.1 / 2023-02-03** +* Fix #540: ignoreAttributes breaks unpairedTags +* Refactor XML builder code + +**4.1.0 / 2023-02-02** +* Fix '<' or '>' in DTD comment throwing an error. (#533) (By [Adam Baker](https://github.com/Cwazywierdo)) +* Set "eNotation" to 'true' as default + +**4.0.15 / 2023-01-25** +* make "eNotation" optional + +**4.0.14 / 2023-01-22** +* fixed: add missed typing "eNotation" to parse values + +**4.0.13 / 2023-01-07** +* preserveorder formatting (By [mdeknowis](https://github.com/mdeknowis)) +* support `transformAttributeName` (By [Erik Rothoff Andersson](https://github.com/erkie)) + +**4.0.12 / 2022-11-19** +* fix typescript + +**4.0.11 / 2022-10-05** +* fix #501: parse for entities only once + +**4.0.10 / 2022-09-14** +* fix broken links in demo site (By [Yannick Lang](https://github.com/layaxx)) +* fix #491: tagValueProcessor type definition (By [Andrea Francesco Speziale](https://github.com/andreafspeziale)) +* Add jsdocs for tagValueProcessor + + +**4.0.9 / 2022-07-10** +* fix #470: stop-tag can have self-closing tag with same name +* fix #472: stopNode can have any special tag inside +* Allow !ATTLIST and !NOTATION with DOCTYPE +* Add transformTagName option to transform tag names when parsing (#469) (By [Erik Rothoff Andersson](https://github.com/erkie)) + +**4.0.8 / 2022-05-28** +* Fix CDATA parsing returning empty string when value = 0 (#451) (By [ndelanou](https://github.com/ndelanou)) +* Fix stopNodes when same tag appears inside node (#456) (By [patrickshipe](https://github.com/patrickshipe)) +* fix #468: prettify own properties only + +**4.0.7 / 2022-03-18** +* support CDATA even if tag order is not preserved +* support Comments even if tag order is not preserved +* fix #446: XMLbuilder should not indent XML declaration + +**4.0.6 / 2022-03-08** +* fix: call tagValueProcessor only once for array items +* fix: missing changed for #437 + +**4.0.5 / 2022-03-06** +* fix #437: call tagValueProcessor from XML builder + +**4.0.4 / 2022-03-03** +* fix #435: should skip unpaired and self-closing nodes when set as stopnodes + +**4.0.3 / 2022-02-15** +* fix: ReferenceError when Bundled with Strict (#431) (By [Andreas Heissenberger](https://github.com/aheissenberger)) + + +**4.0.2 / 2022-02-04** +* builder supports `suppressUnpairedNode` +* parser supports `ignoreDeclaration` and `ignorePiTags` +* fix: when comment is parsed as text value if given as ` ...` #423 +* builder 
supports decoding `&` + +**4.0.1 / 2022-01-08** +* fix builder for pi tag +* fix: support suppressBooleanAttrs by builder + +**4.0.0 / 2022-01-06** +* Generating different combined, parser only, builder only, validator only browser bundles +* Keeping cjs modules as they can be imported in cjs and esm modules both. Otherwise refer `esm` branch. + +**4.0.0-beta.8 / 2021-12-13** +* call tagValueProcessor for stop nodes + +**4.0.0-beta.7 / 2021-12-09** +* fix Validator bug when an attribute has no value but '=' only +* XML Builder should suppress unpaired tags by default. +* documents update for missing features +* refactoring to use Object.assign +* refactoring to remove repeated code + +**4.0.0-beta.6 / 2021-12-05** +* Support PI Tags processing +* Support `suppressBooleanAttributes` by XML Builder for attributes with value `true`. + +**4.0.0-beta.5 / 2021-12-04** +* fix: when a tag with name "attributes" + +**4.0.0-beta.4 / 2021-12-02** +* Support HTML document parsing +* skip stop nodes parsing when building the XML from JS object +* Support external entites without DOCTYPE +* update dev dependency: strnum v1.0.5 to fix long number issue + +**4.0.0-beta.3 / 2021-11-30** +* support global stopNodes expression like "*.stop" +* support self-closing and paired unpaired tags +* fix: CDATA should not be parsed. +* Fix typings for XMLBuilder (#396)(By [Anders Emil Salvesen](https://github.com/andersem)) +* supports XML entities, HTML entities, DOCTYPE entities + +**⚠️ 4.0.0-beta.2 / 2021-11-19** +* rename `attrMap` to `attibutes` in parser output when `preserveOrder:true` +* supports unpairedTags + +**⚠️ 4.0.0-beta.1 / 2021-11-18** +* Parser returns an array now + * to make the structure common + * and to return root level detail +* renamed `cdataTagName` to `cdataPropName` +* Added `commentPropName` +* fix typings + +**⚠️ 4.0.0-beta.0 / 2021-11-16** +* Name change of many configuration properties. + * `attrNodeName` to `attributesGroupName` + * `attrValueProcessor` to `attributeValueProcessor` + * `parseNodeValue` to `parseTagValue` + * `ignoreNameSpace` to `removeNSPrefix` + * `numParseOptions` to `numberParseOptions` + * spelling correction for `suppressEmptyNode` +* Name change of cli and browser bundle to **fxparser** +* `isArray` option is added to parse a tag into array +* `preserveOrder` option is added to render XML in such a way that the result js Object maintains the order of properties same as in XML. +* Processing behaviour of `tagValueProcessor` and `attributeValueProcessor` are changes with extra input parameters +* j2xparser is renamed to XMLBuilder. +* You need to build XML parser instance for given options first before parsing XML. +* fix #327, #336: throw error when extra text after XML content +* fix #330: attribute value can have '\n', +* fix #350: attrbiutes can be separated by '\n' from tagname + +3.21.1 / 2021-10-31 +* Correctly format JSON elements with a text prop but no attribute props ( By [haddadnj](https://github.com/haddadnj) ) + +3.21.0 / 2021-10-25 + * feat: added option `rootNodeName` to set tag name for array input when converting js object to XML. + * feat: added option `alwaysCreateTextNode` to force text node creation (by: *@massimo-ua*) + * ⚠️ feat: Better error location for unclosed tags. (by *@Gei0r*) + * Some error messages would be changed when validating XML. Eg + * `{ InvalidXml: "Invalid '[ \"rootNode\"]' found." }` → `{InvalidTag: "Unclosed tag 'rootNode'."}` + * `{ InvalidTag: "Closing tag 'rootNode' is expected inplace of 'rootnode'." 
}` → `{ InvalidTag: "Expected closing tag 'rootNode' (opened in line 1) instead of closing tag 'rootnode'."}` + * ⚠️ feat: Column in error response when validating XML +```js +{ + "code": "InvalidAttr", + "msg": "Attribute 'abc' is repeated.", + "line": 1, + "col": 22 +} +``` + +3.20.1 / 2021-09-25 + * update strnum package + +3.20.0 / 2021-09-10 + * Use strnum npm package to parse string to number + * breaking change: long number will be parsed to scientific notation. + +3.19.0 / 2021-03-14 + * License changed to MIT original + * Fix #321 : namespace tag parsing + +3.18.0 / 2021-02-05 + * Support RegEx and function in arrayMode option + * Fix #317 : validate nested PI tags + +3.17.4 / 2020-06-07 + * Refactor some code to support IE11 + * Fix: `` space as attribute string + +3.17.3 / 2020-05-23 + * Fix: tag name separated by \n \t + * Fix: throw error for unclosed tags + +3.17.2 / 2020-05-23 + * Fixed an issue in processing doctype tag + * Fixed tagName where it should not have whitespace chars + +3.17.1 / 2020-05-19 + * Fixed an issue in checking opening tag + +3.17.0 / 2020-05-18 + * parser: fix '<' issue when it comes in aatr value + * parser: refactoring to remove dependency from regex + * validator: fix IE 11 issue for error messages + * updated dev dependencies + * separated benchmark module to sub-module + * breaking change: comments will not be removed from CDATA data + +3.16.0 / 2020-01-12 + * validaor: fix for ampersand characters (#215) + * refactoring to support unicode chars in tag name + * update typing for validator error + +3.15.1 / 2019-12-09 + * validaor: fix multiple roots are not allowed + +3.15.0 / 2019-11-23 + * validaor: improve error messaging + * validator: add line number in case of error + * validator: add more error scenarios to make it more descriptive + +3.14.0 / 2019-10-25 + * arrayMode for XML to JS obj parsing + +3.13.0 / 2019-10-02 + * pass tag/attr name to tag/attr value processor + * inbuilt optional validation with XML parser + +3.12.21 / 2019-10-02 + * Fix validator for unclosed XMLs + * move nimnjs dependency to dev dependency + * update dependencies + +3.12.20 / 2019-08-16 + * Revert: Fix #167: '>' in attribute value as it is causing high performance degrade. + +3.12.19 / 2019-07-28 + * Fix js to xml parser should work for date values. (broken: `tagValueProcessor` will receive the original value instead of string always) (breaking change) + +3.12.18 / 2019-07-27 + * remove configstore dependency + +3.12.17 / 2019-07-14 + * Fix #167: '>' in attribute value + +3.12.16 / 2019-03-23 + * Support a new option "stopNodes". (#150) +Accept the list of tags which are not required to be parsed. Instead, all the nested tag and data will be assigned as string. 
+ * Don't show post-install message + +3.12.12 / 2019-01-11 + * fix : IE parseInt, parseFloat error + +3.12.11 / 2018-12-24 + * fix #132: "/" should not be parsed as boolean attr in case of self closing tags + +3.12.9 / 2018-11-23 + * fix #129 : validator should not fail when an atrribute name is 'length' + +3.12.8 / 2018-11-22 + * fix #128 : use 'attrValueProcessor' to process attribute value in json2xml parser + +3.12.6 / 2018-11-10 + * Fix #126: check for type + +3.12.4 / 2018-09-12 + * Fix: include tasks in npm package + +3.12.3 / 2018-09-12 + * Fix CLI issue raised in last PR + +3.12.2 / 2018-09-11 + * Fix formatting for JSON to XML output + * Migrate to webpack (PR merged) + * fix cli (PR merged) + +3.12.0 / 2018-08-06 + * Support hexadecimal values + * Support true number parsing + +3.11.2 / 2018-07-23 + * Update Demo for more options + * Update license information + * Update readme for formatting, users, and spelling mistakes + * Add missing typescript definition for j2xParser + * refactoring: change filenames + +3.11.1 / 2018-06-05 + * fix #93: read the text after self closing tag + +3.11.0 / 2018-05-20 + * return defaultOptions if there are not options in buildOptions function + * added localeRange declaration in parser.d.ts + * Added support of cyrillic characters in validator XML + * fixed bug in validator work when XML data with byte order marker + +3.10.0 / 2018-05-13 + * Added support of cyrillic characters in parsing XML to JSON + +3.9.11 / 2018-05-09 + * fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/80 fix nimn chars + * update package information + * fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/86: json 2 xml parser : property with null value should be parsed to self closing tag. + * update online demo + * revert zombiejs to old version to support old version of node + * update dependencies + +3.3.10 / 2018-04-23 + * fix #77 : parse even if closing tag has space before '>' + * include all css & js lib in demo app + * remove babel dependencies until needed + +3.3.9 / 2018-04-18 + * fix #74 : TS2314 TypeScript compiler error + +3.3.8 / 2018-04-17 + * fix #73 : IE doesn't support Object.assign + +3.3.7 / 2018-04-14 + * fix: use let insted of const in for loop of validator + * Merge pull request + https://github.com/NaturalIntelligence/fast-xml-parser/issues/71 from bb/master + first draft of typings for typescript + https://github.com/NaturalIntelligence/fast-xml-parser/issues/69 + * Merge pull request + https://github.com/NaturalIntelligence/fast-xml-parser/issues/70 from bb/patch-1 + fix some typos in readme + +3.3.6 / 2018-03-21 + * change arrow functions to full notation for IE compatibility + +3.3.5 / 2018-03-15 + * fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/67 : attrNodeName invalid behavior + * fix: remove decodeHTML char condition + +3.3.4 / 2018-03-14 + * remove dependency on "he" package + * refactor code to separate methods in separate files. + * draft code for transforming XML to json string. It is not officially documented due to performance issue. + +3.3.0 / 2018-03-05 + * use common default options for XML parsing for consistency. And add `parseToNimn` method. 
+ * update nexttodo + * update README about XML to Nimn transformation and remove special notes about 3.x release + * update CONTRIBUTING.ms mentioning nexttodo + * add negative case for XML PIs + * validate xml processing instruction tags https://github.com/NaturalIntelligence/fast-xml-parser/issues/62 + * nimndata: handle array with object + * nimndata: node with nested node and text node + * nimndata: handle attributes and text node + * nimndata: add options, handle array + * add xml to nimn data converter + * x2j: direct access property with tagname + * update changelog + * fix validator when single quote presents in value enclosed with double quotes or vice versa + * Revert "remove unneded nimnjs dependency, move opencollective to devDependencies and replace it + with more light opencollective-postinstall" + This reverts commit d47aa7181075d82db4fee97fd8ea32b056fe3f46. + * Merge pull request: https://github.com/NaturalIntelligence/fast-xml-parser/issues/63 from HaroldPutman/suppress-undefined + Keep undefined nodes out of the XML output : This is useful when you are deleting nodes from the JSON and rewriting XML. + +3.2.4 / 2018-03-01 + * fix #59 fix in validator when open quote presents in attribute value + * Create nexttodo.md + * exclude static from bitHound tests + * add package lock + +3.2.3 / 2018-02-28 + * Merge pull request from Delagen/master: fix namespaces can contain the same characters as xml names + +3.2.2 / 2018-02-22 + * fix: attribute xmlns should not be removed if ignoreNameSpace is false + * create CONTRIBUTING.md + +3.2.1 / 2018-02-17 + * fix: empty attribute should be parsed + +3.2.0 / 2018-02-16 + * Merge pull request : Dev to Master + * Update README and version + * j2x:add performance test + * j2x: Remove extra empty line before closing tag + * j2x: suppress empty nodes to self closing node if configured + * j2x: provide option to give indentation depth + * j2x: make optional formatting + * j2x: encodeHTMLchat + * j2x: handle cdata tag + * j2x: handle grouped attributes + * convert json to xml + - nested object + - array + - attributes + - text value + * small refactoring + * Merge pull request: Update cli.js to let user validate XML file or data + * Add option for rendering CDATA as separate property + +3.0.1 / 2018-02-09 + * fix CRLF: replace it with single space in attributes value only. 
+ +3.0.0 / 2018-02-08 + * change online tool with new changes + * update info about new options + * separate tag value processing to separate function + * make HTML decoding optional + * give an option to allow boolean attributes + * change cli options as per v3 + * Correct comparison table format on README + * update v3 information + * some performance improvement changes + * Make regex object local to the method and move some common methods to util + * Change parser to + - handle multiple instances of CDATA + - make triming of value optionals + - HTML decode attribute and text value + - refactor code to separate files + * Ignore newline chars without RE (in validator) + * validate for XML prolog + * Validate DOCTYPE without RE + * Update validator to return error response + * Update README to add detail about V3 + * Separate xmlNode model class + * include vscode debug config + * fix for repeated object + * fix attribute regex for boolean attributes + * Fix validator for invalid attributes +2.9.4 / 2018-02-02 + * Merge pull request: Decode HTML characters + * refactor source folder name + * ignore bundle / browser js to be published to npm +2.9.3 / 2018-01-26 + * Merge pull request: Correctly remove CRLF line breaks + * Enable to parse attribute in online editor + * Fix testing demo app test + * Describe parsing options + * Add options for online demo +2.9.2 / 2018-01-18 + * Remove check if tag starting with "XML" + * Fix: when there are spaces before / after CDATA + +2.9.1 / 2018-01-16 + * Fix: newline should be replaced with single space + * Fix: for single and multiline comments + * validate xml with CDATA + * Fix: the issue when there is no space between 2 attributes + * Fix: https://github.com/NaturalIntelligence/fast-xml-parser/issues/33: when there is newline char in attr val, it doesn't parse + * Merge pull request: fix ignoreNamespace + * fix: don't wrap attributes if only namespace attrs + * fix: use portfinder for run tests, update deps + * fix: don't treat namespaces as attributes when ignoreNamespace enabled + +2.9.0 / 2018-01-10 + * Rewrite the validator to handle large files. + Ignore DOCTYPE validation. + * Fix: When attribute value has equal sign + +2.8.3 / 2017-12-15 + * Fix: when a tag has value along with subtags + +2.8.2 / 2017-12-04 + * Fix value parsing for IE + +2.8.1 / 2017-12-01 + * fix: validator should return false instead of err when invalid XML + +2.8.0 / 2017-11-29 + * Add CLI option to ignore value conversion + * Fix variable name when filename is given on CLI + * Update CLI help text + * Merge pull request: xml2js: Accept standard input + * Test Node 8 + * Update dependencies + * Bundle readToEnd + * Add ability to read from standard input + +2.7.4 / 2017-09-22 + * Merge pull request: Allow wrap attributes with subobject to compatible with other parsers output + +2.7.3 / 2017-08-02 + * fix: handle CDATA with regx + +2.7.2 / 2017-07-30 + * Change travis config for yarn caching + * fix validator: when tag property is same as array property + * Merge pull request: Failing test case in validator for valid SVG + +2.7.1 / 2017-07-26 + * Fix: Handle val 0 + +2.7.0 / 2017-07-25 + * Fix test for arrayMode + * Merge pull request: Add arrayMode option to parse any nodes as arrays + +2.6.0 / 2017-07-14 + * code improvement + * Add unit tests for value conversion for attr + * Merge pull request: option of an attribute value conversion to a number (textAttrConversion) the same way as the textNodeConversion option does. Default value is false. 
+ +2.5.1 / 2017-07-01 + * Fix XML element name pattern + * Fix XML element name pattern while parsing + * Fix validation for xml tag element + +2.5.0 / 2017-06-25 + * Improve Validator performance + * update attr matching regex + * Add perf tests + * Improve atrr regex to handle all cases + +2.4.4 / 2017-06-08 + * Bug fix: when an attribute has single or double quote in value + +2.4.3 / 2017-06-05 + * Bug fix: when multiple CDATA tags are given + * Merge pull request: add option "textNodeConversion" + * add option "textNodeConversion" + +2.4.1 / 2017-04-14 + * fix tests + * Bug fix: preserve initial space of node value + * Handle CDATA + +2.3.1 / 2017-03-15 + * Bug fix: when single self closing tag + * Merge pull request: fix .codeclimate.yml + * Update .codeclimate.yml - Fixed config so it does not error anymore. + * Update .codeclimate.yml + +2.3.0 / 2017-02-26 + * Code improvement + * add bithound config + * Update usage + * Update travis to generate bundle js before running tests + * 1.Browserify, 2. add more tests for validator + * Add validator + * Fix CLI default parameter bug + +2.2.1 / 2017-02-05 + * Bug fix: CLI default option diff --git a/node_modules/fast-xml-parser/LICENSE b/node_modules/fast-xml-parser/LICENSE new file mode 100644 index 0000000..d7da622 --- /dev/null +++ b/node_modules/fast-xml-parser/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Amit Kumar Gupta + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
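
For readers skimming this vendored dependency, here is a minimal sketch of how the fast-xml-parser v4 API described in the CHANGELOG above is typically driven from application code. It is hedged: it assumes the package's standard entry point and the same `XMLParser`/`XMLValidator` exports used by `src/cli/cli.js` later in this diff, plus the `XMLBuilder` the CHANGELOG refers to. The sample XML string and option values are illustrative only and do not come from this repository.

```js
// Minimal usage sketch for the vendored fast-xml-parser v4 package (illustrative, not from this repo).
const { XMLParser, XMLBuilder, XMLValidator } = require('fast-xml-parser');

const xml = '<note id="1"><to>Ana</to><body>Hello</body></note>'; // hypothetical sample input

// Validate first; validate() returns true, or an error object with code/msg/line/col.
const validation = XMLValidator.validate(xml);
if (validation !== true) {
  console.error('Invalid XML:', validation.err);
}

// Parse with roughly the same options the bundled CLI builds from its flags.
const parser = new XMLParser({
  ignoreAttributes: false,  // keep attributes such as id="1" (prefixed "@_" by default)
  parseTagValue: true,      // convert numeric text values to numbers
  parseAttributeValue: true,
});
const obj = parser.parse(xml);
console.log(obj.note.body); // "Hello"

// Rebuild XML from the JS object.
const builder = new XMLBuilder({ ignoreAttributes: false, format: true });
console.log(builder.build(obj));
```

The options object passed to `new XMLParser(...)` is the same shape the bundled CLI constructs from its `-ns`/`-a`/`-c` flags (see `src/cli/cli.js` further down in this diff), so the sketch simply mirrors those defaults.
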
diff --git a/node_modules/fast-xml-parser/README.md b/node_modules/fast-xml-parser/README.md new file mode 100644 index 0000000..500e1f3 --- /dev/null +++ b/node_modules/fast-xml-parser/README.md @@ -0,0 +1,208 @@ +# [fast-xml-parser](https://www.npmjs.com/package/fast-xml-parser) +[![Known Vulnerabilities](https://snyk.io/test/github/naturalintelligence/fast-xml-parser/badge.svg)](https://snyk.io/test/github/naturalintelligence/fast-xml-parser) +[![NPM quality][quality-image]][quality-url] +[![Coverage Status](https://coveralls.io/repos/github/NaturalIntelligence/fast-xml-parser/badge.svg?branch=master)](https://coveralls.io/github/NaturalIntelligence/fast-xml-parser?branch=master) +[Try me](https://naturalintelligence.github.io/fast-xml-parser/) +[![NPM total downloads](https://img.shields.io/npm/dt/fast-xml-parser.svg)](https://npm.im/fast-xml-parser) + +[quality-image]: http://npm.packagequality.com/shield/fast-xml-parser.svg?style=flat-square +[quality-url]: http://packagequality.com/#?package=fast-xml-parser + + +Validate XML, Parse XML to JS Object, or Build XML from JS Object without C/C++ based libraries and no callback. + +> XML Parser v5 is added for experimental use +> https://solothought.com + +Sponsor this project 👉 + + + + + + + Stubmatic donate button + + +### Current Sponsors + +Check the complete list at [ThankYouBackers](https://github.com/NaturalIntelligence/ThankYouBackers) for our sponsors and supporters. + +Through Github + + + + +Through OpenCollective + + + + + + + + + + + + + +## Users + + + + + + + + + + + + + + + + +Check the bigger [list](./USERs.md) + +The list of users is collected either from the list published by Github, communicated directly through mails/chat , or from other resources. If you feel that your name in the above list is incorrectly published or you're not the user of this library anymore then you can inform us to remove it. We'll do the necessary changes ASAP. + +If you want to be an anonymous user of this application and don't want to be highlighted anywhere then you can contact me at +- githubissues@proton.me +- https://linkedin.com/in/amitguptagwl/ + +## Main Features + +FXP logo + +* Validate XML data syntactically +* Parse XML to JS Object +* Build XML from JS Object +* Works with node packages, in browser, and in CLI (press try me button above for demo) +* Faster than any other pure JS implementation. +* It can handle big files (tested up to 100mb). +* Controlled parsing using various options +* XML Entities, HTML entities, and DOCTYPE entites are supported. +* unpaired tags (Eg `
` in HTML), stop nodes (Eg ` +: + +``` + +Check lib folder for different browser bundles + +| Bundle Name | Size | +| ------------------ | ---- | +| fxbuilder.min.js | 5.2K | +| fxparser.js | 50K | +| fxparser.min.js | 17K | +| fxp.min.js | 22K | +| fxvalidator.min.js | 5.7K | + +### Documents +**v3** +* [documents](./docs/v3/docs.md) + +**v4** +1. [GettingStarted.md](./docs/v4/1.GettingStarted.md) +2. [XML Parser](./docs/v4/2.XMLparseOptions.md) +3. [XML Builder](./docs/v4/3.XMLBuilder.md) +4. [XML Validator](./docs/v4/4.XMLValidator.md) +5. [Entities](./docs/v4/5.Entities.md) +6. [HTML Document Parsing](./docs/v4/6.HTMLParsing.md) +7. [PI Tag processing](./docs/v4/7.PITags.md) +## Performance + +### XML Parser + +![](./docs/imgs/XMLParser_v4.png) +* Y-axis: requests per second +* X-axis: File size + +**Large files** +![](./docs/imgs/XMLParser_large_v4.png) +* Y-axis: requests per second +* X-axis: File size +### XML Builder + +![](./docs/imgs/XMLBuilder_v4.png) +* Y-axis: requests per second + +negative means error + +[![](static/img/ni_ads_ads.gif)](https://github.com/NaturalIntelligence/ads/) + + +## Supporters +### Contributors + +This project exists thanks to [all](graphs/contributors) the people who contribute. [[Contribute](docs/CONTRIBUTING.md)]. + + + + +### Backers + +Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/fast-xml-parser#backer)] + + + + + +# License +* MIT License + +![Donate $5](static/img/donation_quote.png) diff --git a/node_modules/fast-xml-parser/package.json b/node_modules/fast-xml-parser/package.json new file mode 100644 index 0000000..fbc66c5 --- /dev/null +++ b/node_modules/fast-xml-parser/package.json @@ -0,0 +1,74 @@ +{ + "name": "fast-xml-parser", + "version": "4.3.6", + "description": "Validate XML, Parse XML, Build XML without C/C++ based libraries", + "main": "./src/fxp.js", + "scripts": { + "test": "nyc --reporter=lcov --reporter=text jasmine spec/*spec.js", + "test-types": "tsc --noEmit spec/typings/typings-test.ts", + "unit": "jasmine", + "coverage": "nyc report --reporter html --reporter text -t .nyc_output --report-dir .nyc_output/summary", + "perf": "node ./benchmark/perfTest3.js", + "lint": "eslint src/*.js spec/*.js", + "bundle": "webpack --config webpack-prod.config.js", + "prettier": "prettier --write src/**/*.js", + "publish-please": "publish-please", + "checkReadiness": "publish-please --dry-run" + }, + "bin": { + "fxparser": "./src/cli/cli.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/NaturalIntelligence/fast-xml-parser" + }, + "keywords": [ + "fast", + "xml", + "json", + "parser", + "xml2js", + "x2js", + "xml2json", + "js", + "cli", + "validator", + "validate", + "transformer", + "assert", + "js2xml", + "json2xml", + "html" + ], + "author": "Amit Gupta (https://solothought.com)", + "license": "MIT", + "devDependencies": { + "@babel/core": "^7.13.10", + "@babel/plugin-transform-runtime": "^7.13.10", + "@babel/preset-env": "^7.13.10", + "@babel/register": "^7.13.8", + "@types/node": "20", + "babel-loader": "^8.2.2", + "cytorus": "^0.2.9", + "eslint": "^8.3.0", + "he": "^1.2.0", + "jasmine": "^3.6.4", + "nyc": "^15.1.0", + "prettier": "^1.19.1", + "publish-please": "^5.5.2", + "typescript": "5", + "webpack": "^5.64.4", + "webpack-cli": "^4.9.1" + }, + "typings": "src/fxp.d.ts", + "funding": [{ + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + },{ + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + }], + "dependencies": { + "strnum": 
"^1.0.5" + } +} diff --git a/node_modules/fast-xml-parser/src/cli/cli.js b/node_modules/fast-xml-parser/src/cli/cli.js new file mode 100644 index 0000000..984534c --- /dev/null +++ b/node_modules/fast-xml-parser/src/cli/cli.js @@ -0,0 +1,93 @@ +#!/usr/bin/env node +'use strict'; +/*eslint-disable no-console*/ +const fs = require('fs'); +const path = require('path'); +const {XMLParser, XMLValidator} = require("../fxp"); +const readToEnd = require('./read').readToEnd; + +const version = require('./../../package.json').version; +if (process.argv[2] === '--help' || process.argv[2] === '-h') { + console.log(require("./man")); +} else if (process.argv[2] === '--version') { + console.log(version); +} else { + const options = { + removeNSPrefix: true, + ignoreAttributes: false, + parseTagValue: true, + parseAttributeValue: true, + }; + let fileName = ''; + let outputFileName; + let validate = false; + let validateOnly = false; + for (let i = 2; i < process.argv.length; i++) { + if (process.argv[i] === '-ns') { + options.removeNSPrefix = false; + } else if (process.argv[i] === '-a') { + options.ignoreAttributes = true; + } else if (process.argv[i] === '-c') { + options.parseTagValue = false; + options.parseAttributeValue = false; + } else if (process.argv[i] === '-o') { + outputFileName = process.argv[++i]; + } else if (process.argv[i] === '-v') { + validate = true; + } else if (process.argv[i] === '-V') { + validateOnly = true; + } else { + //filename + fileName = process.argv[i]; + } + } + + const callback = function(xmlData) { + let output = ''; + if (validate) { + const parser = new XMLParser(options); + output = parser.parse(xmlData,validate); + } else if (validateOnly) { + output = XMLValidator.validate(xmlData); + process.exitCode = output === true ? 0 : 1; + } else { + const parser = new XMLParser(options); + output = JSON.stringify(parser.parse(xmlData,validate), null, 4); + } + if (outputFileName) { + writeToFile(outputFileName, output); + } else { + console.log(output); + } + }; + + try { + + if (!fileName) { + readToEnd(process.stdin, function(err, data) { + if (err) { + throw err; + } + callback(data.toString()); + }); + } else { + fs.readFile(fileName, function(err, data) { + if (err) { + throw err; + } + callback(data.toString()); + }); + } + } catch (e) { + console.log('Seems an invalid file or stream.' + e); + } +} + +function writeToFile(fileName, data) { + fs.writeFile(fileName, data, function(err) { + if (err) { + throw err; + } + console.log('JSON output has been written to ' + fileName); + }); +} diff --git a/node_modules/fast-xml-parser/src/cli/man.js b/node_modules/fast-xml-parser/src/cli/man.js new file mode 100644 index 0000000..89947cc --- /dev/null +++ b/node_modules/fast-xml-parser/src/cli/man.js @@ -0,0 +1,12 @@ +module.exports = `Fast XML Parser 4.0.0 +---------------- +$ fxparser [-ns|-a|-c|-v|-V] [-o outputfile.json] +$ cat xmlfile.xml | fxparser [-ns|-a|-c|-v|-V] [-o outputfile.json] + +Options +---------------- +-ns: remove namespace from tag and atrribute name. +-a: don't parse attributes. +-c: parse values to premitive type. +-v: validate before parsing. 
+-V: validate only.` \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/cli/read.js b/node_modules/fast-xml-parser/src/cli/read.js new file mode 100644 index 0000000..642da52 --- /dev/null +++ b/node_modules/fast-xml-parser/src/cli/read.js @@ -0,0 +1,92 @@ +'use strict'; + +// Copyright 2013 Timothy J Fontaine +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE + +/* + +Read any stream all the way to the end and trigger a single cb + +const http = require('http'); + +const rte = require('readtoend'); + +http.get('http://nodejs.org', function(response) { + rte.readToEnd(response, function(err, body) { + console.log(body); + }); +}); + +*/ + +let stream = require('stream'); +const util = require('util'); + +if (!stream.Transform) { + stream = require('readable-stream'); +} + +function ReadToEnd(opts) { + if (!(this instanceof ReadToEnd)) { + return new ReadToEnd(opts); + } + + stream.Transform.call(this, opts); + + this._rte_encoding = opts.encoding || 'utf8'; + + this._buff = ''; +} + +module.exports = ReadToEnd; +util.inherits(ReadToEnd, stream.Transform); + +ReadToEnd.prototype._transform = function(chunk, encoding, done) { + this._buff += chunk.toString(this._rte_encoding); + this.push(chunk); + done(); +}; + +ReadToEnd.prototype._flush = function(done) { + this.emit('complete', undefined, this._buff); + done(); +}; + +ReadToEnd.readToEnd = function(stream, options, cb) { + if (!cb) { + cb = options; + options = {}; + } + + const dest = new ReadToEnd(options); + + stream.pipe(dest); + + stream.on('error', function(err) { + stream.unpipe(dest); + cb(err); + }); + + dest.on('complete', cb); + + dest.resume(); + + return dest; +}; diff --git a/node_modules/fast-xml-parser/src/fxp.d.ts b/node_modules/fast-xml-parser/src/fxp.d.ts new file mode 100644 index 0000000..835e44c --- /dev/null +++ b/node_modules/fast-xml-parser/src/fxp.d.ts @@ -0,0 +1,402 @@ +type X2jOptions = { + /** + * Preserve the order of tags in resulting JS object + * + * Defaults to `false` + */ + preserveOrder?: boolean; + + /** + * Give a prefix to the attribute name in the resulting JS object + * + * Defaults to '@_' + */ + attributeNamePrefix?: string; + + /** + * A name to group all attributes of a tag under, or `false` to disable + * + * Defaults to `false` + */ + attributesGroupName?: false | string; + + /** + * The name of the next node in the resulting JS + * + * Defaults to `#text` + */ + textNodeName?: string; + + /** + * Whether to ignore attributes when parsing + * + * 
Defaults to `true` + */ + ignoreAttributes?: boolean; + + /** + * Whether to remove namespace string from tag and attribute names + * + * Defaults to `false` + */ + removeNSPrefix?: boolean; + + /** + * Whether to allow attributes without value + * + * Defaults to `false` + */ + allowBooleanAttributes?: boolean; + + /** + * Whether to parse tag value with `strnum` package + * + * Defaults to `true` + */ + parseTagValue?: boolean; + + /** + * Whether to parse tag value with `strnum` package + * + * Defaults to `false` + */ + parseAttributeValue?: boolean; + + /** + * Whether to remove surrounding whitespace from tag or attribute value + * + * Defaults to `true` + */ + trimValues?: boolean; + + /** + * Give a property name to set CDATA values to instead of merging to tag's text value + * + * Defaults to `false` + */ + cdataPropName?: false | string; + + /** + * If set, parse comments and set as this property + * + * Defaults to `false` + */ + commentPropName?: false | string; + + /** + * Control how tag value should be parsed. Called only if tag value is not empty + * + * @returns {undefined|null} `undefined` or `null` to set original value. + * @returns {unknown} + * + * 1. Different value or value with different data type to set new value. + * 2. Same value to set parsed value if `parseTagValue: true`. + * + * Defaults to `(tagName, val, jPath, hasAttributes, isLeafNode) => val` + */ + tagValueProcessor?: (tagName: string, tagValue: string, jPath: string, hasAttributes: boolean, isLeafNode: boolean) => unknown; + + /** + * Control how attribute value should be parsed + * + * @param attrName + * @param attrValue + * @param jPath + * @returns {undefined|null} `undefined` or `null` to set original value + * @returns {unknown} + * + * Defaults to `(attrName, val, jPath) => val` + */ + attributeValueProcessor?: (attrName: string, attrValue: string, jPath: string) => unknown; + + /** + * Options to pass to `strnum` for parsing numbers + * + * Defaults to `{ hex: true, leadingZeros: true, eNotation: true }` + */ + numberParseOptions?: strnumOptions; + + /** + * Nodes to stop parsing at + * + * Defaults to `[]` + */ + stopNodes?: string[]; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; + + /** + * Whether to always create a text node + * + * Defaults to `false` + */ + alwaysCreateTextNode?: boolean; + + /** + * Determine whether a tag should be parsed as an array + * + * @param tagName + * @param jPath + * @param isLeafNode + * @param isAttribute + * @returns {boolean} + * + * Defaults to `() => false` + */ + isArray?: (tagName: string, jPath: string, isLeafNode: boolean, isAttribute: boolean) => boolean; + + /** + * Whether to process default and DOCTYPE entities + * + * Defaults to `true` + */ + processEntities?: boolean; + + /** + * Whether to process HTML entities + * + * Defaults to `false` + */ + htmlEntities?: boolean; + + /** + * Whether to ignore the declaration tag from output + * + * Defaults to `false` + */ + ignoreDeclaration?: boolean; + + /** + * Whether to ignore Pi tags + * + * Defaults to `false` + */ + ignorePiTags?: boolean; + + /** + * Transform tag names + * + * Defaults to `false` + */ + transformTagName?: ((tagName: string) => string) | false; + + /** + * Transform attribute names + * + * Defaults to `false` + */ + transformAttributeName?: ((attributeName: string) => string) | false; + + /** + * Change the tag name when a different name is returned. Skip the tag from parsed result when false is returned. 
+ * Modify `attrs` object to control attributes for the given tag. + * + * @returns {string} new tag name. + * @returns false to skip the tag + * + * Defaults to `(tagName, jPath, attrs) => tagName` + */ + updateTag?: (tagName: string, jPath: string, attrs: {[k: string]: string}) => string | boolean; +}; + +type strnumOptions = { + hex: boolean; + leadingZeros: boolean, + skipLike?: RegExp, + eNotation?: boolean +} + +type validationOptions = { + /** + * Whether to allow attributes without value + * + * Defaults to `false` + */ + allowBooleanAttributes?: boolean; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; +}; + +type XmlBuilderOptions = { + /** + * Give a prefix to the attribute name in the resulting JS object + * + * Defaults to '@_' + */ + attributeNamePrefix?: string; + + /** + * A name to group all attributes of a tag under, or `false` to disable + * + * Defaults to `false` + */ + attributesGroupName?: false | string; + + /** + * The name of the next node in the resulting JS + * + * Defaults to `#text` + */ + textNodeName?: string; + + /** + * Whether to ignore attributes when parsing + * + * Defaults to `true` + */ + ignoreAttributes?: boolean; + + /** + * Give a property name to set CDATA values to instead of merging to tag's text value + * + * Defaults to `false` + */ + cdataPropName?: false | string; + + /** + * If set, parse comments and set as this property + * + * Defaults to `false` + */ + commentPropName?: false | string; + + /** + * Whether to make output pretty instead of single line + * + * Defaults to `false` + */ + format?: boolean; + + + /** + * If `format` is set to `true`, sets the indent string + * + * Defaults to ` ` + */ + indentBy?: string; + + /** + * Give a name to a top-level array + * + * Defaults to `undefined` + */ + arrayNodeName?: string; + + /** + * Create empty tags for tags with no text value + * + * Defaults to `false` + */ + suppressEmptyNode?: boolean; + + /** + * Suppress an unpaired tag + * + * Defaults to `true` + */ + suppressUnpairedNode?: boolean; + + /** + * Don't put a value for boolean attributes + * + * Defaults to `true` + */ + suppressBooleanAttributes?: boolean; + + /** + * Preserve the order of tags in resulting JS object + * + * Defaults to `false` + */ + preserveOrder?: boolean; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; + + /** + * Nodes to stop parsing at + * + * Defaults to `[]` + */ + stopNodes?: string[]; + + /** + * Control how tag value should be parsed. Called only if tag value is not empty + * + * @returns {undefined|null} `undefined` or `null` to set original value. + * @returns {unknown} + * + * 1. Different value or value with different data type to set new value. + * 2. Same value to set parsed value if `parseTagValue: true`. 
+ * + * Defaults to `(tagName, val, jPath, hasAttributes, isLeafNode) => val` + */ + tagValueProcessor?: (name: string, value: unknown) => string; + + /** + * Control how attribute value should be parsed + * + * @param attrName + * @param attrValue + * @param jPath + * @returns {undefined|null} `undefined` or `null` to set original value + * @returns {unknown} + * + * Defaults to `(attrName, val, jPath) => val` + */ + attributeValueProcessor?: (name: string, value: unknown) => string; + + /** + * Whether to process default and DOCTYPE entities + * + * Defaults to `true` + */ + processEntities?: boolean; + + + oneListGroup?: boolean; +}; + +type ESchema = string | object | Array; + +type ValidationError = { + err: { + code: string; + msg: string, + line: number, + col: number + }; +}; + +export class XMLParser { + constructor(options?: X2jOptions); + parse(xmlData: string | Buffer ,validationOptions?: validationOptions | boolean): any; + /** + * Add Entity which is not by default supported by this library + * @param entityIdentifier {string} Eg: 'ent' for &ent; + * @param entityValue {string} Eg: '\r' + */ + addEntity(entityIdentifier: string, entityValue: string): void; +} + +export class XMLValidator{ + static validate( xmlData: string, options?: validationOptions): true | ValidationError; +} +export class XMLBuilder { + constructor(options?: XmlBuilderOptions); + build(jObj: any): any; +} diff --git a/node_modules/fast-xml-parser/src/fxp.js b/node_modules/fast-xml-parser/src/fxp.js new file mode 100644 index 0000000..9cfa0ac --- /dev/null +++ b/node_modules/fast-xml-parser/src/fxp.js @@ -0,0 +1,11 @@ +'use strict'; + +const validator = require('./validator'); +const XMLParser = require('./xmlparser/XMLParser'); +const XMLBuilder = require('./xmlbuilder/json2xml'); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/util.js b/node_modules/fast-xml-parser/src/util.js new file mode 100644 index 0000000..df0a60d --- /dev/null +++ b/node_modules/fast-xml-parser/src/util.js @@ -0,0 +1,72 @@ +'use strict'; + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. 
+ * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; diff --git a/node_modules/fast-xml-parser/src/v5/CharsSymbol.js b/node_modules/fast-xml-parser/src/v5/CharsSymbol.js new file mode 100644 index 0000000..fa5ce9e --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/CharsSymbol.js @@ -0,0 +1,16 @@ +modules.export = { + "<" : "<", //tag start + ">" : ">", //tag end + "/" : "/", //close tag + "!" : "!", //comment or docttype + "!--" : "!--", //comment + "-->" : "-->", //comment end + "?" : "?", //pi + "?>" : "?>", //pi end + "?xml" : "?xml", //pi end + "![" : "![", //cdata + "]]>" : "]]>", //cdata end + "[" : "[", + "-" : "-", + "D" : "D", +} \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/EntitiesParser.js b/node_modules/fast-xml-parser/src/v5/EntitiesParser.js new file mode 100644 index 0000000..62cc02f --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/EntitiesParser.js @@ -0,0 +1,107 @@ +const ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; +const htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, +}; + +class EntitiesParser{ + constructor(replaceHtmlEntities) { + this.replaceHtmlEntities = replaceHtmlEntities; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + } + + addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.addExternalEntity(ent,externalEntities[ent]) + } + } + addExternalEntity(key,val){ + validateEntityName(key); + if(val.indexOf("&") !== -1) { + reportWarning(`Entity ${key} is not added as '&' is found in value;`) + return; + }else{ + this.lastEntities[ent] = { + regex: new RegExp("&"+key+";","g"), + val : val + } + } + } + + addDocTypeEntities(entities){ + const entKeys = 
Object.keys(entities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.docTypeEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : entities[ent] + } + } + } + + parse(val){ + return this.replaceEntitiesValue(val) + } + + /** + * 1. Replace DOCTYPE entities + * 2. Replace external entities + * 3. Replace HTML entities if asked + * @param {string} val + */ + replaceEntitiesValue(val){ + if(typeof val === "string" && val.length > 0){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.replaceHtmlEntities){ + for(let entityName in htmlEntities){ + const entity = htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( ampEntity.regex, ampEntity.val); + } + return val; + } +}; + +//an entity name should not contains special characters that may be used in regex +//Eg !?\\\/[]$%{}^&*()<> +const specialChar = "!?\\\/[]$%{}^&*()<>|+"; + +function validateEntityName(name){ + for (let i = 0; i < specialChar.length; i++) { + const ch = specialChar[i]; + if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`); + } + return name; +} + +module.exports = EntitiesParser; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js b/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js new file mode 100644 index 0000000..be1f1d4 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js @@ -0,0 +1,64 @@ + +const JsObjOutputBuilder = require("./OutputBuilders/JsObjBuilder"); + +const defaultOptions = { + preserveOrder: false, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + //ignoreRootElement : false, + stopNodes: [], //nested tags will not be parsed even for errors + // isArray: () => false, //User will set it + htmlEntities: false, + // skipEmptyListItem: false + tags:{ + unpaired: [], + nameFor:{ + cdata: false, + comment: false, + text: '#text' + }, + separateTextProperty: false, + }, + attributes:{ + ignore: false, + booleanType: true, + entities: true, + }, + + // select: ["img[src]"], + // stop: ["anim", "[ads]"] + only: [], // rest tags will be skipped. It will result in flat array + hierarchy: false, //will be used when a particular tag is set to be parsed. + skip: [], // will be skipped from parse result. on('skip') will be triggered + + select: [], // on('select', tag => tag ) will be called if match + stop: [], //given tagPath will not be parsed. innerXML will be set as string value + OutputBuilder: new JsObjOutputBuilder(), +}; + +const buildOptions = function(options) { + const finalOptions = { ... 
defaultOptions}; + copyProperties(finalOptions,options) + return finalOptions; +}; + +function copyProperties(target, source) { + for (let key in source) { + if (source.hasOwnProperty(key)) { + if (key === 'OutputBuilder') { + target[key] = source[key]; + }else if (typeof source[key] === 'object' && !Array.isArray(source[key])) { + // Recursively copy nested properties + if (typeof target[key] === 'undefined') { + target[key] = {}; + } + copyProperties(target[key], source[key]); + } else { + // Copy non-nested properties + target[key] = source[key]; + } + } + } +} + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js b/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js new file mode 100644 index 0000000..be2d478 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js @@ -0,0 +1,71 @@ +class BaseOutputBuilder{ + constructor(){ + // this.attributes = {}; + } + + addAttribute(name, value){ + if(this.options.onAttribute){ + //TODO: better to pass tag path + const v = this.options.onAttribute(name, value, this.tagName); + if(v) this.attributes[v.name] = v.value; + }else{ + name = this.options.attributes.prefix + name + this.options.attributes.suffix; + this.attributes[name] = this.parseValue(value, this.options.attributes.valueParsers); + } + } + + /** + * parse value by chain of parsers + * @param {string} val + * @returns {any} parsed value if matching parser found + */ + parseValue = function(val, valParsers){ + for (let i = 0; i < valParsers.length; i++) { + let valParser = valParsers[i]; + if(typeof valParser === "string"){ + valParser = this.registeredParsers[valParser]; + } + if(valParser){ + val = valParser.parse(val); + } + } + return val; + } + + /** + * To add a nested empty tag. 
+ * @param {string} key + * @param {any} val + */ + _addChild(key, val){} + + /** + * skip the comment if property is not set + */ + addComment(text){ + if(this.options.nameFor.comment) + this._addChild(this.options.nameFor.comment, text); + } + + //store CDATA separately if property is set + //otherwise add to tag's value + addCdata(text){ + if (this.options.nameFor.cdata) { + this._addChild(this.options.nameFor.cdata, text); + } else { + this.addRawValue(text || ""); + } + } + + addRawValue = text => this.addValue(text); + + addDeclaration(){ + if(!this.options.declaration){ + }else{ + this.addPi("?xml"); + } + this.attributes = {} + } +} + +module.exports = BaseOutputBuilder; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js b/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js new file mode 100644 index 0000000..05ad1e1 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js @@ -0,0 +1,103 @@ +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(options){ + this.options = buildOptions(options); + this.registeredParsers = registerCommonValueParsers(); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsArrBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const rootName = '!js_arr'; +const BaseOutputBuilder = require("./BaseOutputBuilder"); + +class JsArrBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, options,registeredParsers) { + super(); + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = options; + this.registeredParsers = registeredParsers; + + this.root = new Node(rootName); + this.currentNode = this.root; + this.attributes = {}; + } + + addTag(tag){ + //when a new tag is added, it should be added as child of current node + //TODO: shift this check to the parser + if(tag.name === "__proto__") tag.name = "#__proto__"; + + this.tagsStack.push(this.currentNode); + this.currentNode = new Node(tag.name, this.attributes); + this.attributes = {}; + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const node = this.currentNode; + this.currentNode = this.tagsStack.pop(); //set parent node in scope + if(this.options.onClose !== undefined){ + //TODO TagPathMatcher + const resultTag = this.options.onClose(node, + new TagPathMatcher(this.tagsStack,node)); + + if(resultTag) return; + } + this.currentNode.child.push(node); //to parent node + } + + //Called by parent class methods + _addChild(key, val){ + // if(key === "__proto__") tagName = "#__proto__"; + this.currentNode.child.push( {[key]: val }); + // this.currentNode.leafType = false; + } + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + this.currentNode.child.push( {[this.options.nameFor.text]: this.parseValue(text, this.options.tags.valueParsers) }); + } + + addPi(name){ + //TODO: set pi flag + if(!this.options.ignorePiTags){ + const node = new Node(name, this.attributes); + this.currentNode[":@"] = this.attributes; + this.currentNode.child.push(node); + } + this.attributes = {}; + } + getOutput(){ + return this.root.child[0]; + } +} + + + +class Node{ + constructor(tagname, 
attributes){ + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments + if(attributes && Object.keys(attributes).length > 0) + this[":@"] = attributes; + } +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js b/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js new file mode 100644 index 0000000..84281cb --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js @@ -0,0 +1,102 @@ +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(options){ + this.options = buildOptions(options); + this.registeredParsers = registerCommonValueParsers(); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsMinArrBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const BaseOutputBuilder = require("./BaseOutputBuilder"); +const rootName = '^'; + +class JsMinArrBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, options,registeredParsers) { + super(); + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = options; + this.registeredParsers = registeredParsers; + + this.root = {[rootName]: []}; + this.currentNode = this.root; + this.currentNodeTagName = rootName; + this.attributes = {}; + } + + addTag(tag){ + //when a new tag is added, it should be added as child of current node + //TODO: shift this check to the parser + if(tag.name === "__proto__") tag.name = "#__proto__"; + + this.tagsStack.push([this.currentNodeTagName,this.currentNode]); //this.currentNode is parent node here + this.currentNodeTagName = tag.name; + this.currentNode = { [tag.name]:[]} + if(Object.keys(this.attributes).length > 0){ + this.currentNode[":@"] = this.attributes; + this.attributes = {}; + } + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const node = this.currentNode; + const nodeName = this.currentNodeTagName; + const arr = this.tagsStack.pop(); //set parent node in scope + this.currentNodeTagName = arr[0]; + this.currentNode = arr[1]; + + if(this.options.onClose !== undefined){ + //TODO TagPathMatcher + const resultTag = this.options.onClose(node, + new TagPathMatcher(this.tagsStack,node)); + + if(resultTag) return; + } + this.currentNode[this.currentNodeTagName].push(node); //to parent node + } + + //Called by parent class methods + _addChild(key, val){ + // if(key === "__proto__") tagName = "#__proto__"; + this.currentNode.push( {[key]: val }); + // this.currentNode.leafType = false; + } + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + this.currentNode[this.currentNodeTagName].push( {[this.options.nameFor.text]: this.parseValue(text, this.options.tags.valueParsers) }); + } + + addPi(name){ + if(!this.options.ignorePiTags){ + const node = { [name]:[]} + if(this.attributes){ + node[":@"] = this.attributes; + } + this.currentNode.push(node); + } + this.attributes = {}; + } + getOutput(){ + return this.root[rootName]; + } +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js 
b/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js new file mode 100644 index 0000000..261b0fc --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js @@ -0,0 +1,156 @@ + + +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(builderOptions){ + this.options = buildOptions(builderOptions); + this.registeredParsers = registerCommonValueParsers(); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsObjBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const BaseOutputBuilder = require("./BaseOutputBuilder"); +const rootName = '^'; + +class JsObjBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, builderOptions,registeredParsers) { + super(); + //hold the raw detail of a tag and sequence with reference to the output + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = builderOptions; + this.registeredParsers = registeredParsers; + + this.root = {}; + this.parent = this.root; + this.tagName = rootName; + this.value = {}; + this.textValue = ""; + this.attributes = {}; + } + + addTag(tag){ + + let value = ""; + if( !isEmpty(this.attributes)){ + value = {}; + if(this.options.attributes.groupBy){ + value[this.options.attributes.groupBy] = this.attributes; + }else{ + value = this.attributes; + } + } + + this.tagsStack.push([this.tagName, this.textValue, this.value]); //parent tag, parent text value, parent tag value (jsobj) + this.tagName = tag.name; + this.value = value; + this.textValue = ""; + this.attributes = {}; + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const tagName = this.tagName; + let value = this.value; + let textValue = this.textValue; + + //update tag text value + if(typeof value !== "object" && !Array.isArray(value)){ + value = this.parseValue(textValue.trim(), this.options.tags.valueParsers); + }else if(textValue.length > 0){ + value[this.options.nameFor.text] = this.parseValue(textValue.trim(), this.options.tags.valueParsers); + } + + + let resultTag= { + tagName: tagName, + value: value + }; + + if(this.options.onTagClose !== undefined){ + //TODO TagPathMatcher + resultTag = this.options.onClose(tagName, value, this.textValue, new TagPathMatcher(this.tagsStack,node)); + + if(!resultTag) return; + } + + //set parent node in scope + let arr = this.tagsStack.pop(); + let parentTag = arr[2]; + parentTag=this._addChildTo(resultTag.tagName, resultTag.value, parentTag); + + this.tagName = arr[0]; + this.textValue = arr[1]; + this.value = parentTag; + } + + _addChild(key, val){ + if(typeof this.value === "string"){ + this.value = { [this.options.nameFor.text] : this.value }; + } + + this._addChildTo(key, val, this.value); + // this.currentNode.leafType = false; + this.attributes = {}; + } + + _addChildTo(key, val, node){ + if(typeof node === 'string') node = {}; + if(!node[key]){ + node[key] = val; + }else{ //Repeated + if(!Array.isArray(node[key])){ //but not stored as array + node[key] = [node[key]]; + } + node[key].push(val); + } + return node; + } + + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + //TODO: use bytes join + if(this.textValue.length > 0) this.textValue += " " + text; + 
else this.textValue = text; + } + + addPi(name){ + let value = ""; + if( !isEmpty(this.attributes)){ + value = {}; + if(this.options.attributes.groupBy){ + value[this.options.attributes.groupBy] = this.attributes; + }else{ + value = this.attributes; + } + } + this._addChild(name, value); + + } + getOutput(){ + return this.value; + } +} + +function isEmpty(obj) { + return Object.keys(obj).length === 0; +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js b/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js new file mode 100644 index 0000000..0776fdf --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js @@ -0,0 +1,96 @@ +const trimParser = require("../valueParsers/trim") +const booleanParser = require("../valueParsers/booleanParser") +const currencyParser = require("../valueParsers/currency") +const numberParser = require("../valueParsers/number") + +const defaultOptions={ + nameFor:{ + text: "#text", + comment: "", + cdata: "", + }, + // onTagClose: () => {}, + // onAttribute: () => {}, + piTag: false, + declaration: false, //"?xml" + tags: { + valueParsers: [ + // "trim", + // "boolean", + // "number", + // "currency", + // "date", + ] + }, + attributes:{ + prefix: "@_", + suffix: "", + groupBy: "", + + valueParsers: [ + // "trim", + // "boolean", + // "number", + // "currency", + // "date", + ] + } +} + +//TODO +const withJoin = ["trim","join", /*"entities",*/"number","boolean","currency"/*, "date"*/] +const withoutJoin = ["trim", /*"entities",*/"number","boolean","currency"/*, "date"*/] + +function buildOptions(options){ + //clone + const finalOptions = { ... defaultOptions}; + + //add config missed in cloning + finalOptions.tags.valueParsers.push(...withJoin) + if(!this.preserveOrder) + finalOptions.tags.valueParsers.push(...withoutJoin); + + //add config missed in cloning + finalOptions.attributes.valueParsers.push(...withJoin) + + //override configuration + copyProperties(finalOptions,options); + return finalOptions; +} + +function copyProperties(target, source) { + for (let key in source) { + if (source.hasOwnProperty(key)) { + if (typeof source[key] === 'object' && !Array.isArray(source[key])) { + // Recursively copy nested properties + if (typeof target[key] === 'undefined') { + target[key] = {}; + } + copyProperties(target[key], source[key]); + } else { + // Copy non-nested properties + target[key] = source[key]; + } + } + } +} + +function registerCommonValueParsers(){ + return { + "trim": new trimParser(), + // "join": this.entityParser.parse, + "boolean": new booleanParser(), + "number": new numberParser({ + hex: true, + leadingZeros: true, + eNotation: true + }), + "currency": new currencyParser(), + // "date": this.entityParser.parse, + } +} + +module.exports = { + buildOptions : buildOptions, + registerCommonValueParsers: registerCommonValueParsers +} \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/Report.js b/node_modules/fast-xml-parser/src/v5/Report.js new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/fast-xml-parser/src/v5/TagPath.js b/node_modules/fast-xml-parser/src/v5/TagPath.js new file mode 100644 index 0000000..d901cc3 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/TagPath.js @@ -0,0 +1,81 @@ +class TagPath{ + constructor(pathStr){ + let text = ""; + let tName = ""; + let pos; + let aName = ""; + let aVal = ""; + this.stack = [] + + for (let i = 
0; i < pathStr.length; i++) { + let ch = pathStr[i]; + if(ch === " ") { + if(text.length === 0) continue; + tName = text; text = ""; + }else if(ch === "["){ + if(tName.length === 0){ + tName = text; text = ""; + } + i++; + for (; i < pathStr.length; i++) { + ch = pathStr[i]; + if(ch=== "=") continue; + else if(ch=== "]") {aName = text.trim(); text=""; break; i--;} + else if(ch === "'" || ch === '"'){ + let attrEnd = pathStr.indexOf(ch,i+1); + aVal = pathStr.substring(i+1, attrEnd); + i = attrEnd; + }else{ + text +=ch; + } + } + }else if(ch !== " " && text.length === 0 && tName.length > 0){//reading tagName + //save previous tag + this.stack.push(new TagPathNode(tName,pos,aName,aVal)); + text = ch; tName = ""; aName = ""; aVal = ""; + }else{ + text+=ch; + } + } + + //last tag in the path + if(tName.length >0 || text.length>0){ + this.stack.push(new TagPathNode(text||tName,pos,aName,aVal)); + } + } + + match(tagStack,node){ + if(this.stack[0].name !== "*"){ + if(this.stack.length !== tagStack.length +1) return false; + + //loop through tagPath and tagStack and match + for (let i = 0; i < this.tagStack.length; i++) { + if(!this.stack[i].match(tagStack[i])) return false; + } + } + if(!this.stack[this.stack.length - 1].match(node)) return false; + return true; + } +} + +class TagPathNode{ + constructor(name,position,attrName,attrVal){ + this.name = name; + this.position = position; + this.attrName = attrName, + this.attrVal = attrVal; + } + + match(node){ + let matching = true; + matching = node.name === this.name; + if(this.position) matching = node.position === this.position; + if(this.attrName) matching = node.attrs[this.attrName !== undefined]; + if(this.attrVal) matching = node.attrs[this.attrName !== this.attrVal]; + return matching; + } +} + +// console.log((new TagPath("* b[b]")).stack); +// console.log((new TagPath("a[a] b[b] c")).stack); +// console.log((new TagPath(" b [ b= 'cf sdadwa' ] a ")).stack); \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js b/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js new file mode 100644 index 0000000..af23607 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js @@ -0,0 +1,15 @@ +const TagPath = require("./TagPath"); + +class TagPathMatcher{ + constructor(stack,node){ + this.stack = stack; + this.node= node; + } + + match(path){ + const tagPath = new TagPath(path); + return tagPath.match(this.stack, this.node); + } +} + +module.exports = TagPathMatcher; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/XMLParser.js b/node_modules/fast-xml-parser/src/v5/XMLParser.js new file mode 100644 index 0000000..6de58ed --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/XMLParser.js @@ -0,0 +1,85 @@ +const { buildOptions} = require("./OptionsBuilder"); +const Xml2JsParser = require("./Xml2JsParser"); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + // console.log(this.options) + } + /** + * Parse XML data string to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData){ + if(Array.isArray(xmlData) && xmlData.byteLength !== undefined){ + return this.parse(xmlData); + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + // if( validationOption){ + // if(validationOption === true) validationOption = {}; //validate with default options + + // const 
result = validator.validate(xmlData, validationOption); + // if (result !== true) { + // throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + // } + // } + const parser = new Xml2JsParser(this.options); + parser.entityParser.addExternalEntities(this.externalEntities); + return parser.parse(xmlData); + } + /** + * Parse XML data buffer to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parseBytesArr(xmlData){ + if(Array.isArray(xmlData) && xmlData.byteLength !== undefined){ + }else{ + throw new Error("XML data is accepted in Bytes[] form.") + } + const parser = new Xml2JsParser(this.options); + parser.entityParser.addExternalEntities(this.externalEntities); + return parser.parseBytesArr(xmlData); + } + /** + * Parse XML data stream to JS object + * @param {fs.ReadableStream} xmlDataStream + */ + parseStream(xmlDataStream){ + if(!isStream(xmlDataStream)) throw new Error("FXP: Invalid stream input"); + + const orderedObjParser = new Xml2JsParser(this.options); + orderedObjParser.entityParser.addExternalEntities(this.externalEntities); + return orderedObjParser.parseStream(xmlDataStream); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +function isStream(stream){ + if(stream && typeof stream.read === "function" && typeof stream.on === "function" && typeof stream.readableEnded === "boolean") return true; + return false; +} + +module.exports = XMLParser; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js b/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js new file mode 100644 index 0000000..c4baab4 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js @@ -0,0 +1,237 @@ +const StringSource = require("./inputSource/StringSource"); +const BufferSource = require("./inputSource/BufferSource"); +const {readTagExp,readClosingTagName} = require("./XmlPartReader"); +const {readComment, readCdata,readDocType,readPiTag} = require("./XmlSpecialTagsReader"); +const TagPath = require("./TagPath"); +const TagPathMatcher = require("./TagPathMatcher"); +const EntitiesParser = require('./EntitiesParser'); + +//To hold the data of current tag +//This is usually used to compare jpath expression against current tag +class TagDetail{ + constructor(name){ + this.name = name; + this.position = 0; + // this.attributes = {}; + } +} + +class Xml2JsParser { + constructor(options) { + this.options = options; + + this.currentTagDetail = null; + this.tagTextData = ""; + this.tagsStack = []; + this.entityParser = new EntitiesParser(options.htmlEntities); + this.stopNodes = []; + for (let i = 0; i < this.options.stopNodes.length; i++) { + this.stopNodes.push(new TagPath(this.options.stopNodes[i])); + } + } + + parse(strData) { + this.source = new StringSource(strData); + this.parseXml(); + return this.outputBuilder.getOutput(); + } + parseBytesArr(data) { + this.source = new BufferSource(data ); + this.parseXml(); + return this.outputBuilder.getOutput(); + } + + parseXml() { + //TODO: Separate TagValueParser as 
separate class. So no scope issue in node builder class + + //OutputBuilder should be set in XML Parser + this.outputBuilder = this.options.OutputBuilder.getInstance(this.options); + this.root = { root: true}; + this.currentTagDetail = this.root; + + while(this.source.canRead()){ + let ch = this.source.readCh(); + if (ch === "") break; + + if(ch === "<"){//tagStart + let nextChar = this.source.readChAt(0); + if (nextChar === "" ) throw new Error("Unexpected end of source"); + + + if(nextChar === "!" || nextChar === "?"){ + this.source.updateBufferBoundary(); + //previously collected text should be added to current node + this.addTextNode(); + + this.readSpecialTag(nextChar);// Read DOCTYPE, comment, CDATA, PI tag + }else if(nextChar === "/"){ + this.source.updateBufferBoundary(); + this.readClosingTag(); + // console.log(this.source.buffer.length, this.source.readable); + // console.log(this.tagsStack.length); + }else{//opening tag + this.readOpeningTag(); + } + }else{ + this.tagTextData += ch; + } + }//End While loop + if(this.tagsStack.length > 0 || ( this.tagTextData !== "undefined" && this.tagTextData.trimEnd().length > 0) ) throw new Error("Unexpected data in the end of document"); + } + + /** + * read closing paired tag. Set parent tag in scope. + * skip a node on user's choice + */ + readClosingTag(){ + const tagName = this.processTagName(readClosingTagName(this.source)); + // console.log(tagName, this.tagsStack.length); + this.validateClosingTag(tagName); + // All the text data collected, belongs to current tag. + if(!this.currentTagDetail.root) this.addTextNode(); + this.outputBuilder.closeTag(); + // Since the tag is closed now, parent tag comes in scope + this.currentTagDetail = this.tagsStack.pop(); + } + + validateClosingTag(tagName){ + // This can't be unpaired tag, or a stop tag. + if(this.isUnpaired(tagName) || this.isStopNode(tagName)) throw new Error(`Unexpected closing tag '${tagName}'`); + // This must match with last opening tag + else if(tagName !== this.currentTagDetail.name) + throw new Error(`Unexpected closing tag '${tagName}' expecting '${this.currentTagDetail.name}'`) + } + + /** + * Read paired, unpaired, self-closing, stop and special tags. + * Create a new node + * Push paired tag in stack. 
+ */ + readOpeningTag(){ + //save previously collected text data to current node + this.addTextNode(); + + //create new tag + let tagExp = readTagExp(this, ">" ); + + // process and skip from tagsStack For unpaired tag, self closing tag, and stop node + const tagDetail = new TagDetail(tagExp.tagName); + if(this.isUnpaired(tagExp.tagName)) { + //TODO: this will lead 2 extra stack operation + this.outputBuilder.addTag(tagDetail); + this.outputBuilder.closeTag(); + } else if(tagExp.selfClosing){ + this.outputBuilder.addTag(tagDetail); + this.outputBuilder.closeTag(); + } else if(this.isStopNode(this.currentTagDetail)){ + // TODO: let's user set a stop node boundary detector for complex contents like script tag + //TODO: pass tag name only to avoid string operations + const content = source.readUptoCloseTag(` 0){ + //TODO: shift parsing to output builder + + this.outputBuilder.addValue(this.replaceEntities(this.tagTextData)); + } + this.tagTextData = ""; + } + // } + } + + processAttrName(name){ + if(name === "__proto__") name = "#__proto__"; + name = resolveNameSpace(name, this.removeNSPrefix); + return name; + } + + processTagName(name){ + if(name === "__proto__") name = "#__proto__"; + name = resolveNameSpace(name, this.removeNSPrefix); + return name; + } + + /** + * Generate tags path from tagsStack + */ + tagsPath(tagName){ + //TODO: return TagPath Object. User can call match method with path + return ""; + } + + isUnpaired(tagName){ + return this.options.tags.unpaired.indexOf(tagName) !== -1; + } + + /** + * valid expressions are + * tag nested + * * nested + * tag nested[attribute] + * tag nested[attribute=""] + * tag nested[attribute!=""] + * tag nested:0 //for future + * @param {string} tagName + * @returns + */ + isStopNode(node){ + for (let i = 0; i < this.stopNodes.length; i++) { + const givenPath = this.stopNodes[i]; + if(givenPath.match(this.tagsStack, node)) return true; + } + return false + } + + replaceEntities(text){ + //TODO: if option is set then replace entities + return this.entityParser.parse(text) + } +} + +function resolveNameSpace(name, removeNSPrefix) { + if (removeNSPrefix) { + const parts = name.split(':'); + if(parts.length === 2){ + if (parts[0] === 'xmlns') return ''; + else return parts[1]; + }else reportError(`Multiple namespaces ${name}`) + } + return name; +} + +module.exports = Xml2JsParser; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/XmlPartReader.js b/node_modules/fast-xml-parser/src/v5/XmlPartReader.js new file mode 100644 index 0000000..56b180e --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/XmlPartReader.js @@ -0,0 +1,212 @@ +'use strict'; + +/** + * find paired tag for a stop node + * @param {string} xmlDoc + * @param {string} tagName + * @param {number} i : start index + */ +function readStopNode(xmlDoc, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlDoc.length; i++) { + if( xmlDoc[i] === "<"){ + if (xmlDoc[i+1] === "/") {//close tag + const closeIndex = findSubStrIndex(xmlDoc, ">", i, `${tagName} is not closed`); + let closeTagName = xmlDoc.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlDoc.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlDoc[i+1] === '?') { + const closeIndex = findSubStrIndex(xmlDoc, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlDoc.substr(i + 1, 3) === 
'!--') { + const closeIndex = findSubStrIndex(xmlDoc, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlDoc.substr(i + 1, 2) === '![') { + const closeIndex = findSubStrIndex(xmlDoc, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlDoc, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +/** + * Read closing tag name + * @param {Source} source + * @returns tag name + */ +function readClosingTagName(source){ + let text = ""; //temporary data + while(source.canRead()){ + let ch = source.readCh(); + // if (ch === null || ch === undefined) break; + // source.updateBuffer(); + + if (ch === ">") return text.trimEnd(); + else text += ch; + } + throw new Error(`Unexpected end of source. Reading '${substr}'`); +} + +/** + * Read XML tag and build attributes map + * This function can be used to read normal tag, pi tag. + * This function can't be used to read comment, CDATA, DOCTYPE. + * Eg + * @param {string} xmlDoc + * @param {number} startIndex starting index + * @returns tag expression includes tag name & attribute string + */ +function readTagExp(parser) { + let inSingleQuotes = false; + let inDoubleQuotes = false; + let i; + let EOE = false; + + for (i = 0; parser.source.canRead(i); i++) { + const char = parser.source.readChAt(i); + + if (char === "'" && !inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } else if (char === '"' && !inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } else if (char === '>' && !inSingleQuotes && !inDoubleQuotes) { + // If not inside quotes, stop reading at '>' + EOE = true; + break; + } + + } + if(inSingleQuotes || inDoubleQuotes){ + throw new Error("Invalid attribute expression. Quote is not properly closed"); + }else if(!EOE) throw new Error("Unexpected closing of source. Waiting for '>'"); + + + const exp = parser.source.readStr(i); + parser.source.updateBufferBoundary(i + 1); + return buildTagExpObj(exp, parser) +} + +function readPiExp(parser) { + let inSingleQuotes = false; + let inDoubleQuotes = false; + let i; + let EOE = false; + + for (i = 0; parser.source.canRead(i) ; i++) { + const currentChar = parser.source.readChAt(i); + const nextChar = parser.source.readChAt(i+1); + + if (currentChar === "'" && !inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } else if (currentChar === '"' && !inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } + + if (!inSingleQuotes && !inDoubleQuotes) { + if (currentChar === '?' && nextChar === '>') { + EOE = true; + break; // Exit the loop when '?>' is found + } + } + } + if(inSingleQuotes || inDoubleQuotes){ + throw new Error("Invalid attribute expression. Quote is not properly closed in PI tag expression"); + }else if(!EOE) throw new Error("Unexpected closing of source. 
Waiting for '?>'"); + + if(!parser.options.attributes.ignore){ + //TODO: use regex to verify attributes if not set to ignore + } + + const exp = parser.source.readStr(i); + parser.source.updateBufferBoundary(i + 1); + return buildTagExpObj(exp, parser) +} + +function buildTagExpObj(exp, parser){ + const tagExp = { + tagName: "", + selfClosing: false + }; + let attrsExp = ""; + + if(exp[exp.length -1] === "/") tagExp.selfClosing = true; + + //separate tag name + let i = 0; + for (; i < exp.length; i++) { + const char = exp[i]; + if(char === " "){ + tagExp.tagName = exp.substring(0, i); + attrsExp = exp.substring(i + 1); + break; + } + } + //only tag + if(tagExp.tagName.length === 0 && i === exp.length)tagExp.tagName = exp; + + tagExp.tagName = tagExp.tagName.trimEnd(); + + if(!parser.options.attributes.ignore && attrsExp.length > 0){ + parseAttributesExp(attrsExp,parser) + } + + return tagExp; +} + +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function parseAttributesExp(attrStr, parser) { + const matches = getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + for (let i = 0; i < len; i++) { + let attrName = parser.processAttrName(matches[i][1]); + let attrVal = parser.replaceEntities(matches[i][4] || true); + + parser.outputBuilder.addAttribute(attrName, attrVal); + } +} + + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +module.exports = { + readStopNode: readStopNode, + readClosingTagName: readClosingTagName, + readTagExp: readTagExp, + readPiExp: readPiExp, +} \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js b/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js new file mode 100644 index 0000000..0fba196 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js @@ -0,0 +1,118 @@ +const {readPiExp} = require("./XmlPartReader"); + +function readCdata(parser){ + //"); + parser.outputBuilder.addCdata(text); +} +function readPiTag(parser){ + //"); + if(!tagExp) throw new Error("Invalid Pi Tag expression."); + + if (tagExp.tagName === "?xml") {//TODO: test if tagName is just xml + parser.outputBuilder.addDeclaration(); + } else { + parser.outputBuilder.addPi("?"+tagExp.tagName); + } +} + +function readComment(parser){ + //"); + parser.outputBuilder.addComment(text); +} + +const DOCTYPE_tags = { + "EL":/^EMENT\s+([^\s>]+)\s+(ANY|EMPTY|\(.+\)\s*$)/m, + "AT":/^TLIST\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+$/m, + "NO":/^TATION.+$/m +} +function readDocType(parser){ + //"); + const regx = DOCTYPE_tags[str]; + if(regx){ + const match = dTagExp.match(regx); + if(!match) throw new Error("Invalid DOCTYPE"); + }else throw new Error("Invalid DOCTYPE"); + } + }else if( ch === '>' && lastch === "]"){//end of doctype + return; + } + }else if( ch === '>'){//end of doctype + return; + }else if( ch === '['){ + hasBody = true; + }else{ + lastch = ch; + } + }//End While loop + +} + +function registerEntity(parser){ + //read Entity + let attrBoundary=""; + let name ="", val =""; + while(source.canRead()){ + let ch = source.readCh(); + + if(attrBoundary){ + if (ch === attrBoundary){ + val = text; + text = 
"" + } + }else if(ch === " " || ch === "\t"){ + if(!name){ + name = text.trimStart(); + text = ""; + } + }else if (ch === '"' || ch === "'") {//start of attrBoundary + attrBoundary = ch; + }else if(ch === ">"){ + parser.entityParser.addExternalEntity(name,val); + return; + }else{ + text+=ch; + } + } +} + +module.exports = { + readCdata: readCdata, + readComment:readComment, + readDocType:readDocType, + readPiTag:readPiTag +} \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js b/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js new file mode 100644 index 0000000..b83ce46 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js @@ -0,0 +1,118 @@ +const Constants = { + space: 32, + tab: 9 +} +class BufferSource{ + constructor(bytesArr){ + this.line = 1; + this.cols = 0; + this.buffer = bytesArr; + this.startIndex = 0; + } + + + + readCh() { + return String.fromCharCode(this.buffer[this.startIndex++]); + } + + readChAt(index) { + return String.fromCharCode(this.buffer[this.startIndex+index]); + } + + readStr(n,from){ + if(typeof from === "undefined") from = this.startIndex; + return this.buffer.slice(from, from + n).toString(); + } + + readUpto(stopStr) { + const inputLength = this.buffer.length; + const stopLength = stopStr.length; + const stopBuffer = Buffer.from(stopStr); + + for (let i = this.startIndex; i < inputLength; i++) { + let match = true; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopBuffer[j]) { + match = false; + break; + } + } + + if (match) { + const result = this.buffer.slice(this.startIndex, i).toString(); + this.startIndex = i + stopLength; + return result; + } + } + + throw new Error(`Unexpected end of source. Reading '${stopStr}'`); +} + +readUptoCloseTag(stopStr) { //stopStr: "'){ //TODO: if it should be equivalent ASCII + match = 2; + //tag boundary found + // this.startIndex + } + }else{ + match = 1; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopBuffer[j]) { + match = 0; + break; + } + } + } + if (match === 2) {//matched closing part + const result = this.buffer.slice(this.startIndex, stopIndex - 1 ).toString(); + this.startIndex = i + 1; + return result; + } + } + + throw new Error(`Unexpected end of source. 
Reading '${stopStr}'`); +} + + readFromBuffer(n, shouldUpdate) { + let ch; + if (n === 1) { + ch = this.buffer[this.startIndex]; + if (ch === 10) { + this.line++; + this.cols = 1; + } else { + this.cols++; + } + ch = String.fromCharCode(ch); + } else { + this.cols += n; + ch = this.buffer.slice(this.startIndex, this.startIndex + n).toString(); + } + if (shouldUpdate) this.updateBuffer(n); + return ch; + } + + updateBufferBoundary(n = 1) { //n: number of characters read + this.startIndex += n; + } + + canRead(n){ + n = n || this.startIndex; + return this.buffer.length - n + 1 > 0; + } + +} + +module.exports = BufferSource; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js b/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js new file mode 100644 index 0000000..a996528 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js @@ -0,0 +1,123 @@ +const whiteSpaces = [" ", "\n", "\t"]; + + +class StringSource{ + constructor(str){ + this.line = 1; + this.cols = 0; + this.buffer = str; + //a boundary pointer to indicate where from the buffer dat should be read + // data before this pointer can be deleted to free the memory + this.startIndex = 0; + } + + readCh() { + return this.buffer[this.startIndex++]; + } + + readChAt(index) { + return this.buffer[this.startIndex+index]; + } + + readStr(n,from){ + if(typeof from === "undefined") from = this.startIndex; + return this.buffer.substring(from, from + n); + } + + readUpto(stopStr) { + const inputLength = this.buffer.length; + const stopLength = stopStr.length; + + for (let i = this.startIndex; i < inputLength; i++) { + let match = true; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopStr[j]) { + match = false; + break; + } + } + + if (match) { + const result = this.buffer.substring(this.startIndex, i); + this.startIndex = i + stopLength; + return result; + } + } + + throw new Error(`Unexpected end of source. Reading '${stopStr}'`); + } + + readUptoCloseTag(stopStr) { //stopStr: "'){ + match = 2; + //tag boundary found + // this.startIndex + } + }else{ + match = 1; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopStr[j]) { + match = 0; + break; + } + } + } + if (match === 2) {//matched closing part + const result = this.buffer.substring(this.startIndex, stopIndex - 1 ); + this.startIndex = i + 1; + return result; + } + } + + throw new Error(`Unexpected end of source. 
Reading '${stopStr}'`); + } + + readFromBuffer(n, updateIndex){ + let ch; + if(n===1){ + ch = this.buffer[this.startIndex]; + // if(ch === "\n") { + // this.line++; + // this.cols = 1; + // }else{ + // this.cols++; + // } + }else{ + ch = this.buffer.substring(this.startIndex, this.startIndex + n); + // if("".indexOf("\n") !== -1){ + // //TODO: handle the scenario when there are multiple lines + // //TODO: col should be set to number of chars after last '\n' + // // this.cols = 1; + // }else{ + // this.cols += n; + + // } + } + if(updateIndex) this.updateBufferBoundary(n); + return ch; + } + + //TODO: rename to updateBufferReadIndex + + updateBufferBoundary(n = 1) { //n: number of characters read + this.startIndex += n; + } + + canRead(n){ + n = n || this.startIndex; + return this.buffer.length - n + 1 > 0; + } + +} + +module.exports = StringSource; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js b/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js new file mode 100644 index 0000000..62cc02f --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js @@ -0,0 +1,107 @@ +const ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; +const htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, +}; + +class EntitiesParser{ + constructor(replaceHtmlEntities) { + this.replaceHtmlEntities = replaceHtmlEntities; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + } + + addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.addExternalEntity(ent,externalEntities[ent]) + } + } + addExternalEntity(key,val){ + validateEntityName(key); + if(val.indexOf("&") !== -1) { + reportWarning(`Entity ${key} is not added as '&' is found in value;`) + return; + }else{ + this.lastEntities[ent] = { + regex: new RegExp("&"+key+";","g"), + val : val + } + } + } + + addDocTypeEntities(entities){ + const entKeys = Object.keys(entities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.docTypeEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : entities[ent] + } + } + } + + parse(val){ + return this.replaceEntitiesValue(val) + } + + /** + * 1. Replace DOCTYPE entities + * 2. Replace external entities + * 3. 
Replace HTML entities if asked + * @param {string} val + */ + replaceEntitiesValue(val){ + if(typeof val === "string" && val.length > 0){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.replaceHtmlEntities){ + for(let entityName in htmlEntities){ + const entity = htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( ampEntity.regex, ampEntity.val); + } + return val; + } +}; + +//an entity name should not contains special characters that may be used in regex +//Eg !?\\\/[]$%{}^&*()<> +const specialChar = "!?\\\/[]$%{}^&*()<>|+"; + +function validateEntityName(name){ + for (let i = 0; i < specialChar.length; i++) { + const ch = specialChar[i]; + if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`); + } + return name; +} + +module.exports = EntitiesParser; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js b/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js new file mode 100644 index 0000000..f8f5d12 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js @@ -0,0 +1,23 @@ +class boolParser{ + constructor(trueList, falseList){ + if(trueList) + this.trueList = trueList; + else + this.trueList = ["true"]; + + if(falseList) + this.falseList = falseList; + else + this.falseList = ["false"]; + } + parse(val){ + if (typeof val === 'string') { + //TODO: performance: don't convert + const temp = val.toLowerCase(); + if(this.trueList.indexOf(temp) !== -1) return true; + else if(this.falseList.indexOf(temp) !== -1 ) return false; + } + return val; + } +} +module.exports = boolParser; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js b/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js new file mode 100644 index 0000000..21b8050 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js @@ -0,0 +1,20 @@ +function boolParserExt(val){ + if(isArray(val)){ + for (let i = 0; i < val.length; i++) { + val[i] = parse(val[i]) + } + }else{ + val = parse(val) + } + return val; +} + +function parse(val){ + if (typeof val === 'string') { + const temp = val.toLowerCase(); + if(temp === 'true' || temp ==="yes" || temp==="1") return true; + else if(temp === 'false' || temp ==="no" || temp==="0") return false; + } + return val; +} +module.exports = boolParserExt; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js b/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js new file mode 100644 index 0000000..5ec0000 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js @@ -0,0 +1,31 @@ + +const localeMap = { + "$":"en-US", + "€":"de-DE", + "£":"en-GB", + "¥":"ja-JP", + "₹":"en-IN", +} + +const currencyCheckRegex = /^\s*(?:-|\+)?(?:\d+|\d{1,3}(?:,\d{3})+)?(?:\.\d{1,2})?\s*(?:\$|€|¥|₹)?\s*$/u; + +class CurrencyParser{ + constructor(options){ + this.options = options; + } + parse(val){ + if (typeof val === 'string') { + if(val.indexOf(",,") !== -1 && val.indexOf(".." 
!== -1)){ + const match = val.match(currencyCheckRegex); + if(match){ + const locale = this.options.locale || localeMap[match[2]||match[5]||"₹"]; + const formatter = new Intl.NumberFormat(locale) + val = val.replace(/[^0-9,.]/g, '').trim(); + val = Number(val.replace(formatter.format(1000)[1], '')); + } + } + } + return val; + } +} +module.exports = CurrencyParser; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/valueParsers/join.js b/node_modules/fast-xml-parser/src/v5/valueParsers/join.js new file mode 100644 index 0000000..d7f2027 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/valueParsers/join.js @@ -0,0 +1,14 @@ +/** + * + * @param {array} val + * @param {string} by + * @returns + */ +function join(val, by=" "){ + if(isArray(val)){ + val.join(by) + } + return val; +} + +module.exports = join; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/valueParsers/number.js b/node_modules/fast-xml-parser/src/v5/valueParsers/number.js new file mode 100644 index 0000000..bef3803 --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/valueParsers/number.js @@ -0,0 +1,16 @@ +const toNumber = require("strnum"); + + +class numParser{ + constructor(options){ + this.options = options; + } + parse(val){ + if (typeof val === 'string') { + val = toNumber(val,this.options); + } + return val; + } +} + +module.exports = numParser; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js b/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js new file mode 100644 index 0000000..ecce49a --- /dev/null +++ b/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js @@ -0,0 +1,8 @@ +class trimmer{ + parse(val){ + if(typeof val === "string") return val.trim(); + else return val; + } +} + +module.exports = trimmer; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/validator.js b/node_modules/fast-xml-parser/src/validator.js new file mode 100644 index 0000000..11b051b --- /dev/null +++ b/node_modules/fast-xml-parser/src/validator.js @@ -0,0 +1,423 @@ +'use strict'; + +const util = require('./util'); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. 
depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. 
+ if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... + if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. 
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} diff --git a/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js b/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js new file mode 100644 index 0000000..5707ee0 --- /dev/null 
+++ b/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js @@ -0,0 +1,270 @@ +'use strict'; +//parse Empty Node as self closing node +const buildFromOrderedJs = require('./orderedJs2Xml'); + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } + + this.processTextOrObjNode = processTextOrObjNode + + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} + +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0).val; + } +}; + +Builder.prototype.j2x = function(jObj, level) { + let attrStr = ''; + let val = ''; + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + }else { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup ){ + listTagVal += this.j2x(item, level + 1).val; + }else{ + listTagVal += this.processTextOrObjNode(item, key, level) + } + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, '', level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level) + } + } + } + return {attrStr: attrStr, val: val}; +}; + +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level) { + const result = this.j2x(object, level + 1); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); + } +} + +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ + + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." 
+ tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; diff --git a/node_modules/fast-xml-parser/src/xmlbuilder/prettifyJs2Xml.js b/node_modules/fast-xml-parser/src/xmlbuilder/prettifyJs2Xml.js new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js b/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js new file mode 100644 index 0000000..bcf9dee --- /dev/null +++ b/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js @@ -0,0 +1,152 @@ +const util = require('../util'); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; diff --git a/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js b/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js new file mode 100644 index 0000000..bca3776 --- /dev/null +++ b/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js @@ -0,0 +1,48 @@ + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js b/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js new file mode 100644 index 0000000..7d1177f --- /dev/null +++ b/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js @@ -0,0 +1,593 @@ +'use strict'; +///@ts-check + +const util = require('../util'); +const xmlNode = require('./xmlNode'); +const readDocType = require("./DocTypeReader"); +const toNumber = require("strnum"); + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: 
"&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? 
'/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { 
+ + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." 
+ tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? 
Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } 
else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; diff --git a/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js b/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js new file mode 100644 index 0000000..ffaf59b --- /dev/null +++ b/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js @@ -0,0 +1,58 @@ +const { buildOptions} = require("./OptionsBuilder"); +const OrderedObjParser = require("./OrderedObjParser"); +const { prettify} = require("./node2json"); +const validator = require('../validator'); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; \ No newline at end of file diff --git a/node_modules/fast-xml-parser/src/xmlparser/node2json.js b/node_modules/fast-xml-parser/src/xmlparser/node2json.js new file mode 100644 index 0000000..3045573 --- /dev/null +++ b/node_modules/fast-xml-parser/src/xmlparser/node2json.js @@ -0,0 +1,113 @@ +'use strict'; + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
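For orientation, a minimal usage sketch of the `XMLParser` class vendored above, assuming the package's usual `{ XMLParser }` export; the XML payload, option values, and printed shapes are illustrative only.

```js
// Sketch: parsing with the vendored XMLParser class.
// `stopNodes` exercises readStopNodeData()/isItStopNode() from OrderedObjParser.js above.
const { XMLParser } = require('fast-xml-parser');

const xml = `<catalog>
  <book id="1"><title>First</title></book>
  <script>raw <b>markup</b> kept as text</script>
</catalog>`;

const parser = new XMLParser({
  ignoreAttributes: false, // keep attributes (default "@_" prefix)
  stopNodes: ['*.script'], // leave <script> content unparsed
});

console.log(parser.parse(xml));
// roughly: { catalog: { book: { title: 'First', '@_id': '1' },
//                       script: 'raw <b>markup</b> kept as text' } }

// addEntity() feeds replaceEntitiesValue() above with an extra entity.
parser.addEntity('copy', '(c)');
console.log(parser.parse('<note>&copy; 2024</note>'));
// roughly: { note: '(c) 2024' }
```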
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; diff --git a/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js b/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js new file mode 100644 index 0000000..9319524 --- /dev/null +++ b/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js @@ -0,0 +1,25 @@ +'use strict'; + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; \ No newline at end of file diff --git a/node_modules/gaxios/CHANGELOG.md b/node_modules/gaxios/CHANGELOG.md new file mode 100644 index 0000000..7237398 --- /dev/null +++ b/node_modules/gaxios/CHANGELOG.md @@ -0,0 +1,350 @@ +# Changelog + +## [6.3.0](https://github.com/googleapis/gaxios/compare/v6.2.0...v6.3.0) (2024-02-01) + + +### Features + +* Support URL objects ([#598](https://github.com/googleapis/gaxios/issues/598)) ([ef40c61](https://github.com/googleapis/gaxios/commit/ef40c61fabf0a48b2f08be085ee0c56dc32cf78c)) + +## [6.2.0](https://github.com/googleapis/gaxios/compare/v6.1.1...v6.2.0) (2024-01-31) + + +### Features + +* Extend `instanceof` Support for GaxiosError ([#593](https://github.com/googleapis/gaxios/issues/593)) ([4fd1fe2](https://github.com/googleapis/gaxios/commit/4fd1fe2c91b9888c4f976cf91e752d407b99ec75)) + + +### Bug Fixes + +* Do Not Mutate Config for Redacted Retries ([#597](https://github.com/googleapis/gaxios/issues/597)) ([4d1a551](https://github.com/googleapis/gaxios/commit/4d1a55134940031a1e0ff2392ab0b08c186166f0)) +* Return text when content type is text/* ([#579](https://github.com/googleapis/gaxios/issues/579)) ([3cc1c76](https://github.com/googleapis/gaxios/commit/3cc1c76a08d98daac01c83bed6f9480320ec0a37)) + +## [6.1.1](https://github.com/googleapis/gaxios/compare/v6.1.0...v6.1.1) (2023-09-07) + + +### Bug Fixes + +* Don't throw an error within a `GaxiosError` ([#569](https://github.com/googleapis/gaxios/issues/569)) ([035d9dd](https://github.com/googleapis/gaxios/commit/035d9dd833c3ad63148bc50facaee421f4792192)) + +## [6.1.0](https://github.com/googleapis/gaxios/compare/v6.0.4...v6.1.0) (2023-08-11) + + +### Features + +* Prevent Auth Logging by Default ([#565](https://github.com/googleapis/gaxios/issues/565)) ([b28e562](https://github.com/googleapis/gaxios/commit/b28e5628f5964e2ecc04cc1df8a54948567a4b48)) + +## [6.0.4](https://github.com/googleapis/gaxios/compare/v6.0.3...v6.0.4) (2023-08-03) + + +### Bug Fixes + +* **deps:** Update https-proxy-agent to 7.0.1 and fix imports and tests ([#560](https://github.com/googleapis/gaxios/issues/560)) 
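`node2json.js` and `xmlNode.js` above are the two halves of the output shape: `XmlNode` builds an ordered array of `{ tagName: children, ':@': attributes }` entries, and `compress()` flattens that into plain objects unless `preserveOrder` is set. A small illustrative sketch; output shapes are approximate.

```js
const { XMLParser } = require('fast-xml-parser');
const xml = '<a id="1"><b>x</b><b>y</b></a>';

// Default: the ordered tree is passed through prettify()/compress().
console.log(new XMLParser({ ignoreAttributes: false }).parse(xml));
// roughly: { a: { b: [ 'x', 'y' ], '@_id': '1' } }

// preserveOrder: true returns the raw XmlNode shape instead.
console.log(new XMLParser({ ignoreAttributes: false, preserveOrder: true }).parse(xml));
// roughly: [ { a: [ { b: [ { '#text': 'x' } ] },
//                   { b: [ { '#text': 'y' } ] } ],
//             ':@': { '@_id': '1' } } ]
```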
([5c877e2](https://github.com/googleapis/gaxios/commit/5c877e28c3c9336a87b50536c074cb215b779d8e)) + +## [6.0.3](https://github.com/googleapis/gaxios/compare/v6.0.2...v6.0.3) (2023-07-24) + + +### Bug Fixes + +* Handle invalid json when Content-Type=application/json ([#558](https://github.com/googleapis/gaxios/issues/558)) ([71602eb](https://github.com/googleapis/gaxios/commit/71602ebc2ab18d5af904b152723756f57fb13bce)) + +## [6.0.2](https://github.com/googleapis/gaxios/compare/v6.0.1...v6.0.2) (2023-07-20) + + +### Bug Fixes + +* Revert attempting to convert 'text/plain', leave as data in GaxiosError ([#556](https://github.com/googleapis/gaxios/issues/556)) ([d603bde](https://github.com/googleapis/gaxios/commit/d603bde35f698564c028108d24c6891cec3b8ea1)) + +## [6.0.1](https://github.com/googleapis/gaxios/compare/v6.0.0...v6.0.1) (2023-07-20) + + +### Bug Fixes + +* Add text to responseType switch statement ([#554](https://github.com/googleapis/gaxios/issues/554)) ([899cf1f](https://github.com/googleapis/gaxios/commit/899cf1fb088f49927624d526f6de212837a31b9d)) + +## [6.0.0](https://github.com/googleapis/gaxios/compare/v5.1.3...v6.0.0) (2023-07-12) + + +### ⚠ BREAKING CHANGES + +* add status as a number to GaxiosError, change code to error code as a string ([#552](https://github.com/googleapis/gaxios/issues/552)) +* migrate to Node 14 ([#548](https://github.com/googleapis/gaxios/issues/548)) +* examine response content-type if no contentType is set ([#535](https://github.com/googleapis/gaxios/issues/535)) + +### Bug Fixes + +* Add status as a number to GaxiosError, change code to error code as a string ([#552](https://github.com/googleapis/gaxios/issues/552)) ([88ba2e9](https://github.com/googleapis/gaxios/commit/88ba2e99e32b66d84725c9ea9ad95152bd1dc653)) +* Examine response content-type if no contentType is set ([#535](https://github.com/googleapis/gaxios/issues/535)) ([cd8ca7b](https://github.com/googleapis/gaxios/commit/cd8ca7b209f0ba932082a80ace8fec608a71facf)) + + +### Miscellaneous Chores + +* Migrate to Node 14 ([#548](https://github.com/googleapis/gaxios/issues/548)) ([b9b26eb](https://github.com/googleapis/gaxios/commit/b9b26eb2c4af35633efd91770aa24c4b5d9019b4)) + +## [5.1.3](https://github.com/googleapis/gaxios/compare/v5.1.2...v5.1.3) (2023-07-05) + + +### Bug Fixes + +* Translate GaxiosError message to object regardless of return type (return data as default) ([#546](https://github.com/googleapis/gaxios/issues/546)) ([adfd570](https://github.com/googleapis/gaxios/commit/adfd57068a98d03921d5383fed11a652a21d59dd)) + +## [5.1.2](https://github.com/googleapis/gaxios/compare/v5.1.1...v5.1.2) (2023-06-25) + + +### Bug Fixes + +* Revert changes to error handling due to downstream breakage ([#544](https://github.com/googleapis/gaxios/issues/544)) ([64fbf07](https://github.com/googleapis/gaxios/commit/64fbf07f3697f40b75a9e7dbe8bff7f6243a9e12)) + +## [5.1.1](https://github.com/googleapis/gaxios/compare/v5.1.0...v5.1.1) (2023-06-23) + + +### Bug Fixes + +* Translate GaxiosError message to object regardless of return type ([#537](https://github.com/googleapis/gaxios/issues/537)) ([563c653](https://github.com/googleapis/gaxios/commit/563c6537a06bc64d5c6e918090c00ec7a586cecb)) + +## [5.1.0](https://github.com/googleapis/gaxios/compare/v5.0.2...v5.1.0) (2023-03-06) + + +### Features + +* Add support for custom backoff ([#498](https://github.com/googleapis/gaxios/issues/498)) ([4a34467](https://github.com/googleapis/gaxios/commit/4a344678110864d97818a8272ebcc5e1c4921b52)) + +## 
[5.0.2](https://github.com/googleapis/gaxios/compare/v5.0.1...v5.0.2) (2022-09-09) + + +### Bug Fixes + +* Do not import the whole google-gax from proto JS ([#1553](https://github.com/googleapis/gaxios/issues/1553)) ([#501](https://github.com/googleapis/gaxios/issues/501)) ([6f58d1e](https://github.com/googleapis/gaxios/commit/6f58d1e80ce6b196d17fbc75dc47d8d20804920a)) +* use google-gax v3.3.0 ([6f58d1e](https://github.com/googleapis/gaxios/commit/6f58d1e80ce6b196d17fbc75dc47d8d20804920a)) + +## [5.0.1](https://github.com/googleapis/gaxios/compare/v5.0.0...v5.0.1) (2022-07-04) + + +### Bug Fixes + +* **types:** loosen AbortSignal type ([5a379ca](https://github.com/googleapis/gaxios/commit/5a379ca123f08f286c4774711a7a3293bffc1ea6)) + +## [5.0.0](https://github.com/googleapis/gaxios/compare/v4.3.3...v5.0.0) (2022-04-20) + + +### ⚠ BREAKING CHANGES + +* drop node 10 from engines list, update typescript to 4.6.3 (#477) + +### Features + +* handling missing process global ([c30395b](https://github.com/googleapis/gaxios/commit/c30395bbf34d889e75c7c72a7dff701dc7a98244)) + + +### Build System + +* drop node 10 from engines list, update typescript to 4.6.3 ([#477](https://github.com/googleapis/gaxios/issues/477)) ([a926962](https://github.com/googleapis/gaxios/commit/a9269624a70aa6305599cc0af079d0225ed6af50)) + +### [4.3.3](https://github.com/googleapis/gaxios/compare/v4.3.2...v4.3.3) (2022-04-08) + + +### Bug Fixes + +* do not stringify form data ([#475](https://github.com/googleapis/gaxios/issues/475)) ([17370dc](https://github.com/googleapis/gaxios/commit/17370dcdfd4568d7f3f0855961030d238166836a)) + +### [4.3.2](https://www.github.com/googleapis/gaxios/compare/v4.3.1...v4.3.2) (2021-09-14) + + +### Bug Fixes + +* address codeql warning with hostname matches ([#415](https://www.github.com/googleapis/gaxios/issues/415)) ([5a4d060](https://www.github.com/googleapis/gaxios/commit/5a4d06019343aa08e1bcf8e05108e22ac3b12636)) + +### [4.3.1](https://www.github.com/googleapis/gaxios/compare/v4.3.0...v4.3.1) (2021-09-02) + + +### Bug Fixes + +* **build:** switch primary branch to main ([#427](https://www.github.com/googleapis/gaxios/issues/427)) ([819219e](https://www.github.com/googleapis/gaxios/commit/819219ed742814259c525bdf5721b28234019c08)) + +## [4.3.0](https://www.github.com/googleapis/gaxios/compare/v4.2.1...v4.3.0) (2021-05-26) + + +### Features + +* allow cert and key to be provided for mTLS ([#399](https://www.github.com/googleapis/gaxios/issues/399)) ([d74ab91](https://www.github.com/googleapis/gaxios/commit/d74ab9125d581e46d655614729872e79317c740d)) + +### [4.2.1](https://www.github.com/googleapis/gaxios/compare/v4.2.0...v4.2.1) (2021-04-20) + + +### Bug Fixes + +* **deps:** upgrade webpack and karma-webpack ([#379](https://www.github.com/googleapis/gaxios/issues/379)) ([75c9013](https://www.github.com/googleapis/gaxios/commit/75c90132e99c2f960c01e0faf0f8e947c920aa73)) + +## [4.2.0](https://www.github.com/googleapis/gaxios/compare/v4.1.0...v4.2.0) (2021-03-01) + + +### Features + +* handle application/x-www-form-urlencoded/Buffer ([#374](https://www.github.com/googleapis/gaxios/issues/374)) ([ce21e9c](https://www.github.com/googleapis/gaxios/commit/ce21e9ccd228578a9f90bb2fddff797cec4a9402)) + +## [4.1.0](https://www.github.com/googleapis/gaxios/compare/v4.0.1...v4.1.0) (2020-12-08) + + +### Features + +* add an option to configure the fetch impl ([#342](https://www.github.com/googleapis/gaxios/issues/342)) 
([2e081ef](https://www.github.com/googleapis/gaxios/commit/2e081ef161d84aa435788e8d525d393dc7964117)) +* add no_proxy env variable ([#361](https://www.github.com/googleapis/gaxios/issues/361)) ([efe72a7](https://www.github.com/googleapis/gaxios/commit/efe72a71de81d466160dde5da551f7a41acc3ac4)) + +### [4.0.1](https://www.github.com/googleapis/gaxios/compare/v4.0.0...v4.0.1) (2020-10-27) + + +### Bug Fixes + +* prevent bonus ? with empty qs params ([#357](https://www.github.com/googleapis/gaxios/issues/357)) ([b155f76](https://www.github.com/googleapis/gaxios/commit/b155f76cbc4c234da1d99c26691296702342c205)) + +## [4.0.0](https://www.github.com/googleapis/gaxios/compare/v3.2.0...v4.0.0) (2020-10-21) + + +### ⚠ BREAKING CHANGES + +* parameters in `url` and parameters provided via params will now be combined. + +### Bug Fixes + +* drop requirement on URL/combine url and params ([#338](https://www.github.com/googleapis/gaxios/issues/338)) ([e166bc6](https://www.github.com/googleapis/gaxios/commit/e166bc6721fd979070ab3d9c69b71ffe9ee061c7)) + +## [3.2.0](https://www.github.com/googleapis/gaxios/compare/v3.1.0...v3.2.0) (2020-09-14) + + +### Features + +* add initial retry delay, and set default to 100ms ([#336](https://www.github.com/googleapis/gaxios/issues/336)) ([870326b](https://www.github.com/googleapis/gaxios/commit/870326b8245f16fafde0b0c32cfd2f277946e3a1)) + +## [3.1.0](https://www.github.com/googleapis/gaxios/compare/v3.0.4...v3.1.0) (2020-07-30) + + +### Features + +* pass default adapter to adapter option ([#319](https://www.github.com/googleapis/gaxios/issues/319)) ([cf06bd9](https://www.github.com/googleapis/gaxios/commit/cf06bd9f51cbe707ed5973e390d31a091d4537c1)) + +### [3.0.4](https://www.github.com/googleapis/gaxios/compare/v3.0.3...v3.0.4) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#306](https://www.github.com/googleapis/gaxios/issues/306)) ([8514672](https://www.github.com/googleapis/gaxios/commit/8514672f9d56bc6f077dcbab050b3342d4e343c6)) + +### [3.0.3](https://www.github.com/googleapis/gaxios/compare/v3.0.2...v3.0.3) (2020-04-20) + + +### Bug Fixes + +* apache license URL ([#468](https://www.github.com/googleapis/gaxios/issues/468)) ([#272](https://www.github.com/googleapis/gaxios/issues/272)) ([cf1b7cb](https://www.github.com/googleapis/gaxios/commit/cf1b7cb66e4c98405236834e63349931b4f35b90)) + +### [3.0.2](https://www.github.com/googleapis/gaxios/compare/v3.0.1...v3.0.2) (2020-03-24) + + +### Bug Fixes + +* continue replacing application/x-www-form-urlencoded with application/json ([#263](https://www.github.com/googleapis/gaxios/issues/263)) ([dca176d](https://www.github.com/googleapis/gaxios/commit/dca176df0990f2c22255f9764405c496ea07ada2)) + +### [3.0.1](https://www.github.com/googleapis/gaxios/compare/v3.0.0...v3.0.1) (2020-03-23) + + +### Bug Fixes + +* allow an alternate JSON content-type to be set ([#257](https://www.github.com/googleapis/gaxios/issues/257)) ([698a29f](https://www.github.com/googleapis/gaxios/commit/698a29ff3b22f30ea99ad190c4592940bef88f1f)) + +## [3.0.0](https://www.github.com/googleapis/gaxios/compare/v2.3.2...v3.0.0) (2020-03-19) + + +### ⚠ BREAKING CHANGES + +* **deps:** TypeScript introduced breaking changes in generated code in 3.7.x +* drop Node 8 from engines field (#254) + +### Features + +* drop Node 8 from engines field ([#254](https://www.github.com/googleapis/gaxios/issues/254)) ([8c9fff7](https://www.github.com/googleapis/gaxios/commit/8c9fff7f92f70f029292c906c62d194c1d58827d)) +* **deps:** updates to latest 
TypeScript ([#253](https://www.github.com/googleapis/gaxios/issues/253)) ([054267b](https://www.github.com/googleapis/gaxios/commit/054267bf12e1801c134e3b5cae92dcc5ea041fab)) + +### [2.3.2](https://www.github.com/googleapis/gaxios/compare/v2.3.1...v2.3.2) (2020-02-28) + + +### Bug Fixes + +* update github repo in package ([#239](https://www.github.com/googleapis/gaxios/issues/239)) ([7e750cb](https://www.github.com/googleapis/gaxios/commit/7e750cbaaa59812817d725c74fb9d364c4b71096)) + +### [2.3.1](https://www.github.com/googleapis/gaxios/compare/v2.3.0...v2.3.1) (2020-02-13) + + +### Bug Fixes + +* **deps:** update dependency https-proxy-agent to v5 ([#233](https://www.github.com/googleapis/gaxios/issues/233)) ([56de0a8](https://www.github.com/googleapis/gaxios/commit/56de0a824a2f9622e3e4d4bdd41adccd812a30b4)) + +## [2.3.0](https://www.github.com/googleapis/gaxios/compare/v2.2.2...v2.3.0) (2020-01-31) + + +### Features + +* add promise support for onRetryAttempt and shouldRetry ([#223](https://www.github.com/googleapis/gaxios/issues/223)) ([061afa3](https://www.github.com/googleapis/gaxios/commit/061afa381a51d39823e63accf3dacd16e191f3b9)) + +### [2.2.2](https://www.github.com/googleapis/gaxios/compare/v2.2.1...v2.2.2) (2020-01-08) + + +### Bug Fixes + +* **build:** add publication configuration ([#218](https://www.github.com/googleapis/gaxios/issues/218)) ([43e581f](https://www.github.com/googleapis/gaxios/commit/43e581ff4ed5e79d72f6f29748a5eebb6bff1229)) + +### [2.2.1](https://www.github.com/googleapis/gaxios/compare/v2.2.0...v2.2.1) (2020-01-04) + + +### Bug Fixes + +* **deps:** update dependency https-proxy-agent to v4 ([#201](https://www.github.com/googleapis/gaxios/issues/201)) ([5cdeef2](https://www.github.com/googleapis/gaxios/commit/5cdeef288a0c5c544c0dc2659aafbb2215d06c4b)) +* remove retryDelay option ([#203](https://www.github.com/googleapis/gaxios/issues/203)) ([d21e08d](https://www.github.com/googleapis/gaxios/commit/d21e08d2aada980d39bc5ca7093d54452be2d646)) + +## [2.2.0](https://www.github.com/googleapis/gaxios/compare/v2.1.1...v2.2.0) (2019-12-05) + + +### Features + +* populate GaxiosResponse with raw response information (res.url) ([#189](https://www.github.com/googleapis/gaxios/issues/189)) ([53a7f54](https://www.github.com/googleapis/gaxios/commit/53a7f54cc0f20320d7a6a21a9a9f36050cec2eec)) + + +### Bug Fixes + +* don't retry a request that is aborted intentionally ([#190](https://www.github.com/googleapis/gaxios/issues/190)) ([ba9777b](https://www.github.com/googleapis/gaxios/commit/ba9777b15b5262f8288a8bb3cca49a1de8427d8e)) +* **deps:** pin TypeScript below 3.7.0 ([5373f07](https://www.github.com/googleapis/gaxios/commit/5373f0793a765965a8221ecad2f99257ed1b7444)) + +### [2.1.1](https://www.github.com/googleapis/gaxios/compare/v2.1.0...v2.1.1) (2019-11-15) + + +### Bug Fixes + +* **docs:** snippets are now replaced in jsdoc comments ([#183](https://www.github.com/googleapis/gaxios/issues/183)) ([8dd1324](https://www.github.com/googleapis/gaxios/commit/8dd1324256590bd2f2e9015c813950e1cd8cb330)) + +## [2.1.0](https://www.github.com/googleapis/gaxios/compare/v2.0.3...v2.1.0) (2019-10-09) + + +### Bug Fixes + +* **deps:** update dependency https-proxy-agent to v3 ([#172](https://www.github.com/googleapis/gaxios/issues/172)) ([4a38f35](https://www.github.com/googleapis/gaxios/commit/4a38f35)) + + +### Features + +* **TypeScript:** agent can now be passed as builder method, rather than agent instance ([c84ddd6](https://www.github.com/googleapis/gaxios/commit/c84ddd6)) + +### 
[2.0.3](https://www.github.com/googleapis/gaxios/compare/v2.0.2...v2.0.3) (2019-09-11) + + +### Bug Fixes + +* do not override content-type if its given ([#158](https://www.github.com/googleapis/gaxios/issues/158)) ([f49e0e6](https://www.github.com/googleapis/gaxios/commit/f49e0e6)) +* improve stream detection logic ([6c41537](https://www.github.com/googleapis/gaxios/commit/6c41537)) +* revert header change ([#161](https://www.github.com/googleapis/gaxios/issues/161)) ([b0f6a8b](https://www.github.com/googleapis/gaxios/commit/b0f6a8b)) + +### [2.0.2](https://www.github.com/googleapis/gaxios/compare/v2.0.1...v2.0.2) (2019-07-23) + + +### Bug Fixes + +* check for existence of fetch before using it ([#138](https://www.github.com/googleapis/gaxios/issues/138)) ([79eb58d](https://www.github.com/googleapis/gaxios/commit/79eb58d)) +* **docs:** make anchors work in jsdoc ([#139](https://www.github.com/googleapis/gaxios/issues/139)) ([85103bb](https://www.github.com/googleapis/gaxios/commit/85103bb)) +* prevent double option processing ([#142](https://www.github.com/googleapis/gaxios/issues/142)) ([19b4b3c](https://www.github.com/googleapis/gaxios/commit/19b4b3c)) diff --git a/node_modules/gaxios/LICENSE b/node_modules/gaxios/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/node_modules/gaxios/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/gaxios/README.md b/node_modules/gaxios/README.md new file mode 100644 index 0000000..2b9b0c5 --- /dev/null +++ b/node_modules/gaxios/README.md @@ -0,0 +1,171 @@ +# gaxios + +[![npm version](https://img.shields.io/npm/v/gaxios.svg)](https://www.npmjs.org/package/gaxios) +[![codecov](https://codecov.io/gh/googleapis/gaxios/branch/master/graph/badge.svg)](https://codecov.io/gh/googleapis/gaxios) +[![Code Style: Google](https://img.shields.io/badge/code%20style-google-blueviolet.svg)](https://github.com/google/gts) + +> An HTTP request client that provides an `axios` like interface over top of `node-fetch`. + +## Install + +```sh +$ npm install gaxios +``` + +## Example + +```js +const {request} = require('gaxios'); +const res = await request({ + url: 'https://www.googleapis.com/discovery/v1/apis/', +}); +``` + +## Setting Defaults + +Gaxios supports setting default properties both on the default instance, and on additional instances. 
This is often useful when making many requests to the same domain with the same base settings. For example: + +```js +const gaxios = require('gaxios'); +gaxios.instance.defaults = { + baseURL: 'https://example.com' + headers: { + Authorization: 'SOME_TOKEN' + } +} +gaxios.request({url: '/data'}).then(...); +``` + +Note that setting default values will take precedence +over other authentication methods, i.e., application default credentials. + +## Request Options + +```js +{ + // The url to which the request should be sent. Required. + url: string, + + // The HTTP method to use for the request. Defaults to `GET`. + method: 'GET', + + // The base Url to use for the request. Prepended to the `url` property above. + baseURL: 'https://example.com'; + + // The HTTP methods to be sent with the request. + headers: { 'some': 'header' }, + + // The data to send in the body of the request. Data objects will be + // serialized as JSON. + // + // Note: if you would like to provide a Content-Type header other than + // application/json you you must provide a string or readable stream, rather + // than an object: + // data: JSON.stringify({some: 'data'}) + // data: fs.readFile('./some-data.jpeg') + data: { + some: 'data' + }, + + // The max size of the http response content in bytes allowed. + // Defaults to `0`, which is the same as unset. + maxContentLength: 2000, + + // The max number of HTTP redirects to follow. + // Defaults to 100. + maxRedirects: 100, + + // The querystring parameters that will be encoded using `qs` and + // appended to the url + params: { + querystring: 'parameters' + }, + + // By default, we use the `querystring` package in node core to serialize + // querystring parameters. You can override that and provide your + // own implementation. + paramsSerializer: (params) => { + return qs.stringify(params); + }, + + // The timeout for the HTTP request in milliseconds. Defaults to 0. + timeout: 1000, + + // Optional method to override making the actual HTTP request. Useful + // for writing tests and instrumentation + adapter?: async (options, defaultAdapter) => { + const res = await defaultAdapter(options); + res.data = { + ...res.data, + extraProperty: 'your extra property', + }; + return res; + }; + + // The expected return type of the request. Options are: + // json | stream | blob | arraybuffer | text | unknown + // Defaults to `unknown`. + responseType: 'unknown', + + // The node.js http agent to use for the request. + agent: someHttpsAgent, + + // Custom function to determine if the response is valid based on the + // status code. Defaults to (>= 200 && < 300) + validateStatus: (status: number) => true, + + // Implementation of `fetch` to use when making the API call. By default, + // will use the browser context if available, and fall back to `node-fetch` + // in node.js otherwise. + fetchImplementation?: typeof fetch; + + // Configuration for retrying of requests. + retryConfig: { + // The number of times to retry the request. Defaults to 3. + retry?: number; + + // The number of retries already attempted. + currentRetryAttempt?: number; + + // The HTTP Methods that will be automatically retried. + // Defaults to ['GET','PUT','HEAD','OPTIONS','DELETE'] + httpMethodsToRetry?: string[]; + + // The HTTP response status codes that will automatically be retried. + // Defaults to: [[100, 199], [429, 429], [500, 599]] + statusCodesToRetry?: number[][]; + + // Function to invoke when a retry attempt is made. 
+ onRetryAttempt?: (err: GaxiosError) => Promise | void; + + // Function to invoke which determines if you should retry + shouldRetry?: (err: GaxiosError) => Promise | boolean; + + // When there is no response, the number of retries to attempt. Defaults to 2. + noResponseRetries?: number; + + // The amount of time to initially delay the retry, in ms. Defaults to 100ms. + retryDelay?: number; + }, + + // Enables default configuration for retries. + retry: boolean, + + // Cancelling a request requires the `abort-controller` library. + // See https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + signal?: AbortSignal + + /** + * An experimental, customizable error redactor. + * + * Set `false` to disable. + * + * @experimental + */ + errorRedactor?: typeof defaultErrorRedactor | false; +} +``` + +## License + +[Apache-2.0](https://github.com/googleapis/gaxios/blob/master/LICENSE) diff --git a/node_modules/gaxios/build/src/common.d.ts b/node_modules/gaxios/build/src/common.d.ts new file mode 100644 index 0000000..349fc29 --- /dev/null +++ b/node_modules/gaxios/build/src/common.d.ts @@ -0,0 +1,220 @@ +/// +/// +/// +import { Agent } from 'http'; +import { URL } from 'url'; +/** + * Support `instanceof` operator for `GaxiosError`s in different versions of this library. + * + * @see {@link GaxiosError[Symbol.hasInstance]} + */ +export declare const GAXIOS_ERROR_SYMBOL: unique symbol; +export declare class GaxiosError extends Error { + config: GaxiosOptions; + response?: GaxiosResponse | undefined; + error?: Error | NodeJS.ErrnoException | undefined; + /** + * An Error code. + * See {@link https://nodejs.org/api/errors.html#errorcode error.code} + * + * @example + * 'ECONNRESET' + */ + code?: string; + /** + * An HTTP Status code. + * See {@link https://developer.mozilla.org/en-US/docs/Web/API/Response/status Response: status property} + * + * @example + * 500 + */ + status?: number; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[Symbol.hasInstance]} + * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} + * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} + */ + [GAXIOS_ERROR_SYMBOL]: string; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} + */ + static [Symbol.hasInstance](instance: unknown): boolean; + constructor(message: string, config: GaxiosOptions, response?: GaxiosResponse | undefined, error?: Error | NodeJS.ErrnoException | undefined); +} +export interface Headers { + [index: string]: any; +} +export type GaxiosPromise = Promise>; +export interface GaxiosXMLHttpRequest { + responseURL: string; +} +export interface GaxiosResponse { + config: GaxiosOptions; + data: T; + status: number; + statusText: string; + headers: Headers; + request: GaxiosXMLHttpRequest; +} +/** + * Request options that are used to form the request. + */ +export interface GaxiosOptions { + /** + * Optional method to override making the actual HTTP request. Useful + * for writing tests. 
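The `Request Options` reference above is easier to read next to a concrete call. The sketch below is illustrative: the URL, query parameter, and retry values are placeholders, and only a few of the documented options are shown.

```js
// Illustrative gaxios request exercising a handful of the documented options.
const { request } = require('gaxios');

async function listApis() {
  const res = await request({
    url: 'https://www.googleapis.com/discovery/v1/apis/',
    method: 'GET',
    params: { preferred: true },   // encoded onto the query string
    timeout: 5000,                 // ms
    responseType: 'json',
    retry: true,                   // enable retries using the config below
    retryConfig: {
      retry: 3,
      statusCodesToRetry: [[500, 599]],
      onRetryAttempt: err => console.warn('retrying:', err.message),
    },
  });
  return res.data;                 // parsed response body
}

listApis().then(data => console.log(data));
```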
+ */ + adapter?: (options: GaxiosOptions, defaultAdapter: (options: GaxiosOptions) => GaxiosPromise) => GaxiosPromise; + url?: string | URL; + /** + * @deprecated + */ + baseUrl?: string; + baseURL?: string | URL; + method?: 'GET' | 'HEAD' | 'POST' | 'DELETE' | 'PUT' | 'CONNECT' | 'OPTIONS' | 'TRACE' | 'PATCH'; + headers?: Headers; + data?: any; + body?: any; + /** + * The maximum size of the http response content in bytes allowed. + */ + maxContentLength?: number; + /** + * The maximum number of redirects to follow. Defaults to 20. + */ + maxRedirects?: number; + follow?: number; + params?: any; + paramsSerializer?: (params: { + [index: string]: string | number; + }) => string; + timeout?: number; + /** + * @deprecated ignored + */ + onUploadProgress?: (progressEvent: any) => void; + responseType?: 'arraybuffer' | 'blob' | 'json' | 'text' | 'stream' | 'unknown'; + agent?: Agent | ((parsedUrl: URL) => Agent); + validateStatus?: (status: number) => boolean; + retryConfig?: RetryConfig; + retry?: boolean; + signal?: any; + size?: number; + /** + * Implementation of `fetch` to use when making the API call. By default, + * will use the browser context if available, and fall back to `node-fetch` + * in node.js otherwise. + */ + fetchImplementation?: FetchImplementation; + cert?: string; + key?: string; + /** + * An experimental error redactor. + * + * @experimental + */ + errorRedactor?: typeof defaultErrorRedactor | false; +} +/** + * A partial object of `GaxiosResponse` with only redactable keys + * + * @experimental + */ +export type RedactableGaxiosOptions = Pick; +/** + * A partial object of `GaxiosResponse` with only redactable keys + * + * @experimental + */ +export type RedactableGaxiosResponse = Pick, 'config' | 'data' | 'headers'>; +/** + * Configuration for the Gaxios `request` method. + */ +export interface RetryConfig { + /** + * The number of times to retry the request. Defaults to 3. + */ + retry?: number; + /** + * The number of retries already attempted. + */ + currentRetryAttempt?: number; + /** + * The amount of time to initially delay the retry, in ms. Defaults to 100ms. + */ + retryDelay?: number; + /** + * The HTTP Methods that will be automatically retried. + * Defaults to ['GET','PUT','HEAD','OPTIONS','DELETE'] + */ + httpMethodsToRetry?: string[]; + /** + * The HTTP response status codes that will automatically be retried. + * Defaults to: [[100, 199], [429, 429], [500, 599]] + */ + statusCodesToRetry?: number[][]; + /** + * Function to invoke when a retry attempt is made. + */ + onRetryAttempt?: (err: GaxiosError) => Promise | void; + /** + * Function to invoke which determines if you should retry + */ + shouldRetry?: (err: GaxiosError) => Promise | boolean; + /** + * When there is no response, the number of retries to attempt. Defaults to 2. + */ + noResponseRetries?: number; + /** + * Function to invoke which returns a promise. After the promise resolves, + * the retry will be triggered. 
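The `adapter` hook declared above is the seam intended for tests and instrumentation. A short illustrative sketch: the URL and canned payload are made up, and the stub response simply mirrors the `GaxiosResponse` fields declared above.

```js
const { request } = require('gaxios');

// Test-style adapter: return a canned response for one URL and
// delegate everything else to the default adapter.
async function fakeAdapter(options, defaultAdapter) {
  if (String(options.url) === 'https://example.com/ping') {
    return {
      config: options,
      data: { ok: true },   // canned payload
      status: 200,
      statusText: 'OK',
      headers: {},
      request: { responseURL: 'https://example.com/ping' },
    };
  }
  return defaultAdapter(options);
}

request({ url: 'https://example.com/ping', adapter: fakeAdapter })
  .then(res => console.log(res.data)); // { ok: true }
```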
If provided, this will be used in-place of + * the `retryDelay` + */ + retryBackoff?: (err: GaxiosError, defaultBackoffMs: number) => Promise; +} +export type FetchImplementation = (input: FetchRequestInfo, init?: FetchRequestInit) => Promise; +export type FetchRequestInfo = any; +export interface FetchResponse { + readonly status: number; + readonly statusText: string; + readonly url: string; + readonly body: unknown | null; + arrayBuffer(): Promise; + blob(): Promise; + readonly headers: FetchHeaders; + json(): Promise; + text(): Promise; +} +export interface FetchRequestInit { + method?: string; +} +export interface FetchHeaders { + append(name: string, value: string): void; + delete(name: string): void; + get(name: string): string | null; + has(name: string): boolean; + set(name: string, value: string): void; + forEach(callbackfn: (value: string, key: string) => void, thisArg?: any): void; +} +/** + * An experimental error redactor. + * + * @param config Config to potentially redact properties of + * @param response Config to potentially redact properties of + * + * @experimental + */ +export declare function defaultErrorRedactor(data: { + config?: RedactableGaxiosOptions; + response?: RedactableGaxiosResponse; +}): { + config?: RedactableGaxiosOptions | undefined; + response?: RedactableGaxiosResponse | undefined; +}; diff --git a/node_modules/gaxios/build/src/common.js b/node_modules/gaxios/build/src/common.js new file mode 100644 index 0000000..6ee8a70 --- /dev/null +++ b/node_modules/gaxios/build/src/common.js @@ -0,0 +1,172 @@ +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _a; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultErrorRedactor = exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; +const url_1 = require("url"); +const util_1 = require("./util"); +const extend_1 = __importDefault(require("extend")); +/** + * Support `instanceof` operator for `GaxiosError`s in different versions of this library. + * + * @see {@link GaxiosError[Symbol.hasInstance]} + */ +exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); +/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ +class GaxiosError extends Error { + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. 
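`RetryConfig.retryBackoff` above replaces the fixed `retryDelay` with a caller-supplied promise. A minimal illustrative sketch; doubling `defaultBackoffMs` is an arbitrary choice, not a library default.

```js
const { request } = require('gaxios');

// Wait twice the default backoff before each retry attempt.
// The retry fires once the returned promise resolves.
function getWithCustomBackoff(url) {
  return request({
    url,
    retry: true,
    retryConfig: {
      retry: 3,
      retryBackoff: (err, defaultBackoffMs) =>
        new Promise(resolve => setTimeout(resolve, defaultBackoffMs * 2)),
    },
  });
}
```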
+ * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} + */ + static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { + if (instance && + typeof instance === 'object' && + exports.GAXIOS_ERROR_SYMBOL in instance && + instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { + return true; + } + // fallback to native + return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); + } + constructor(message, config, response, error) { + var _b; + super(message); + this.config = config; + this.response = response; + this.error = error; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[Symbol.hasInstance]} + * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} + * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} + */ + this[_a] = util_1.pkg.version; + // deep-copy config as we do not want to mutate + // the existing config for future retries/use + this.config = (0, extend_1.default)(true, {}, config); + if (this.response) { + this.response.config = (0, extend_1.default)(true, {}, this.response.config); + } + if (this.response) { + try { + this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); + } + catch (_c) { + // best effort - don't throw an error within an error + // we could set `this.response.config.responseType = 'unknown'`, but + // that would mutate future calls with this config object. + } + this.status = this.response.status; + } + if (error && 'code' in error && error.code) { + this.code = error.code; + } + if (config.errorRedactor) { + config.errorRedactor({ + config: this.config, + response: this.response, + }); + } + } +} +exports.GaxiosError = GaxiosError; +function translateData(responseType, data) { + switch (responseType) { + case 'stream': + return data; + case 'json': + return JSON.parse(JSON.stringify(data)); + case 'arraybuffer': + return JSON.parse(Buffer.from(data).toString('utf8')); + case 'blob': + return JSON.parse(data.text()); + default: + return data; + } +} +/** + * An experimental error redactor. 
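The compiled `GaxiosError` above exposes a string `code` (e.g. 'ECONNRESET') plus a numeric `status`, and its `Symbol.hasInstance` override keeps `instanceof` working across duplicated copies of the package. A short illustrative sketch of branching on it; the URL is a placeholder.

```js
const { request, GaxiosError } = require('gaxios');

async function safeGet(url) {
  try {
    // Passing errorRedactor: false would keep unredacted headers/body on the
    // error; by default defaultErrorRedactor (below) scrubs tokens and credentials.
    return (await request({ url })).data;
  } catch (err) {
    if (err instanceof GaxiosError) {       // works across duplicated installs
      if (err.status === 404) return null;  // HTTP status from the response
      console.error('request failed:', err.code || err.status, err.message);
    }
    throw err;
  }
}
```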
+ * + * @param config Config to potentially redact properties of + * @param response Config to potentially redact properties of + * + * @experimental + */ +function defaultErrorRedactor(data) { + const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; + function redactHeaders(headers) { + if (!headers) + return; + for (const key of Object.keys(headers)) { + // any casing of `Authentication` + if (/^authentication$/.test(key)) { + headers[key] = REDACT; + } + } + } + function redactString(obj, key) { + if (typeof obj === 'object' && + obj !== null && + typeof obj[key] === 'string') { + const text = obj[key]; + if (/grant_type=/.test(text) || /assertion=/.test(text)) { + obj[key] = REDACT; + } + } + } + function redactObject(obj) { + if (typeof obj === 'object' && obj !== null) { + if ('grant_type' in obj) { + obj['grant_type'] = REDACT; + } + if ('assertion' in obj) { + obj['assertion'] = REDACT; + } + } + } + if (data.config) { + redactHeaders(data.config.headers); + redactString(data.config, 'data'); + redactObject(data.config.data); + redactString(data.config, 'body'); + redactObject(data.config.body); + try { + const url = new url_1.URL('', data.config.url); + if (url.searchParams.has('token')) { + url.searchParams.set('token', REDACT); + } + data.config.url = url.toString(); + } + catch (_b) { + // ignore error - no need to parse an invalid URL + } + } + if (data.response) { + defaultErrorRedactor({ config: data.response.config }); + redactHeaders(data.response.headers); + redactString(data.response, 'data'); + redactObject(data.response.data); + } + return data; +} +exports.defaultErrorRedactor = defaultErrorRedactor; +//# sourceMappingURL=common.js.map \ No newline at end of file diff --git a/node_modules/gaxios/build/src/common.js.map b/node_modules/gaxios/build/src/common.js.map new file mode 100644 index 0000000..a70b275 --- /dev/null +++ b/node_modules/gaxios/build/src/common.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"common.js","sourceRoot":"","sources":["../../src/common.ts"],"names":[],"mappings":";AAAA,4BAA4B;AAC5B,kEAAkE;AAClE,mEAAmE;AACnE,0CAA0C;AAC1C,EAAE;AACF,gDAAgD;AAChD,EAAE;AACF,sEAAsE;AACtE,oEAAoE;AACpE,2EAA2E;AAC3E,sEAAsE;AACtE,iCAAiC;;;;;;;AAGjC,6BAAwB;AAExB,iCAA2B;AAC3B,oDAA4B;AAE5B;;;;GAIG;AACU,QAAA,mBAAmB,GAAG,MAAM,CAAC,GAAG,CAAC,GAAG,UAAG,CAAC,IAAI,eAAe,CAAC,CAAC;AAE1E,iEAAiE;AACjE,MAAa,WAAqB,SAAQ,KAAK;IA6B7C;;;;;OAKG;IACH,MAAM,CAAC,OARN,2BAAmB,EAQZ,MAAM,CAAC,WAAW,EAAC,CAAC,QAAiB;QAC3C,IACE,QAAQ;YACR,OAAO,QAAQ,KAAK,QAAQ;YAC5B,2BAAmB,IAAI,QAAQ;YAC/B,QAAQ,CAAC,2BAAmB,CAAC,KAAK,UAAG,CAAC,OAAO,EAC7C,CAAC;YACD,OAAO,IAAI,CAAC;QACd,CAAC;QAED,qBAAqB;QACrB,OAAO,QAAQ,CAAC,SAAS,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;IAC5E,CAAC;IAED,YACE,OAAe,EACR,MAAqB,EACrB,QAA4B,EAC5B,KAAqC;;QAE5C,KAAK,CAAC,OAAO,CAAC,CAAC;QAJR,WAAM,GAAN,MAAM,CAAe;QACrB,aAAQ,GAAR,QAAQ,CAAoB;QAC5B,UAAK,GAAL,KAAK,CAAgC;QAnC9C;;;;;;;;WAQG;QACH,QAAqB,GAAG,UAAG,CAAC,OAAO,CAAC;QA8BlC,+CAA+C;QAC/C,6CAA6C;QAC7C,IAAI,CAAC,MAAM,GAAG,IAAA,gBAAM,EAAC,IAAI,EAAE,EAAE,EAAE,MAAM,CAAC,CAAC;QACvC,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAClB,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,IAAA,gBAAM,EAAC,IAAI,EAAE,EAAE,EAAE,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;QAChE,CAAC;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAClB,IAAI,CAAC;gBACH,IAAI,CAAC,QAAQ,CAAC,IAAI,GAAG,aAAa,CAChC,IAAI,CAAC,MAAM,CAAC,YAAY,EACxB,MAAA,IAAI,CAAC,QAAQ,0CAAE,IAAI,CACpB,CAAC;YACJ,CAAC;YAAC,WAAM,CAAC;gBACP,qDAAqD;gBACrD,oEAAoE;gBACpE,0DAA0D;YAC5D,CAAC;YAED,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC;QACrC,CAAC;QAED,IAAI,KAAK,IAAI,MAAM,IAAI,KAAK,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC;YAC3C,IAAI,CAAC,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;QACzB,CAAC;QAED,IAAI,MAAM,CAAC,aAAa,EAAE,CAAC;YACzB,MAAM,CAAC,aAAa,CAAI;gBACtB,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,QAAQ,EAAE,IAAI,CAAC,QAAQ;aACxB,CAAC,CAAC;QACL,CAAC;IACH,CAAC;CACF;AA1FD,kCA0FC;AA8MD,SAAS,aAAa,CAAC,YAAgC,EAAE,IAAS;IAChE,QAAQ,YAAY,EAAE,CAAC;QACrB,KAAK,QAAQ;YACX,OAAO,IAAI,CAAC;QACd,KAAK,MAAM;YACT,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC;QAC1C,KAAK,aAAa;YAChB,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;QACxD,KAAK,MAAM;YACT,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;QACjC;YACE,OAAO,IAAI,CAAC;IAChB,CAAC;AACH,CAAC;AAED;;;;;;;GAOG;AACH,SAAgB,oBAAoB,CAAU,IAG7C;IACC,MAAM,MAAM,GACV,0EAA0E,CAAC;IAE7E,SAAS,aAAa,CAAC,OAAiB;QACtC,IAAI,CAAC,OAAO;YAAE,OAAO;QAErB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC;YACvC,iCAAiC;YACjC,IAAI,kBAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;gBACjC,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC;YACxB,CAAC;QACH,CAAC;IACH,CAAC;IAED,SAAS,YAAY,CAAC,GAAkB,EAAE,GAAwB;QAChE,IACE,OAAO,GAAG,KAAK,QAAQ;YACvB,GAAG,KAAK,IAAI;YACZ,OAAO,GAAG,CAAC,GAAG,CAAC,KAAK,QAAQ,EAC5B,CAAC;YACD,MAAM,IAAI,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;YAEtB,IAAI,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACxD,GAAG,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC;YACpB,CAAC;QACH,CAAC;IACH,CAAC;IAED,SAAS,YAAY,CAAkC,GAAM;QAC3D,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YAC5C,IAAI,YAAY,IAAI,GAAG,EAAE,CAAC;gBACxB,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;YAC7B,CAAC;YAED,IAAI,WAAW,IAAI,GAAG,EAAE,CAAC;gBACvB,GAAG,CAAC,WAAW,CAAC,GAAG,MAAM,CAAC;YAC5B,CAAC;QACH,CAAC;IACH,CAAC;IAED,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;QAChB,aAAa,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QAEnC,YAAY,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAClC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAE/B,YAAY,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAClC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAE/B,IAAI,
CAAC;YACH,MAAM,GAAG,GAAG,IAAI,SAAG,CAAC,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YAEzC,IAAI,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;gBAClC,GAAG,CAAC,YAAY,CAAC,GAAG,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YACxC,CAAC;YAED,IAAI,CAAC,MAAM,CAAC,GAAG,GAAG,GAAG,CAAC,QAAQ,EAAE,CAAC;QACnC,CAAC;QAAC,WAAM,CAAC;YACP,iDAAiD;QACnD,CAAC;IACH,CAAC;IAED,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;QAClB,oBAAoB,CAAC,EAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAC,CAAC,CAAC;QACrD,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QAErC,YAAY,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QACpC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IACnC,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AA3ED,oDA2EC"} \ No newline at end of file diff --git a/node_modules/gaxios/build/src/gaxios.d.ts b/node_modules/gaxios/build/src/gaxios.d.ts new file mode 100644 index 0000000..09bcd06 --- /dev/null +++ b/node_modules/gaxios/build/src/gaxios.d.ts @@ -0,0 +1,51 @@ +/// +/// +import { Agent } from 'http'; +import { URL } from 'url'; +import { GaxiosOptions, GaxiosPromise } from './common'; +export declare class Gaxios { + protected agentCache: Map Agent)>; + /** + * Default HTTP options that will be used for every HTTP request. + */ + defaults: GaxiosOptions; + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults?: GaxiosOptions); + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + request(opts?: GaxiosOptions): GaxiosPromise; + private _defaultAdapter; + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + protected _request(opts?: GaxiosOptions): GaxiosPromise; + private getResponseData; + /** + * Validates the options, and merges them with defaults. + * @param opts The original options passed from the client. + */ + private validateOpts; + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + private validateStatus; + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + private paramsSerializer; + private translateResponse; + /** + * Attempts to parse a response by looking at the Content-Type header. + * @param {FetchResponse} response the HTTP response. + * @returns {Promise} a promise that resolves to the response data. + */ + private getResponseDataFromContentType; +} diff --git a/node_modules/gaxios/build/src/gaxios.js b/node_modules/gaxios/build/src/gaxios.js new file mode 100644 index 0000000..c1ca2b7 --- /dev/null +++ b/node_modules/gaxios/build/src/gaxios.js @@ -0,0 +1,363 @@ +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Gaxios = void 0; +const extend_1 = __importDefault(require("extend")); +const https_1 = require("https"); +const node_fetch_1 = __importDefault(require("node-fetch")); +const querystring_1 = __importDefault(require("querystring")); +const is_stream_1 = __importDefault(require("is-stream")); +const url_1 = require("url"); +const common_1 = require("./common"); +const retry_1 = require("./retry"); +const https_proxy_agent_1 = require("https-proxy-agent"); +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fetch = hasFetch() ? window.fetch : node_fetch_1.default; +function hasWindow() { + return typeof window !== 'undefined' && !!window; +} +function hasFetch() { + return hasWindow() && !!window.fetch; +} +function hasBuffer() { + return typeof Buffer !== 'undefined'; +} +function hasHeader(options, header) { + return !!getHeader(options, header); +} +function getHeader(options, header) { + header = header.toLowerCase(); + for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { + if (header === key.toLowerCase()) { + return options.headers[key]; + } + } + return undefined; +} +let HttpsProxyAgent; +function loadProxy() { + var _a, _b, _c, _d; + const proxy = ((_a = process === null || process === void 0 ? void 0 : process.env) === null || _a === void 0 ? void 0 : _a.HTTPS_PROXY) || + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.https_proxy) || + ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.HTTP_PROXY) || + ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.http_proxy); + if (proxy) { + HttpsProxyAgent = https_proxy_agent_1.HttpsProxyAgent; + } + return proxy; +} +loadProxy(); +function skipProxy(url) { + var _a; + const noProxyEnv = (_a = process.env.NO_PROXY) !== null && _a !== void 0 ? _a : process.env.no_proxy; + if (!noProxyEnv) { + return false; + } + const noProxyUrls = noProxyEnv.split(','); + const parsedURL = url instanceof url_1.URL ? url : new url_1.URL(url); + return !!noProxyUrls.find(url => { + if (url.startsWith('*.') || url.startsWith('.')) { + url = url.replace(/^\*\./, '.'); + return parsedURL.hostname.endsWith(url); + } + else { + return url === parsedURL.origin || url === parsedURL.hostname; + } + }); +} +// Figure out if we should be using a proxy. Only if it's required, load +// the https-proxy-agent module as it adds startup cost. +function getProxy(url) { + // If there is a match between the no_proxy env variables and the url, then do not proxy + if (skipProxy(url)) { + return undefined; + // If there is not a match between the no_proxy env variables and the url, check to see if there should be a proxy + } + else { + return loadProxy(); + } +} +class Gaxios { + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults) { + this.agentCache = new Map(); + this.defaults = defaults || {}; + } + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. 
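+     *
+     * @example
+     * // Usage sketch added in review (not upstream); the URL is a placeholder.
+     * const {Gaxios} = require('gaxios');
+     * const client = new Gaxios({responseType: 'json'});
+     * // Inside an async function:
+     * const res = await client.request({url: 'https://example.com/api'});
+     * console.log(res.status, res.data);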
+ */ + async request(opts = {}) { + opts = this.validateOpts(opts); + return this._request(opts); + } + async _defaultAdapter(opts) { + const fetchImpl = opts.fetchImplementation || fetch; + const res = (await fetchImpl(opts.url, opts)); + const data = await this.getResponseData(opts, res); + return this.translateResponse(opts, res, data); + } + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async _request(opts = {}) { + var _a; + try { + let translatedResponse; + if (opts.adapter) { + translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); + } + else { + translatedResponse = await this._defaultAdapter(opts); + } + if (!opts.validateStatus(translatedResponse.status)) { + if (opts.responseType === 'stream') { + let response = ''; + await new Promise(resolve => { + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('data', chunk => { + response += chunk; + }); + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); + }); + translatedResponse.data = response; + } + throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + } + return translatedResponse; + } + catch (e) { + const err = e instanceof common_1.GaxiosError + ? e + : new common_1.GaxiosError(e.message, opts, undefined, e); + const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); + if (shouldRetry && config) { + err.config.retryConfig.currentRetryAttempt = + config.retryConfig.currentRetryAttempt; + // The error's config could be redacted - therefore we only want to + // copy the retry state over to the existing config + opts.retryConfig = (_a = err.config) === null || _a === void 0 ? void 0 : _a.retryConfig; + return this._request(opts); + } + throw err; + } + } + async getResponseData(opts, res) { + switch (opts.responseType) { + case 'stream': + return res.body; + case 'json': { + let data = await res.text(); + try { + data = JSON.parse(data); + } + catch (_a) { + // continue + } + return data; + } + case 'arraybuffer': + return res.arrayBuffer(); + case 'blob': + return res.blob(); + case 'text': + return res.text(); + default: + return this.getResponseDataFromContentType(res); + } + } + /** + * Validates the options, and merges them with defaults. + * @param opts The original options passed from the client. + */ + validateOpts(options) { + const opts = (0, extend_1.default)(true, {}, this.defaults, options); + if (!opts.url) { + throw new Error('URL is required.'); + } + // baseUrl has been deprecated, remove in 2.0 + const baseUrl = opts.baseUrl || opts.baseURL; + if (baseUrl) { + opts.url = baseUrl.toString() + opts.url; + } + opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; + if (opts.params && Object.keys(opts.params).length > 0) { + let additionalQueryParams = opts.paramsSerializer(opts.params); + if (additionalQueryParams.startsWith('?')) { + additionalQueryParams = additionalQueryParams.slice(1); + } + const prefix = opts.url.toString().includes('?') ? 
'&' : '?'; + opts.url = opts.url + prefix + additionalQueryParams; + } + if (typeof options.maxContentLength === 'number') { + opts.size = options.maxContentLength; + } + if (typeof options.maxRedirects === 'number') { + opts.follow = options.maxRedirects; + } + opts.headers = opts.headers || {}; + if (opts.data) { + const isFormData = typeof FormData === 'undefined' + ? false + : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData; + if (is_stream_1.default.readable(opts.data)) { + opts.body = opts.data; + } + else if (hasBuffer() && Buffer.isBuffer(opts.data)) { + // Do not attempt to JSON.stringify() a Buffer: + opts.body = opts.data; + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + } + else if (typeof opts.data === 'object') { + // If www-form-urlencoded content type has been set, but data is + // provided as an object, serialize the content using querystring: + if (!isFormData) { + if (getHeader(opts, 'content-type') === + 'application/x-www-form-urlencoded') { + opts.body = opts.paramsSerializer(opts.data); + } + else { + // } else if (!(opts.data instanceof FormData)) { + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + opts.body = JSON.stringify(opts.data); + } + } + } + else { + opts.body = opts.data; + } + } + opts.validateStatus = opts.validateStatus || this.validateStatus; + opts.responseType = opts.responseType || 'unknown'; + if (!opts.headers['Accept'] && opts.responseType === 'json') { + opts.headers['Accept'] = 'application/json'; + } + opts.method = opts.method || 'GET'; + const proxy = getProxy(opts.url); + if (proxy) { + if (this.agentCache.has(proxy)) { + opts.agent = this.agentCache.get(proxy); + } + else { + // Proxy is being used in conjunction with mTLS. + if (opts.cert && opts.key) { + const parsedURL = new url_1.URL(proxy); + opts.agent = new HttpsProxyAgent({ + port: parsedURL.port, + host: parsedURL.host, + protocol: parsedURL.protocol, + cert: opts.cert, + key: opts.key, + }); + } + else { + opts.agent = new HttpsProxyAgent(proxy); + } + this.agentCache.set(proxy, opts.agent); + } + } + else if (opts.cert && opts.key) { + // Configure client for mTLS: + if (this.agentCache.has(opts.key)) { + opts.agent = this.agentCache.get(opts.key); + } + else { + opts.agent = new https_1.Agent({ + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(opts.key, opts.agent); + } + } + if (typeof opts.errorRedactor !== 'function' && + opts.errorRedactor !== false) { + opts.errorRedactor = common_1.defaultErrorRedactor; + } + return opts; + } + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + validateStatus(status) { + return status >= 200 && status < 300; + } + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + paramsSerializer(params) { + return querystring_1.default.stringify(params); + } + translateResponse(opts, res, data) { + // headers need to be converted from a map to an obj + const headers = {}; + res.headers.forEach((value, key) => { + headers[key] = value; + }); + return { + config: opts, + data: data, + headers, + status: res.status, + statusText: res.statusText, + // XMLHttpRequestLike + request: { + responseURL: res.url, + }, + }; + } + /** + * Attempts to parse a response by looking at the Content-Type header. + * @param {FetchResponse} response the HTTP response. 
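+     * (Review note, not upstream: per the implementation below,
+     * 'application/json' resolves to a parsed object, or to the raw text if
+     * parsing fails; 'text/*' resolves to a string; a missing Content-Type
+     * falls back to text(); anything else resolves to a Blob.)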
+ * @returns {Promise} a promise that resolves to the response data. + */ + async getResponseDataFromContentType(response) { + let contentType = response.headers.get('Content-Type'); + if (contentType === null) { + // Maintain existing functionality by calling text() + return response.text(); + } + contentType = contentType.toLowerCase(); + if (contentType.includes('application/json')) { + let data = await response.text(); + try { + data = JSON.parse(data); + } + catch (_a) { + // continue + } + return data; + } + else if (contentType.match(/^text\//)) { + return response.text(); + } + else { + // If the content type is something not easily handled, just return the raw data (blob) + return response.blob(); + } + } +} +exports.Gaxios = Gaxios; +//# sourceMappingURL=gaxios.js.map \ No newline at end of file diff --git a/node_modules/gaxios/build/src/gaxios.js.map b/node_modules/gaxios/build/src/gaxios.js.map new file mode 100644 index 0000000..5c4303b --- /dev/null +++ b/node_modules/gaxios/build/src/gaxios.js.map @@ -0,0 +1 @@ +{"version":3,"file":"gaxios.js","sourceRoot":"","sources":["../../src/gaxios.ts"],"names":[],"mappings":";AAAA,4BAA4B;AAC5B,kEAAkE;AAClE,mEAAmE;AACnE,0CAA0C;AAC1C,EAAE;AACF,gDAAgD;AAChD,EAAE;AACF,sEAAsE;AACtE,oEAAoE;AACpE,2EAA2E;AAC3E,sEAAsE;AACtE,iCAAiC;;;;;;AAEjC,oDAA4B;AAE5B,iCAA0C;AAC1C,4DAAmC;AACnC,8DAA6B;AAC7B,0DAAiC;AACjC,6BAAwB;AAExB,qCAQkB;AAClB,mCAAuC;AAEvC,yDAAqE;AAErE,uDAAuD;AAEvD,MAAM,KAAK,GAAG,QAAQ,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,oBAAS,CAAC;AAEpD,SAAS,SAAS;IAChB,OAAO,OAAO,MAAM,KAAK,WAAW,IAAI,CAAC,CAAC,MAAM,CAAC;AACnD,CAAC;AAED,SAAS,QAAQ;IACf,OAAO,SAAS,EAAE,IAAI,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;AACvC,CAAC;AAED,SAAS,SAAS;IAChB,OAAO,OAAO,MAAM,KAAK,WAAW,CAAC;AACvC,CAAC;AAED,SAAS,SAAS,CAAC,OAAsB,EAAE,MAAc;IACvD,OAAO,CAAC,CAAC,SAAS,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;AACtC,CAAC;AAED,SAAS,SAAS,CAAC,OAAsB,EAAE,MAAc;IACvD,MAAM,GAAG,MAAM,CAAC,WAAW,EAAE,CAAC;IAC9B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,EAAE,CAAC,EAAE,CAAC;QACtD,IAAI,MAAM,KAAK,GAAG,CAAC,WAAW,EAAE,EAAE,CAAC;YACjC,OAAO,OAAO,CAAC,OAAQ,CAAC,GAAG,CAAC,CAAC;QAC/B,CAAC;IACH,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,IAAI,eAAoB,CAAC;AAEzB,SAAS,SAAS;;IAChB,MAAM,KAAK,GACT,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG,0CAAE,WAAW;SACzB,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG,0CAAE,WAAW,CAAA;SACzB,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG,0CAAE,UAAU,CAAA;SACxB,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,GAAG,0CAAE,UAAU,CAAA,CAAC;IAC3B,IAAI,KAAK,EAAE,CAAC;QACV,eAAe,GAAG,mCAAe,CAAC;IACpC,CAAC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,EAAE,CAAC;AAEZ,SAAS,SAAS,CAAC,GAAiB;;IAClC,MAAM,UAAU,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,QAAQ,mCAAI,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC;IAChE,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAC;IACf,CAAC;IACD,MAAM,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC1C,MAAM,SAAS,GAAG,GAAG,YAAY,SAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,SAAG,CAAC,GAAG,CAAC,CAAC;IAC1D,OAAO,CAAC,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QAC9B,IAAI,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YAChD,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAChC,OAAO,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;QAC1C,CAAC;aAAM,CAAC;YACN,OAAO,GAAG,KAAK,SAAS,CAAC,MAAM,IAAI,GAAG,KAAK,SAAS,CAAC,QAAQ,CAAC;QAChE,CAAC;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED,wEAAwE;AACxE,wDAAwD;AACxD,SAAS,QAAQ,CAAC,GAAiB;IACjC,wFAAwF;IACxF,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE,CAAC;QACnB,OAAO,SAAS,CAAC;QACjB,kHAAkH;IACpH,CAAC;SAAM,CAAC;QACN,OAAO,SAAS,EAAE,CAAC;IACrB,CAAC;AACH,CAAC;AAED,MAAa,MAAM;
IAQjB;;;OAGG;IACH,YAAY,QAAwB;QAX1B,eAAU,GAAG,IAAI,GAAG,EAA+C,CAAC;QAY5E,IAAI,CAAC,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAC;IACjC,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,OAAO,CAAU,OAAsB,EAAE;QAC7C,IAAI,GAAG,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;QAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IAC7B,CAAC;IAEO,KAAK,CAAC,eAAe,CAC3B,IAAmB;QAEnB,MAAM,SAAS,GAAG,IAAI,CAAC,mBAAmB,IAAI,KAAK,CAAC;QACpD,MAAM,GAAG,GAAG,CAAC,MAAM,SAAS,CAAC,IAAI,CAAC,GAAI,EAAE,IAAI,CAAC,CAAkB,CAAC;QAChE,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;QACnD,OAAO,IAAI,CAAC,iBAAiB,CAAI,IAAI,EAAE,GAAG,EAAE,IAAI,CAAC,CAAC;IACpD,CAAC;IAED;;;OAGG;IACO,KAAK,CAAC,QAAQ,CACtB,OAAsB,EAAE;;QAExB,IAAI,CAAC;YACH,IAAI,kBAAqC,CAAC;YAC1C,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;gBACjB,kBAAkB,GAAG,MAAM,IAAI,CAAC,OAAO,CACrC,IAAI,EACJ,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,CAChC,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACN,kBAAkB,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;YACxD,CAAC;YAED,IAAI,CAAC,IAAI,CAAC,cAAe,CAAC,kBAAkB,CAAC,MAAM,CAAC,EAAE,CAAC;gBACrD,IAAI,IAAI,CAAC,YAAY,KAAK,QAAQ,EAAE,CAAC;oBACnC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE;wBAC1B,CAAC,kBAAkB,aAAlB,kBAAkB,uBAAlB,kBAAkB,CAAE,IAAe,CAAA,CAAC,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE;4BACtD,QAAQ,IAAI,KAAK,CAAC;wBACpB,CAAC,CAAC,CAAC;wBACH,CAAC,kBAAkB,aAAlB,kBAAkB,uBAAlB,kBAAkB,CAAE,IAAe,CAAA,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;oBAC1D,CAAC,CAAC,CAAC;oBACH,kBAAkB,CAAC,IAAI,GAAG,QAAa,CAAC;gBAC1C,CAAC;gBACD,MAAM,IAAI,oBAAW,CACnB,mCAAmC,kBAAkB,CAAC,MAAM,EAAE,EAC9D,IAAI,EACJ,kBAAkB,CACnB,CAAC;YACJ,CAAC;YACD,OAAO,kBAAkB,CAAC;QAC5B,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,MAAM,GAAG,GACP,CAAC,YAAY,oBAAW;gBACtB,CAAC,CAAC,CAAC;gBACH,CAAC,CAAC,IAAI,oBAAW,CAAE,CAAW,CAAC,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,CAAU,CAAC,CAAC;YAEzE,MAAM,EAAC,WAAW,EAAE,MAAM,EAAC,GAAG,MAAM,IAAA,sBAAc,EAAC,GAAG,CAAC,CAAC;YACxD,IAAI,WAAW,IAAI,MAAM,EAAE,CAAC;gBAC1B,GAAG,CAAC,MAAM,CAAC,WAAY,CAAC,mBAAmB;oBACzC,MAAM,CAAC,WAAY,CAAC,mBAAmB,CAAC;gBAE1C,mEAAmE;gBACnE,mDAAmD;gBACnD,IAAI,CAAC,WAAW,GAAG,MAAA,GAAG,CAAC,MAAM,0CAAE,WAAW,CAAC;gBAE3C,OAAO,IAAI,CAAC,QAAQ,CAAI,IAAI,CAAC,CAAC;YAChC,CAAC;YACD,MAAM,GAAG,CAAC;QACZ,CAAC;IACH,CAAC;IAEO,KAAK,CAAC,eAAe,CAC3B,IAAmB,EACnB,GAAkB;QAElB,QAAQ,IAAI,CAAC,YAAY,EAAE,CAAC;YAC1B,KAAK,QAAQ;gBACX,OAAO,GAAG,CAAC,IAAI,CAAC;YAClB,KAAK,MAAM,CAAC,CAAC,CAAC;gBACZ,IAAI,IAAI,GAAG,MAAM,GAAG,CAAC,IAAI,EAAE,CAAC;gBAC5B,IAAI,CAAC;oBACH,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBAC1B,CAAC;gBAAC,WAAM,CAAC;oBACP,WAAW;gBACb,CAAC;gBACD,OAAO,IAAU,CAAC;YACpB,CAAC;YACD,KAAK,aAAa;gBAChB,OAAO,GAAG,CAAC,WAAW,EAAE,CAAC;YAC3B,KAAK,MAAM;gBACT,OAAO,GAAG,CAAC,IAAI,EAAE,CAAC;YACpB,KAAK,MAAM;gBACT,OAAO,GAAG,CAAC,IAAI,EAAE,CAAC;YACpB;gBACE,OAAO,IAAI,CAAC,8BAA8B,CAAC,GAAG,CAAC,CAAC;QACpD,CAAC;IACH,CAAC;IAED;;;OAGG;IACK,YAAY,CAAC,OAAsB;QACzC,MAAM,IAAI,GAAG,IAAA,gBAAM,EAAC,IAAI,EAAE,EAAE,EAAE,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACtD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;YACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC,CAAC;QACtC,CAAC;QAED,6CAA6C;QAC7C,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;QAC7C,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC;QAC3C,CAAC;QAED,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,gBAAgB,CAAC;QACvE,IAAI,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACvD,IAAI,qBAAqB,GAAG,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC/D,IAAI,qBAAqB,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;gBAC1C,qBAAqB,GAAG,qBAAqB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;YACzD,CAAC;YACD,MAAM,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC
,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC;YAC7D,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG,MAAM,GAAG,qBAAqB,CAAC;QACvD,CAAC;QAED,IAAI,OAAO,OAAO,CAAC,gBAAgB,KAAK,QAAQ,EAAE,CAAC;YACjD,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,gBAAgB,CAAC;QACvC,CAAC;QAED,IAAI,OAAO,OAAO,CAAC,YAAY,KAAK,QAAQ,EAAE,CAAC;YAC7C,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,YAAY,CAAC;QACrC,CAAC;QAED,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,IAAI,EAAE,CAAC;QAClC,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;YACd,MAAM,UAAU,GACd,OAAO,QAAQ,KAAK,WAAW;gBAC7B,CAAC,CAAC,KAAK;gBACP,CAAC,CAAC,CAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,IAAI,aAAY,QAAQ,CAAC;YACrC,IAAI,mBAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACjC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;YACxB,CAAC;iBAAM,IAAI,SAAS,EAAE,IAAI,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACrD,+CAA+C;gBAC/C,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;gBACtB,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,cAAc,CAAC,EAAE,CAAC;oBACrC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC;gBACpD,CAAC;YACH,CAAC;iBAAM,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBACzC,gEAAgE;gBAChE,kEAAkE;gBAClE,IAAI,CAAC,UAAU,EAAE,CAAC;oBAChB,IACE,SAAS,CAAC,IAAI,EAAE,cAAc,CAAC;wBAC/B,mCAAmC,EACnC,CAAC;wBACD,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAC/C,CAAC;yBAAM,CAAC;wBACN,iDAAiD;wBACjD,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,cAAc,CAAC,EAAE,CAAC;4BACrC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC;wBACpD,CAAC;wBACD,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBACxC,CAAC;gBACH,CAAC;YACH,CAAC;iBAAM,CAAC;gBACN,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;YACxB,CAAC;QACH,CAAC;QAED,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,IAAI,SAAS,CAAC;QACnD,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,YAAY,KAAK,MAAM,EAAE,CAAC;YAC5D,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,kBAAkB,CAAC;QAC9C,CAAC;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,KAAK,CAAC;QAEnC,MAAM,KAAK,GAAG,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QACjC,IAAI,KAAK,EAAE,CAAC;YACV,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;gBAC/B,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAC1C,CAAC;iBAAM,CAAC;gBACN,gDAAgD;gBAChD,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;oBAC1B,MAAM,SAAS,GAAG,IAAI,SAAG,CAAC,KAAK,CAAC,CAAC;oBACjC,IAAI,CAAC,KAAK,GAAG,IAAI,eAAe,CAAC;wBAC/B,IAAI,EAAE,SAAS,CAAC,IAAI;wBACpB,IAAI,EAAE,SAAS,CAAC,IAAI;wBACpB,QAAQ,EAAE,SAAS,CAAC,QAAQ;wBAC5B,IAAI,EAAE,IAAI,CAAC,IAAI;wBACf,GAAG,EAAE,IAAI,CAAC,GAAG;qBACd,CAAC,CAAC;gBACL,CAAC;qBAAM,CAAC;oBACN,IAAI,CAAC,KAAK,GAAG,IAAI,eAAe,CAAC,KAAK,CAAC,CAAC;gBAC1C,CAAC;gBACD,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,KAAK,EAAE,IAAI,CAAC,KAAM,CAAC,CAAC;YAC1C,CAAC;QACH,CAAC;aAAM,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC;YACjC,6BAA6B;YAC7B,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;gBAClC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAC7C,CAAC;iBAAM,CAAC;gBACN,IAAI,CAAC,KAAK,GAAG,IAAI,aAAU,CAAC;oBAC1B,IAAI,EAAE,IAAI,CAAC,IAAI;oBACf,GAAG,EAAE,IAAI,CAAC,GAAG;iBACd,CAAC,CAAC;gBACH,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,KAAM,CAAC,CAAC;YAC7C,CAAC;QACH,CAAC;QAED,IACE,OAAO,IAAI,CAAC,aAAa,KAAK,UAAU;YACxC,IAAI,CAAC,aAAa,KAAK,KAAK,EAC5B,CAAC;YACD,IAAI,CAAC,aAAa,GAAG,6BAAoB,CAAC;QAC5C,CAAC;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACK,cAAc,CAAC,MAAc;QACnC,OAAO,MAAM,IAAI,GAAG,IAAI,MAAM,GAAG,GAAG,CAAC;IACvC,CAAC;IAED;;;OAGG;IACK,gBAAgB,CAAC,MAA0C;QACjE,OAAO,qBAAE,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC9B,CAAC;IAEO,iBAAiB,CACvB,IAAmB,EACnB,GAAkB,EAClB,IAAQ;QAER,oDAAoD;QACpD,MAAM,OAAO,GAAG,EAAa,CAAC;QAC9B,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC
,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE;YACjC,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;QACvB,CAAC,CAAC,CAAC;QAEH,OAAO;YACL,MAAM,EAAE,IAAI;YACZ,IAAI,EAAE,IAAS;YACf,OAAO;YACP,MAAM,EAAE,GAAG,CAAC,MAAM;YAClB,UAAU,EAAE,GAAG,CAAC,UAAU;YAE1B,qBAAqB;YACrB,OAAO,EAAE;gBACP,WAAW,EAAE,GAAG,CAAC,GAAG;aACrB;SACF,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACK,KAAK,CAAC,8BAA8B,CAC1C,QAAuB;QAEvB,IAAI,WAAW,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QACvD,IAAI,WAAW,KAAK,IAAI,EAAE,CAAC;YACzB,oDAAoD;YACpD,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC;QACzB,CAAC;QACD,WAAW,GAAG,WAAW,CAAC,WAAW,EAAE,CAAC;QACxC,IAAI,WAAW,CAAC,QAAQ,CAAC,kBAAkB,CAAC,EAAE,CAAC;YAC7C,IAAI,IAAI,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;YACjC,IAAI,CAAC;gBACH,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;YAC1B,CAAC;YAAC,WAAM,CAAC;gBACP,WAAW;YACb,CAAC;YACD,OAAO,IAAU,CAAC;QACpB,CAAC;aAAM,IAAI,WAAW,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC;YACxC,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC;QACzB,CAAC;aAAM,CAAC;YACN,uFAAuF;YACvF,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC;QACzB,CAAC;IACH,CAAC;CACF;AApTD,wBAoTC"} \ No newline at end of file diff --git a/node_modules/gaxios/build/src/index.d.ts b/node_modules/gaxios/build/src/index.d.ts new file mode 100644 index 0000000..033aff5 --- /dev/null +++ b/node_modules/gaxios/build/src/index.d.ts @@ -0,0 +1,14 @@ +import { GaxiosOptions } from './common'; +import { Gaxios } from './gaxios'; +export { GaxiosError, GaxiosPromise, GaxiosResponse, Headers, RetryConfig, } from './common'; +export { Gaxios, GaxiosOptions }; +/** + * The default instance used when the `request` method is directly + * invoked. + */ +export declare const instance: Gaxios; +/** + * Make an HTTP request using the given options. + * @param opts Options for the request + */ +export declare function request(opts: GaxiosOptions): Promise>; diff --git a/node_modules/gaxios/build/src/index.js b/node_modules/gaxios/build/src/index.js new file mode 100644 index 0000000..f103f1a --- /dev/null +++ b/node_modules/gaxios/build/src/index.js @@ -0,0 +1,33 @@ +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0; +const gaxios_1 = require("./gaxios"); +Object.defineProperty(exports, "Gaxios", { enumerable: true, get: function () { return gaxios_1.Gaxios; } }); +var common_1 = require("./common"); +Object.defineProperty(exports, "GaxiosError", { enumerable: true, get: function () { return common_1.GaxiosError; } }); +/** + * The default instance used when the `request` method is directly + * invoked. + */ +exports.instance = new gaxios_1.Gaxios(); +/** + * Make an HTTP request using the given options. 
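+ *
+ * @example
+ * // Usage sketch added in review (not in the upstream file); the URL is a placeholder.
+ * const {request} = require('gaxios');
+ * // Inside an async function:
+ * const res = await request({url: 'https://example.com/api/items', responseType: 'json'});
+ * console.log(res.data);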
+ * @param opts Options for the request + */ +async function request(opts) { + return exports.instance.request(opts); +} +exports.request = request; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/gaxios/build/src/index.js.map b/node_modules/gaxios/build/src/index.js.map new file mode 100644 index 0000000..609c0bc --- /dev/null +++ b/node_modules/gaxios/build/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,4BAA4B;AAC5B,kEAAkE;AAClE,mEAAmE;AACnE,0CAA0C;AAC1C,EAAE;AACF,gDAAgD;AAChD,EAAE;AACF,sEAAsE;AACtE,oEAAoE;AACpE,2EAA2E;AAC3E,sEAAsE;AACtE,iCAAiC;;;AAGjC,qCAAgC;AASxB,uFATA,eAAM,OASA;AAPd,mCAMkB;AALhB,qGAAA,WAAW,OAAA;AAQb;;;GAGG;AACU,QAAA,QAAQ,GAAG,IAAI,eAAM,EAAE,CAAC;AAErC;;;GAGG;AACI,KAAK,UAAU,OAAO,CAAI,IAAmB;IAClD,OAAO,gBAAQ,CAAC,OAAO,CAAI,IAAI,CAAC,CAAC;AACnC,CAAC;AAFD,0BAEC"} \ No newline at end of file diff --git a/node_modules/gaxios/build/src/retry.d.ts b/node_modules/gaxios/build/src/retry.d.ts new file mode 100644 index 0000000..cfc5ee2 --- /dev/null +++ b/node_modules/gaxios/build/src/retry.d.ts @@ -0,0 +1,8 @@ +import { GaxiosError } from './common'; +export declare function getRetryConfig(err: GaxiosError): Promise<{ + shouldRetry: boolean; + config?: undefined; +} | { + shouldRetry: boolean; + config: import("./common").GaxiosOptions; +}>; diff --git a/node_modules/gaxios/build/src/retry.js b/node_modules/gaxios/build/src/retry.js new file mode 100644 index 0000000..2761c97 --- /dev/null +++ b/node_modules/gaxios/build/src/retry.js @@ -0,0 +1,139 @@ +"use strict"; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRetryConfig = void 0; +async function getRetryConfig(err) { + var _a; + let config = getConfig(err); + if (!err || !err.config || (!config && !err.config.retry)) { + return { shouldRetry: false }; + } + config = config || {}; + config.currentRetryAttempt = config.currentRetryAttempt || 0; + config.retry = + config.retry === undefined || config.retry === null ? 3 : config.retry; + config.httpMethodsToRetry = config.httpMethodsToRetry || [ + 'GET', + 'HEAD', + 'PUT', + 'OPTIONS', + 'DELETE', + ]; + config.noResponseRetries = + config.noResponseRetries === undefined || config.noResponseRetries === null + ? 2 + : config.noResponseRetries; + // If this wasn't in the list of status codes where we want + // to automatically retry, return. 
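+    // Review note (not upstream): with the defaults filled in above, a request
+    // is retried at most 3 times, only for GET/HEAD/PUT/OPTIONS/DELETE, with at
+    // most 2 retries when no response was received at all; when a response did
+    // arrive, only statuses in the ranges listed below (1xx, 429, 5xx) are
+    // retried. With the default retryDelay of 100 ms, the backoff computed
+    // further down works out to 100 ms, 500 ms and 1500 ms for retry attempts
+    // 0, 1 and 2.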
+ const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; + config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; + // Put the config back into the err + err.config.retryConfig = config; + // Determine if we should retry the request + const shouldRetryFn = config.shouldRetry || shouldRetryRequest; + if (!(await shouldRetryFn(err))) { + return { shouldRetry: false, config: err.config }; + } + // Calculate time to wait with exponential backoff. + // If this is the first retry, look for a configured retryDelay. + const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; + // Formula: retryDelay + ((2^c - 1 / 2) * 1000) + const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000; + // We're going to retry! Incremenent the counter. + err.config.retryConfig.currentRetryAttempt += 1; + // Create a promise that invokes the retry after the backOffDelay + const backoff = config.retryBackoff + ? config.retryBackoff(err, delay) + : new Promise(resolve => { + setTimeout(resolve, delay); + }); + // Notify the user if they added an `onRetryAttempt` handler + if (config.onRetryAttempt) { + config.onRetryAttempt(err); + } + // Return the promise in which recalls Gaxios to retry the request + await backoff; + return { shouldRetry: true, config: err.config }; +} +exports.getRetryConfig = getRetryConfig; +/** + * Determine based on config if we should retry the request. + * @param err The GaxiosError passed to the interceptor. + */ +function shouldRetryRequest(err) { + var _a; + const config = getConfig(err); + // node-fetch raises an AbortError if signaled: + // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { + return false; + } + // If there's no config, or retries are disabled, return. + if (!config || config.retry === 0) { + return false; + } + // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) + if (!err.response && + (config.currentRetryAttempt || 0) >= config.noResponseRetries) { + return false; + } + // Only retry with configured HttpMethods. + if (!err.config.method || + config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { + return false; + } + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + if (err.response && err.response.status) { + let isInRange = false; + for (const [min, max] of config.statusCodesToRetry) { + const status = err.response.status; + if (status >= min && status <= max) { + isInRange = true; + break; + } + } + if (!isInRange) { + return false; + } + } + // If we are out of retry attempts, return + config.currentRetryAttempt = config.currentRetryAttempt || 0; + if (config.currentRetryAttempt >= config.retry) { + return false; + } + return true; +} +/** + * Acquire the raxConfig object from an GaxiosError if available. + * @param err The Gaxios error with a config object. 
+ */ +function getConfig(err) { + if (err && err.config && err.config.retryConfig) { + return err.config.retryConfig; + } + return; +} +//# sourceMappingURL=retry.js.map \ No newline at end of file diff --git a/node_modules/gaxios/build/src/retry.js.map b/node_modules/gaxios/build/src/retry.js.map new file mode 100644 index 0000000..b6499d7 --- /dev/null +++ b/node_modules/gaxios/build/src/retry.js.map @@ -0,0 +1 @@ +{"version":3,"file":"retry.js","sourceRoot":"","sources":["../../src/retry.ts"],"names":[],"mappings":";AAAA,4BAA4B;AAC5B,kEAAkE;AAClE,mEAAmE;AACnE,0CAA0C;AAC1C,EAAE;AACF,gDAAgD;AAChD,EAAE;AACF,sEAAsE;AACtE,oEAAoE;AACpE,2EAA2E;AAC3E,sEAAsE;AACtE,iCAAiC;;;AAI1B,KAAK,UAAU,cAAc,CAAC,GAAgB;;IACnD,IAAI,MAAM,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC;IAC5B,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC;QAC1D,OAAO,EAAC,WAAW,EAAE,KAAK,EAAC,CAAC;IAC9B,CAAC;IACD,MAAM,GAAG,MAAM,IAAI,EAAE,CAAC;IACtB,MAAM,CAAC,mBAAmB,GAAG,MAAM,CAAC,mBAAmB,IAAI,CAAC,CAAC;IAC7D,MAAM,CAAC,KAAK;QACV,MAAM,CAAC,KAAK,KAAK,SAAS,IAAI,MAAM,CAAC,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;IACzE,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,kBAAkB,IAAI;QACvD,KAAK;QACL,MAAM;QACN,KAAK;QACL,SAAS;QACT,QAAQ;KACT,CAAC;IACF,MAAM,CAAC,iBAAiB;QACtB,MAAM,CAAC,iBAAiB,KAAK,SAAS,IAAI,MAAM,CAAC,iBAAiB,KAAK,IAAI;YACzE,CAAC,CAAC,CAAC;YACH,CAAC,CAAC,MAAM,CAAC,iBAAiB,CAAC;IAE/B,2DAA2D;IAC3D,kCAAkC;IAClC,MAAM,WAAW,GAAG;QAClB,0DAA0D;QAC1D,wDAAwD;QACxD,+BAA+B;QAC/B,gCAAgC;QAChC,qCAAqC;QACrC,oCAAoC;QACpC,8BAA8B;QAC9B,CAAC,GAAG,EAAE,GAAG,CAAC;QACV,CAAC,GAAG,EAAE,GAAG,CAAC;QACV,CAAC,GAAG,EAAE,GAAG,CAAC;KACX,CAAC;IACF,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,kBAAkB,IAAI,WAAW,CAAC;IAErE,mCAAmC;IACnC,GAAG,CAAC,MAAM,CAAC,WAAW,GAAG,MAAM,CAAC;IAEhC,2CAA2C;IAC3C,MAAM,aAAa,GAAG,MAAM,CAAC,WAAW,IAAI,kBAAkB,CAAC;IAC/D,IAAI,CAAC,CAAC,MAAM,aAAa,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC;QAChC,OAAO,EAAC,WAAW,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAC,CAAC;IAClD,CAAC;IAED,mDAAmD;IACnD,gEAAgE;IAChE,MAAM,UAAU,GAAG,MAAM,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAA,MAAM,CAAC,UAAU,mCAAI,GAAG,CAAC;IAC7E,+CAA+C;IAC/C,MAAM,KAAK,GACT,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC;IAE1E,kDAAkD;IAClD,GAAG,CAAC,MAAM,CAAC,WAAY,CAAC,mBAAoB,IAAI,CAAC,CAAC;IAElD,iEAAiE;IACjE,MAAM,OAAO,GAAG,MAAM,CAAC,YAAY;QACjC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,KAAK,CAAC;QACjC,CAAC,CAAC,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE;YACpB,UAAU,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;QAC7B,CAAC,CAAC,CAAC;IAEP,4DAA4D;IAC5D,IAAI,MAAM,CAAC,cAAc,EAAE,CAAC;QAC1B,MAAM,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC;IAC7B,CAAC;IAED,kEAAkE;IAClE,MAAM,OAAO,CAAC;IACd,OAAO,EAAC,WAAW,EAAE,IAAI,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAC,CAAC;AACjD,CAAC;AAvED,wCAuEC;AAED;;;GAGG;AACH,SAAS,kBAAkB,CAAC,GAAgB;;IAC1C,MAAM,MAAM,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC;IAE9B,+CAA+C;IAC/C,6EAA6E;IAC7E,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,IAAI,CAAA,MAAA,GAAG,CAAC,KAAK,0CAAE,IAAI,MAAK,YAAY,EAAE,CAAC;QAClE,OAAO,KAAK,CAAC;IACf,CAAC;IAED,yDAAyD;IACzD,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,KAAK,KAAK,CAAC,EAAE,CAAC;QAClC,OAAO,KAAK,CAAC;IACf,CAAC;IAED,kEAAkE;IAClE,IACE,CAAC,GAAG,CAAC,QAAQ;QACb,CAAC,MAAM,CAAC,mBAAmB,IAAI,CAAC,CAAC,IAAI,MAAM,CAAC,iBAAkB,EAC9D,CAAC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,0CAA0C;IAC1C,IACE,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM;QAClB,MAAM,CAAC,kBAAmB,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,GAAG,CAAC,EACvE,CAAC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,2DAA2D;IAC3D,kCAAkC;IAClC,IAAI,GAAG,CAAC,QAAQ,IAAI,GAAG,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC;QACxC,IAAI,S
AAS,GAAG,KAAK,CAAC;QACtB,KAAK,MAAM,CAAC,GAAG,EAAE,GAAG,CAAC,IAAI,MAAM,CAAC,kBAAmB,EAAE,CAAC;YACpD,MAAM,MAAM,GAAG,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC;YACnC,IAAI,MAAM,IAAI,GAAG,IAAI,MAAM,IAAI,GAAG,EAAE,CAAC;gBACnC,SAAS,GAAG,IAAI,CAAC;gBACjB,MAAM;YACR,CAAC;QACH,CAAC;QACD,IAAI,CAAC,SAAS,EAAE,CAAC;YACf,OAAO,KAAK,CAAC;QACf,CAAC;IACH,CAAC;IAED,0CAA0C;IAC1C,MAAM,CAAC,mBAAmB,GAAG,MAAM,CAAC,mBAAmB,IAAI,CAAC,CAAC;IAC7D,IAAI,MAAM,CAAC,mBAAmB,IAAI,MAAM,CAAC,KAAM,EAAE,CAAC;QAChD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;GAGG;AACH,SAAS,SAAS,CAAC,GAAgB;IACjC,IAAI,GAAG,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC;QAChD,OAAO,GAAG,CAAC,MAAM,CAAC,WAAW,CAAC;IAChC,CAAC;IACD,OAAO;AACT,CAAC"} \ No newline at end of file diff --git a/node_modules/gaxios/build/src/util.d.ts b/node_modules/gaxios/build/src/util.d.ts new file mode 100644 index 0000000..53d5b74 --- /dev/null +++ b/node_modules/gaxios/build/src/util.d.ts @@ -0,0 +1,4 @@ +export declare const pkg: { + name: string; + version: string; +}; diff --git a/node_modules/gaxios/build/src/util.js b/node_modules/gaxios/build/src/util.js new file mode 100644 index 0000000..be41844 --- /dev/null +++ b/node_modules/gaxios/build/src/util.js @@ -0,0 +1,17 @@ +"use strict"; +// Copyright 2023 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
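+
+// Review note (not part of the upstream file): this module just re-exports the
+// library's package.json. common.js stores `pkg.version` under
+// GAXIOS_ERROR_SYMBOL so that `instanceof GaxiosError` keeps working when more
+// than one copy of gaxios ends up in a dependency tree.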
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.pkg = void 0; +exports.pkg = require('../../package.json'); +//# sourceMappingURL=util.js.map \ No newline at end of file diff --git a/node_modules/gaxios/build/src/util.js.map b/node_modules/gaxios/build/src/util.js.map new file mode 100644 index 0000000..5750839 --- /dev/null +++ b/node_modules/gaxios/build/src/util.js.map @@ -0,0 +1 @@ +{"version":3,"file":"util.js","sourceRoot":"","sources":["../../src/util.ts"],"names":[],"mappings":";AAAA,4BAA4B;AAC5B,kEAAkE;AAClE,mEAAmE;AACnE,0CAA0C;AAC1C,EAAE;AACF,gDAAgD;AAChD,EAAE;AACF,sEAAsE;AACtE,oEAAoE;AACpE,2EAA2E;AAC3E,sEAAsE;AACtE,iCAAiC;;;AAEpB,QAAA,GAAG,GAGZ,OAAO,CAAC,oBAAoB,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/gaxios/package.json b/node_modules/gaxios/package.json new file mode 100644 index 0000000..1831573 --- /dev/null +++ b/node_modules/gaxios/package.json @@ -0,0 +1,95 @@ +{ + "name": "gaxios", + "version": "6.3.0", + "description": "A simple common HTTP client specifically for Google APIs and services.", + "main": "build/src/index.js", + "types": "build/src/index.d.ts", + "files": [ + "build/src" + ], + "scripts": { + "lint": "gts check", + "test": "c8 mocha build/test", + "presystem-test": "npm run compile", + "system-test": "mocha build/system-test --timeout 80000", + "compile": "tsc -p .", + "fix": "gts fix", + "prepare": "npm run compile", + "pretest": "npm run compile", + "webpack": "webpack", + "prebrowser-test": "npm run compile", + "browser-test": "node build/browser-test/browser-test-runner.js", + "docs": "compodoc src/", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "prelint": "cd samples; npm link ../; npm install", + "clean": "gts clean", + "precompile": "gts clean" + }, + "repository": "googleapis/gaxios", + "keywords": [ + "google" + ], + "engines": { + "node": ">=14" + }, + "author": "Google, LLC", + "license": "Apache-2.0", + "devDependencies": { + "@babel/plugin-proposal-private-methods": "^7.18.6", + "@compodoc/compodoc": "^1.1.9", + "@types/cors": "^2.8.6", + "@types/express": "^4.16.1", + "@types/extend": "^3.0.1", + "@types/mocha": "^9.0.0", + "@types/multiparty": "0.0.36", + "@types/mv": "^2.1.0", + "@types/ncp": "^2.0.1", + "@types/node": "^20.0.0", + "@types/node-fetch": "^2.5.7", + "@types/sinon": "^17.0.0", + "@types/tmp": "0.2.6", + "@types/uuid": "^9.0.0", + "abort-controller": "^3.0.0", + "assert": "^2.0.0", + "browserify": "^17.0.0", + "c8": "^8.0.0", + "cors": "^2.8.5", + "execa": "^5.0.0", + "express": "^4.16.4", + "form-data": "^4.0.0", + "gts": "^5.0.0", + "is-docker": "^2.0.0", + "karma": "^6.0.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-firefox-launcher": "^2.0.0", + "karma-mocha": "^2.0.0", + "karma-remap-coverage": "^0.1.5", + "karma-sourcemap-loader": "^0.4.0", + "karma-webpack": "^5.0.0", + "linkinator": "^4.0.0", + "mocha": "^8.0.0", + "multiparty": "^4.2.1", + "mv": "^2.1.1", + "ncp": "^2.0.0", + "nock": "^13.0.0", + "null-loader": "^4.0.0", + "puppeteer": "^21.0.0", + "sinon": "^17.0.0", + "stream-browserify": "^3.0.0", + "tmp": "0.2.1", + "ts-loader": "^8.0.0", + "typescript": "^5.1.6", + "uuid": "^9.0.0", + "webpack": "^5.35.0", + "webpack-cli": "^4.0.0" + }, + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9" + } +} diff --git a/node_modules/gcp-metadata/CHANGELOG.md 
b/node_modules/gcp-metadata/CHANGELOG.md new file mode 100644 index 0000000..a429861 --- /dev/null +++ b/node_modules/gcp-metadata/CHANGELOG.md @@ -0,0 +1,455 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/gcp-metadata?activeTab=versions + +## [6.1.0](https://github.com/googleapis/gcp-metadata/compare/v6.0.0...v6.1.0) (2023-11-10) + + +### Features + +* Add `universe` metadata handler ([#596](https://github.com/googleapis/gcp-metadata/issues/596)) ([0c02016](https://github.com/googleapis/gcp-metadata/commit/0c02016756754cddde6c4402fac1ceb6a318e82d)) +* Bulk Metadata Requests ([#598](https://github.com/googleapis/gcp-metadata/issues/598)) ([0a51378](https://github.com/googleapis/gcp-metadata/commit/0a513788537173570f9910d368dd36717de7233b)) + + +### Bug Fixes + +* Repo Metadata ([#595](https://github.com/googleapis/gcp-metadata/issues/595)) ([470a872](https://github.com/googleapis/gcp-metadata/commit/470a8722df2b2fb2da1b076b73414d2e28a3ff4e)) + +## [6.0.0](https://github.com/googleapis/gcp-metadata/compare/v5.3.0...v6.0.0) (2023-07-17) + + +### ⚠ BREAKING CHANGES + +* upgrade to Node 14, and update gaxios, ts, and gts ([#571](https://github.com/googleapis/gcp-metadata/issues/571)) + +### Miscellaneous Chores + +* Upgrade to Node 14, and update gaxios, ts, and gts ([#571](https://github.com/googleapis/gcp-metadata/issues/571)) ([88ff3ff](https://github.com/googleapis/gcp-metadata/commit/88ff3ff3d9bd8be32126e7fe76cbf33e401f8db7)) + +## [5.3.0](https://github.com/googleapis/gcp-metadata/compare/v5.2.0...v5.3.0) (2023-06-28) + + +### Features + +* Metadata Server Detection Configuration ([#562](https://github.com/googleapis/gcp-metadata/issues/562)) ([8c7c715](https://github.com/googleapis/gcp-metadata/commit/8c7c715f1fc22ad65554a745a93915713ca6698f)) + +## [5.2.0](https://github.com/googleapis/gcp-metadata/compare/v5.1.0...v5.2.0) (2023-01-03) + + +### Features + +* Export `gcp-residency` tools ([#552](https://github.com/googleapis/gcp-metadata/issues/552)) ([ba9ae24](https://github.com/googleapis/gcp-metadata/commit/ba9ae24331b53199f81e97b6a88414050cfcf546)) + +## [5.1.0](https://github.com/googleapis/gcp-metadata/compare/v5.0.1...v5.1.0) (2022-12-07) + + +### Features + +* Extend GCP Residency Detection Support ([#528](https://github.com/googleapis/gcp-metadata/issues/528)) ([2b35bb0](https://github.com/googleapis/gcp-metadata/commit/2b35bb0e6fb1a18294aeeebba91a6bf7b400385a)) + +## [5.0.1](https://github.com/googleapis/gcp-metadata/compare/v5.0.0...v5.0.1) (2022-09-09) + + +### Bug Fixes + +* Remove pip install statements ([#1546](https://github.com/googleapis/gcp-metadata/issues/1546)) ([#529](https://github.com/googleapis/gcp-metadata/issues/529)) ([064c64c](https://github.com/googleapis/gcp-metadata/commit/064c64cec160ffe645e6946a5125960e3e269d7f)) + +## [5.0.0](https://github.com/googleapis/gcp-metadata/compare/v4.3.1...v5.0.0) (2022-04-22) + + +### ⚠ BREAKING CHANGES + +* drop node 10, update typescript to 4.6.3 (#519) + +### Build System + +* drop node 10, update typescript to 4.6.3 ([#519](https://github.com/googleapis/gcp-metadata/issues/519)) ([688749b](https://github.com/googleapis/gcp-metadata/commit/688749bc50407f3cd127a0b10ae09487d6fe5aea)) + +### [4.3.1](https://www.github.com/googleapis/gcp-metadata/compare/v4.3.0...v4.3.1) (2021-09-02) + + +### Bug Fixes + +* **build:** switch primary branch to main ([#481](https://www.github.com/googleapis/gcp-metadata/issues/481)) 
([8a7965c](https://www.github.com/googleapis/gcp-metadata/commit/8a7965c47c077ef766e4b416358630c0b24b0af2)) + +## [4.3.0](https://www.github.com/googleapis/gcp-metadata/compare/v4.2.1...v4.3.0) (2021-06-10) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#450](https://www.github.com/googleapis/gcp-metadata/issues/450)) ([6a0f9ad](https://www.github.com/googleapis/gcp-metadata/commit/6a0f9ad09b6d16370d08c5d60541ce3ef64a9f97)) + +### [4.2.1](https://www.github.com/googleapis/gcp-metadata/compare/v4.2.0...v4.2.1) (2020-10-29) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v4 ([#420](https://www.github.com/googleapis/gcp-metadata/issues/420)) ([b99fb07](https://www.github.com/googleapis/gcp-metadata/commit/b99fb0764b8dbb8b083f73b8007816914db4f09a)) + +## [4.2.0](https://www.github.com/googleapis/gcp-metadata/compare/v4.1.4...v4.2.0) (2020-09-15) + + +### Features + +* add support for GCE_METADATA_HOST environment variable ([#406](https://www.github.com/googleapis/gcp-metadata/issues/406)) ([eaf128a](https://www.github.com/googleapis/gcp-metadata/commit/eaf128ad5afc4357cde72d19b017b9474c070fea)) + +### [4.1.4](https://www.github.com/googleapis/gcp-metadata/compare/v4.1.3...v4.1.4) (2020-07-15) + + +### Bug Fixes + +* **deps:** update dependency json-bigint to v1 ([#382](https://www.github.com/googleapis/gcp-metadata/issues/382)) ([ab4d8c3](https://www.github.com/googleapis/gcp-metadata/commit/ab4d8c3022903206d433bafc47c27815c6f85e36)) + +### [4.1.3](https://www.github.com/googleapis/gcp-metadata/compare/v4.1.2...v4.1.3) (2020-07-13) + + +### Bug Fixes + +* **deps:** update dependency json-bigint to ^0.4.0 ([#378](https://www.github.com/googleapis/gcp-metadata/issues/378)) ([b214280](https://www.github.com/googleapis/gcp-metadata/commit/b2142807928c8c032509277900d35fccd1023f0f)) + +### [4.1.2](https://www.github.com/googleapis/gcp-metadata/compare/v4.1.1...v4.1.2) (2020-07-10) + + +### Bug Fixes + +* **deps:** roll back dependency gcp-metadata to ^4.1.0 ([#373](https://www.github.com/googleapis/gcp-metadata/issues/373)) ([a45adef](https://www.github.com/googleapis/gcp-metadata/commit/a45adefd92418faa08c8a5014cedb844d1eb3ae6)) + +### [4.1.1](https://www.github.com/googleapis/gcp-metadata/compare/v4.1.0...v4.1.1) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#371](https://www.github.com/googleapis/gcp-metadata/issues/371)) ([5b4bb1c](https://www.github.com/googleapis/gcp-metadata/commit/5b4bb1c85e67e3ef0a6d1ec2ea316d560e03092f)) + +## [4.1.0](https://www.github.com/googleapis/gcp-metadata/compare/v4.0.1...v4.1.0) (2020-05-05) + + +### Features + +* Introduces the GCE_METADATA_IP to allow using a different IP address for the GCE metadata server. 
([#346](https://www.github.com/googleapis/gcp-metadata/issues/346)) ([ec0f82d](https://www.github.com/googleapis/gcp-metadata/commit/ec0f82d022b4b3aac95e94ee1d8e53cfac3b14a4)) + + +### Bug Fixes + +* do not check secondary host if GCE_METADATA_IP set ([#352](https://www.github.com/googleapis/gcp-metadata/issues/352)) ([64fa7d6](https://www.github.com/googleapis/gcp-metadata/commit/64fa7d68cbb76f455a3bfdcb27d58e7775eb789a)) +* warn rather than throwing when we fail to connect to metadata server ([#351](https://www.github.com/googleapis/gcp-metadata/issues/351)) ([754a6c0](https://www.github.com/googleapis/gcp-metadata/commit/754a6c07d1a72615cbb5ebf9ee04475a9a12f1c0)) + +### [4.0.1](https://www.github.com/googleapis/gcp-metadata/compare/v4.0.0...v4.0.1) (2020-04-14) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v3 ([#326](https://www.github.com/googleapis/gcp-metadata/issues/326)) ([5667178](https://www.github.com/googleapis/gcp-metadata/commit/5667178429baff71ad5dab2a96f97f27b2106d57)) +* apache license URL ([#468](https://www.github.com/googleapis/gcp-metadata/issues/468)) ([#336](https://www.github.com/googleapis/gcp-metadata/issues/336)) ([195dcd2](https://www.github.com/googleapis/gcp-metadata/commit/195dcd2d227ba496949e7ec0dcd77e5b9269066c)) + +## [4.0.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.5.0...v4.0.0) (2020-03-19) + + +### ⚠ BREAKING CHANGES + +* typescript@3.7.x has breaking changes; compiler now targets es2015 +* drops Node 8 from engines field (#315) + +### Features + +* drops Node 8 from engines field ([#315](https://www.github.com/googleapis/gcp-metadata/issues/315)) ([acb6233](https://www.github.com/googleapis/gcp-metadata/commit/acb62337e8ba7f0b259ae4e553f19c5786207d84)) + + +### Build System + +* switch to latest typescirpt/gts ([#317](https://www.github.com/googleapis/gcp-metadata/issues/317)) ([fbb7158](https://www.github.com/googleapis/gcp-metadata/commit/fbb7158be62c9f1949b69079e35113be1e10495c)) + +## [3.5.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.4.0...v3.5.0) (2020-03-03) + + +### Features + +* add ECONNREFUSED to list of known errors for isAvailable() ([#309](https://www.github.com/googleapis/gcp-metadata/issues/309)) ([17ff6ea](https://www.github.com/googleapis/gcp-metadata/commit/17ff6ea361d02de31463532d4ab4040bf6276e0b)) + +## [3.4.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.3.1...v3.4.0) (2020-02-24) + + +### Features + +* significantly increase timeout if GCF environment detected ([#300](https://www.github.com/googleapis/gcp-metadata/issues/300)) ([8e507c6](https://www.github.com/googleapis/gcp-metadata/commit/8e507c645f69a11f508884b3181dc4414e579fcc)) + +### [3.3.1](https://www.github.com/googleapis/gcp-metadata/compare/v3.3.0...v3.3.1) (2020-01-30) + + +### Bug Fixes + +* **isAvailable:** handle EHOSTDOWN and EHOSTUNREACH error codes ([#291](https://www.github.com/googleapis/gcp-metadata/issues/291)) ([ba8d9f5](https://www.github.com/googleapis/gcp-metadata/commit/ba8d9f50eac6cf8b439c1b66c48ace146c75f6e2)) + +## [3.3.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.2.3...v3.3.0) (2019-12-16) + + +### Features + +* add environment variable for configuring environment detection ([#275](https://www.github.com/googleapis/gcp-metadata/issues/275)) ([580cfa4](https://www.github.com/googleapis/gcp-metadata/commit/580cfa4a5f5d0041aa09ae85cfc5a4575dd3957f)) +* cache response from isAvailable() method ([#274](https://www.github.com/googleapis/gcp-metadata/issues/274)) 
([a05e13f](https://www.github.com/googleapis/gcp-metadata/commit/a05e13f1d1d61b1f9b9b1703bc37cdbdc022c93b)) + + +### Bug Fixes + +* fastFailMetadataRequest should not reject, if response already happened ([#273](https://www.github.com/googleapis/gcp-metadata/issues/273)) ([a6590c4](https://www.github.com/googleapis/gcp-metadata/commit/a6590c4fd8bc2dff3995c83d4c9175d5bd9f5e4a)) + +### [3.2.3](https://www.github.com/googleapis/gcp-metadata/compare/v3.2.2...v3.2.3) (2019-12-12) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([e4bf622](https://www.github.com/googleapis/gcp-metadata/commit/e4bf622e6654a51ddffc0921a15250130591db2f)) + +### [3.2.2](https://www.github.com/googleapis/gcp-metadata/compare/v3.2.1...v3.2.2) (2019-11-13) + + +### Bug Fixes + +* **docs:** add jsdoc-region-tag plugin ([#264](https://www.github.com/googleapis/gcp-metadata/issues/264)) ([af8362b](https://www.github.com/googleapis/gcp-metadata/commit/af8362b5a35d270af00cb3696bbf7344810e9b0c)) + +### [3.2.1](https://www.github.com/googleapis/gcp-metadata/compare/v3.2.0...v3.2.1) (2019-11-08) + + +### Bug Fixes + +* **deps:** update gaxios ([#257](https://www.github.com/googleapis/gcp-metadata/issues/257)) ([ba6e0b6](https://www.github.com/googleapis/gcp-metadata/commit/ba6e0b668635b4aa4ed10535ff021c02b2edf5ea)) + +## [3.2.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.1.0...v3.2.0) (2019-10-10) + + +### Features + +* add DEBUG_AUTH for digging into authentication issues ([#254](https://www.github.com/googleapis/gcp-metadata/issues/254)) ([804156d](https://www.github.com/googleapis/gcp-metadata/commit/804156d)) + +## [3.1.0](https://www.github.com/googleapis/gcp-metadata/compare/v3.0.0...v3.1.0) (2019-10-07) + + +### Features + +* don't throw on ENETUNREACH ([#250](https://www.github.com/googleapis/gcp-metadata/issues/250)) ([88f2101](https://www.github.com/googleapis/gcp-metadata/commit/88f2101)) + +## [3.0.0](https://www.github.com/googleapis/gcp-metadata/compare/v2.0.4...v3.0.0) (2019-09-17) + + +### ⚠ BREAKING CHANGES + +* isAvailable now tries both DNS and IP, choosing whichever responds first (#239) + +### Features + +* isAvailable now tries both DNS and IP, choosing whichever responds first ([#239](https://www.github.com/googleapis/gcp-metadata/issues/239)) ([25bc116](https://www.github.com/googleapis/gcp-metadata/commit/25bc116)) + +### [2.0.4](https://www.github.com/googleapis/gcp-metadata/compare/v2.0.3...v2.0.4) (2019-09-13) + + +### Bug Fixes + +* IP address takes 15 seconds to timeout, vs., metadata returning immediately ([#235](https://www.github.com/googleapis/gcp-metadata/issues/235)) ([d04207b](https://www.github.com/googleapis/gcp-metadata/commit/d04207b)) +* use 3s timeout rather than 15 default ([#237](https://www.github.com/googleapis/gcp-metadata/issues/237)) ([231ca5c](https://www.github.com/googleapis/gcp-metadata/commit/231ca5c)) + +### [2.0.3](https://www.github.com/googleapis/gcp-metadata/compare/v2.0.2...v2.0.3) (2019-09-12) + + +### Bug Fixes + +* use IP for metadata server ([#233](https://www.github.com/googleapis/gcp-metadata/issues/233)) ([20a15cb](https://www.github.com/googleapis/gcp-metadata/commit/20a15cb)) + +### [2.0.2](https://www.github.com/googleapis/gcp-metadata/compare/v2.0.1...v2.0.2) (2019-08-26) + + +### Bug Fixes + +* allow calls with no request, add JSON proto ([#224](https://www.github.com/googleapis/gcp-metadata/issues/224)) ([dc758b1](https://www.github.com/googleapis/gcp-metadata/commit/dc758b1)) + +### 
[2.0.1](https://www.github.com/googleapis/gcp-metadata/compare/v2.0.0...v2.0.1) (2019-06-26) + + +### Bug Fixes + +* **docs:** make anchors work in jsdoc ([#212](https://www.github.com/googleapis/gcp-metadata/issues/212)) ([9174b43](https://www.github.com/googleapis/gcp-metadata/commit/9174b43)) + +## [2.0.0](https://www.github.com/googleapis/gcp-metadata/compare/v1.0.0...v2.0.0) (2019-05-07) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v2 ([#191](https://www.github.com/googleapis/gcp-metadata/issues/191)) ([ac8c1ef](https://www.github.com/googleapis/gcp-metadata/commit/ac8c1ef)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#194](https://www.github.com/googleapis/gcp-metadata/issues/194)) ([97c23c8](https://www.github.com/googleapis/gcp-metadata/commit/97c23c8)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#194) + +## v1.0.0 + +02-14-2019 16:00 PST + +### Bug Fixes +- fix: ask gaxios for text and not json ([#152](https://github.com/googleapis/gcp-metadata/pull/152)) + +### Documentation +- docs: update links in contrib guide ([#168](https://github.com/googleapis/gcp-metadata/pull/168)) +- docs: add lint/fix example to contributing guide ([#160](https://github.com/googleapis/gcp-metadata/pull/160)) + +### Internal / Testing Changes +- build: use linkinator for docs test ([#166](https://github.com/googleapis/gcp-metadata/pull/166)) +- chore(deps): update dependency @types/tmp to v0.0.34 ([#167](https://github.com/googleapis/gcp-metadata/pull/167)) +- build: create docs test npm scripts ([#165](https://github.com/googleapis/gcp-metadata/pull/165)) +- test: run system tests on GCB ([#157](https://github.com/googleapis/gcp-metadata/pull/157)) +- build: test using @grpc/grpc-js in CI ([#164](https://github.com/googleapis/gcp-metadata/pull/164)) +- chore: move CONTRIBUTING.md to root ([#162](https://github.com/googleapis/gcp-metadata/pull/162)) +- chore(deps): update dependency gcx to v0.1.1 ([#159](https://github.com/googleapis/gcp-metadata/pull/159)) +- chore(deps): update dependency gcx to v0.1.0 ([#158](https://github.com/googleapis/gcp-metadata/pull/158)) +- chore(deps): update dependency gcx to v0.0.4 ([#155](https://github.com/googleapis/gcp-metadata/pull/155)) +- chore(deps): update dependency googleapis to v37 ([#156](https://github.com/googleapis/gcp-metadata/pull/156)) +- build: ignore googleapis.com in doc link check ([#153](https://github.com/googleapis/gcp-metadata/pull/153)) +- build: check broken links in generated docs ([#149](https://github.com/googleapis/gcp-metadata/pull/149)) +- chore(build): inject yoshi automation key ([#148](https://github.com/googleapis/gcp-metadata/pull/148)) + +## v0.9.3 + +12-10-2018 16:16 PST + +### Dependencies +- chore(deps): update dependency googleapis to v36 ([#135](https://github.com/googleapis/gcp-metadata/pull/135)) +- chore(deps): use gaxios for http requests ([#121](https://github.com/googleapis/gcp-metadata/pull/121)) +- chore(deps): update dependency gts to ^0.9.0 ([#123](https://github.com/googleapis/gcp-metadata/pull/123)) + +### Internal / Testing Changes +- fix(build): fix Kokoro release script ([#141](https://github.com/googleapis/gcp-metadata/pull/141)) +- Release v0.9.2 ([#140](https://github.com/googleapis/gcp-metadata/pull/140)) +- build: add Kokoro configs for autorelease ([#138](https://github.com/googleapis/gcp-metadata/pull/138)) +- Release gcp-metadata v0.9.1 ([#139](https://github.com/googleapis/gcp-metadata/pull/139)) +- chore: always nyc report before calling codecov 
([#134](https://github.com/googleapis/gcp-metadata/pull/134)) +- chore: nyc ignore build/test by default ([#133](https://github.com/googleapis/gcp-metadata/pull/133)) +- Sync repo build files ([#131](https://github.com/googleapis/gcp-metadata/pull/131)) +- fix(build): fix system key decryption ([#128](https://github.com/googleapis/gcp-metadata/pull/128)) +- refactor: use execa, move post install test to system ([#127](https://github.com/googleapis/gcp-metadata/pull/127)) +- chore: add a synth.metadata +- test: add a system test ([#126](https://github.com/googleapis/gcp-metadata/pull/126)) +- chore: update eslintignore config ([#122](https://github.com/googleapis/gcp-metadata/pull/122)) +- chore: use latest npm on Windows ([#120](https://github.com/googleapis/gcp-metadata/pull/120)) +- chore: update CircleCI config ([#119](https://github.com/googleapis/gcp-metadata/pull/119)) +- chore: include build in eslintignore ([#115](https://github.com/googleapis/gcp-metadata/pull/115)) + +## v0.9.2 + +12-10-2018 14:01 PST + +- chore(deps): update dependency googleapis to v36 ([#135](https://github.com/googleapis/gcp-metadata/pull/135)) +- chore: always nyc report before calling codecov ([#134](https://github.com/googleapis/gcp-metadata/pull/134)) +- chore: nyc ignore build/test by default ([#133](https://github.com/googleapis/gcp-metadata/pull/133)) +- chore: Re-generated to pick up changes in the API or client library generator. ([#131](https://github.com/googleapis/gcp-metadata/pull/131)) +- fix(build): fix system key decryption ([#128](https://github.com/googleapis/gcp-metadata/pull/128)) +- chore(deps): use gaxios for http requests ([#121](https://github.com/googleapis/gcp-metadata/pull/121)) +- refactor: use execa, move post install test to system ([#127](https://github.com/googleapis/gcp-metadata/pull/127)) +- chore: add a synth.metadata +- test: add a system test ([#126](https://github.com/googleapis/gcp-metadata/pull/126)) +- chore(deps): update dependency gts to ^0.9.0 ([#123](https://github.com/googleapis/gcp-metadata/pull/123)) +- chore: update eslintignore config ([#122](https://github.com/googleapis/gcp-metadata/pull/122)) +- chore: use latest npm on Windows ([#120](https://github.com/googleapis/gcp-metadata/pull/120)) +- chore: update CircleCI config ([#119](https://github.com/googleapis/gcp-metadata/pull/119)) +- chore: include build in eslintignore ([#115](https://github.com/googleapis/gcp-metadata/pull/115)) +- build: add Kokoro configs for autorelease ([#138](https://github.com/googleapis/gcp-metadata/pull/138)) + +## v0.9.1 + +12-10-2018 11:53 PST + +- chore(deps): update dependency googleapis to v36 ([#135](https://github.com/googleapis/gcp-metadata/pull/135)) +- chore: always nyc report before calling codecov ([#134](https://github.com/googleapis/gcp-metadata/pull/134)) +- chore: nyc ignore build/test by default ([#133](https://github.com/googleapis/gcp-metadata/pull/133)) +- chore: Re-generated to pick up changes in the API or client library generator. 
([#131](https://github.com/googleapis/gcp-metadata/pull/131)) +- fix(build): fix system key decryption ([#128](https://github.com/googleapis/gcp-metadata/pull/128)) +- chore(deps): use gaxios for http requests ([#121](https://github.com/googleapis/gcp-metadata/pull/121)) +- refactor: use execa, move post install test to system ([#127](https://github.com/googleapis/gcp-metadata/pull/127)) +- chore: add a synth.metadata +- test: add a system test ([#126](https://github.com/googleapis/gcp-metadata/pull/126)) +- chore(deps): update dependency gts to ^0.9.0 ([#123](https://github.com/googleapis/gcp-metadata/pull/123)) +- chore: update eslintignore config ([#122](https://github.com/googleapis/gcp-metadata/pull/122)) +- chore: use latest npm on Windows ([#120](https://github.com/googleapis/gcp-metadata/pull/120)) +- chore: update CircleCI config ([#119](https://github.com/googleapis/gcp-metadata/pull/119)) +- chore: include build in eslintignore ([#115](https://github.com/googleapis/gcp-metadata/pull/115)) + +## v0.9.0 + +10-26-2018 13:10 PDT + +- feat: allow custom headers ([#109](https://github.com/googleapis/gcp-metadata/pull/109)) +- chore: update issue templates ([#108](https://github.com/googleapis/gcp-metadata/pull/108)) +- chore: remove old issue template ([#106](https://github.com/googleapis/gcp-metadata/pull/106)) +- build: run tests on node11 ([#105](https://github.com/googleapis/gcp-metadata/pull/105)) +- chores(build): do not collect sponge.xml from windows builds ([#104](https://github.com/googleapis/gcp-metadata/pull/104)) +- chores(build): run codecov on continuous builds ([#102](https://github.com/googleapis/gcp-metadata/pull/102)) +- chore(deps): update dependency nock to v10 ([#103](https://github.com/googleapis/gcp-metadata/pull/103)) +- chore: update new issue template ([#101](https://github.com/googleapis/gcp-metadata/pull/101)) +- build: fix codecov uploading on Kokoro ([#97](https://github.com/googleapis/gcp-metadata/pull/97)) +- Update kokoro config ([#95](https://github.com/googleapis/gcp-metadata/pull/95)) +- Update CI config ([#93](https://github.com/googleapis/gcp-metadata/pull/93)) +- Update kokoro config ([#91](https://github.com/googleapis/gcp-metadata/pull/91)) +- Re-generate library using /synth.py ([#90](https://github.com/googleapis/gcp-metadata/pull/90)) +- test: remove appveyor config ([#89](https://github.com/googleapis/gcp-metadata/pull/89)) +- Update kokoro config ([#88](https://github.com/googleapis/gcp-metadata/pull/88)) +- Enable prefer-const in the eslint config ([#87](https://github.com/googleapis/gcp-metadata/pull/87)) +- Enable no-var in eslint ([#86](https://github.com/googleapis/gcp-metadata/pull/86)) + +### New Features + +A new option, `headers`, has been added to allow metadata queries to be sent with custom headers. + +## v0.8.0 + +**This release has breaking changes**. Please take care when upgrading to the latest version. + +#### Dropped support for Node.js 4.x and 9.x +This library is no longer tested against versions 4.x and 9.x of Node.js. Please upgrade to the latest supported LTS version! + +#### Return type of `instance()` and `project()` has changed +The `instance()` and `project()` methods are much more selective about which properties they will accept. + +The only accepted properties are `params` and `properties`. The `instance()` and `project()` methods also now directly return the data instead of a response object. 
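+
+A minimal sketch of the before/after behaviour, using the `hostname` key and the `property`/`params` options documented in the bundled README; the exact pre-0.8.0 response-object shape shown in the comments is an assumption, not taken from this changelog:
+
+```js
+const gcpMetadata = require('gcp-metadata');
+
+async function showReturnTypeChange() {
+  // Before v0.8.0 callers received a response-like object and typically
+  // unwrapped the payload themselves (shape assumed here for illustration):
+  //   const res = await gcpMetadata.instance('hostname');
+  //   console.log(res.data);
+
+  // From v0.8.0 onward the metadata value itself is returned:
+  const hostname = await gcpMetadata.instance('hostname');
+  console.log(hostname);
+
+  // Options objects are now restricted to the documented keys, e.g.:
+  const tags = await gcpMetadata.instance({
+    property: 'tags',
+    params: {alt: 'text'},
+  });
+  console.log(tags);
+}
+
+showReturnTypeChange();
+```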
+ +#### Changes in how large number valued properties are handled + +Previously large number-valued properties were being silently losing precision when +returned by this library (as a number). In the cases where a number valued property +returned by the metadata service is too large to represent as a JavaScript number, we +will now return the value as a BigNumber (from the bignumber.js) library. Numbers that +do fit into the JavaScript number range will continue to be returned as numbers. +For more details see [#74](https://github.com/googleapis/gcp-metadata/pull/74). + +### Breaking Changes +- chore: drop support for node.js 4 and 9 ([#68](https://github.com/googleapis/gcp-metadata/pull/68)) +- fix: quarantine axios config ([#62](https://github.com/googleapis/gcp-metadata/pull/62)) + +### Implementation Changes +- fix: properly handle large numbers in responses ([#74](https://github.com/googleapis/gcp-metadata/pull/74)) + +### Dependencies +- chore(deps): update dependency pify to v4 ([#73](https://github.com/googleapis/gcp-metadata/pull/73)) + +### Internal / Testing Changes +- Move to the new github org ([#84](https://github.com/googleapis/gcp-metadata/pull/84)) +- Update CI config ([#83](https://github.com/googleapis/gcp-metadata/pull/83)) +- Retry npm install in CI ([#81](https://github.com/googleapis/gcp-metadata/pull/81)) +- Update CI config ([#79](https://github.com/googleapis/gcp-metadata/pull/79)) +- chore(deps): update dependency nyc to v13 ([#77](https://github.com/googleapis/gcp-metadata/pull/77)) +- add key for system tests +- increase kitchen test timeout +- add a lint npm script +- update npm scripts +- add a synth file and run it ([#75](https://github.com/googleapis/gcp-metadata/pull/75)) +- chore(deps): update dependency assert-rejects to v1 ([#72](https://github.com/googleapis/gcp-metadata/pull/72)) +- chore: ignore package-log.json ([#71](https://github.com/googleapis/gcp-metadata/pull/71)) +- chore: update renovate config ([#70](https://github.com/googleapis/gcp-metadata/pull/70)) +- test: throw on deprecation +- chore(deps): update dependency typescript to v3 ([#67](https://github.com/googleapis/gcp-metadata/pull/67)) +- chore: make it OSPO compliant ([#66](https://github.com/googleapis/gcp-metadata/pull/66)) +- chore(deps): update dependency gts to ^0.8.0 ([#65](https://github.com/googleapis/gcp-metadata/pull/65)) diff --git a/node_modules/gcp-metadata/LICENSE b/node_modules/gcp-metadata/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/node_modules/gcp-metadata/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/gcp-metadata/README.md b/node_modules/gcp-metadata/README.md new file mode 100644 index 0000000..fd93aea --- /dev/null +++ b/node_modules/gcp-metadata/README.md @@ -0,0 +1,235 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." 
+Google Cloud Platform logo + +# [GCP Metadata: Node.js Client](https://github.com/googleapis/gcp-metadata) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/gcp-metadata.svg)](https://www.npmjs.org/package/gcp-metadata) + + + + +Get the metadata from a Google Cloud Platform environment + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/gcp-metadata/blob/main/CHANGELOG.md). + +* [GCP Metadata Node.js Client API Reference][client-docs] +* [GCP Metadata Documentation][product-docs] +* [github.com/googleapis/gcp-metadata](https://github.com/googleapis/gcp-metadata) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + * [Using the client library](#using-the-client-library) +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install gcp-metadata +``` + + +### Using the client library + +```javascript +const gcpMetadata = require('gcp-metadata'); + +async function quickstart() { + // check to see if this code can access a metadata server + const isAvailable = await gcpMetadata.isAvailable(); + console.log(`Is available: ${isAvailable}`); + + // Instance and Project level metadata will only be available if + // running inside of a Google Cloud compute environment such as + // Cloud Functions, App Engine, Kubernetes Engine, or Compute Engine. + // To learn more about the differences between instance and project + // level metadata, see: + // https://cloud.google.com/compute/docs/storing-retrieving-metadata#project-instance-metadata + if (isAvailable) { + // grab all top level metadata from the service + const instanceMetadata = await gcpMetadata.instance(); + console.log('Instance metadata:'); + console.log(instanceMetadata); + + // get all project level metadata + const projectMetadata = await gcpMetadata.project(); + console.log('Project metadata:'); + console.log(projectMetadata); + } +} + +quickstart(); + +``` + +#### Check to see if the metadata server is available +```js +const isAvailable = await gcpMetadata.isAvailable(); +``` + +#### Access all metadata + +```js +const data = await gcpMetadata.instance(); +console.log(data); // ... 
All metadata properties +``` + +#### Access specific properties +```js +const data = await gcpMetadata.instance('hostname'); +console.log(data); // ...Instance hostname +const projectId = await gcpMetadata.project('project-id'); +console.log(projectId); // ...Project ID of the running instance +``` + +#### Access nested properties with the relative path +```js +const data = await gcpMetadata.instance('service-accounts/default/email'); +console.log(data); // ...Email address of the Compute identity service account +``` + +#### Access specific properties with query parameters +```js +const data = await gcpMetadata.instance({ + property: 'tags', + params: { alt: 'text' } +}); +console.log(data) // ...Tags as newline-delimited list +``` + +#### Access with custom headers +```js +await gcpMetadata.instance({ + headers: { 'no-trace': '1' } +}); // ...Request is untraced +``` + +### Take care with large number valued properties + +In some cases number valued properties returned by the Metadata Service may be +too large to be representable as JavaScript numbers. In such cases we return +those values as `BigNumber` objects (from the [bignumber.js](https://github.com/MikeMcl/bignumber.js) library). Numbers +that fit within the JavaScript number range will be returned as normal number +values. + +```js +const id = await gcpMetadata.instance('id'); +console.log(id) // ... BigNumber { s: 1, e: 18, c: [ 45200, 31799277581759 ] } +console.log(id.toString()) // ... 4520031799277581759 +``` + +### Environment variables + +* `GCE_METADATA_HOST`: provide an alternate host or IP to perform lookup against (useful, for example, you're connecting through a custom proxy server). + + For example: + ``` + export GCE_METADATA_HOST='169.254.169.254' + ``` + +* `DETECT_GCP_RETRIES`: number representing number of retries that should be attempted on metadata lookup. + +* `DEBUG_AUTH`: emit debugging logs + +* `METADATA_SERVER_DETECTION`: configure desired metadata server availability check behavior. + + * `assume-present`: don't try to ping the metadata server, but assume it's present + * `none`: don't try to ping the metadata server, but don't try to use it either + * `bios-only`: treat the result of a BIOS probe as canonical (don't fall back to pinging) + * `ping-only`: skip the BIOS probe, and go straight to pinging + + +## Samples + +Samples are in the [`samples/`](https://github.com/googleapis/gcp-metadata/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. + +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Quickstart | [source code](https://github.com/googleapis/gcp-metadata/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/gcp-metadata&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | + + + +The [GCP Metadata Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://github.com/nodejs/release#release-schedule). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. 
+ +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install gcp-metadata@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/gcp-metadata/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). + +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/gcp-metadata/blob/main/LICENSE) + +[client-docs]: https://cloud.google.com/nodejs/docs/reference/gcp-metadata/latest +[product-docs]: https://cloud.google.com/compute/docs/storing-retrieving-metadata +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/gcp-metadata/build/src/gcp-residency.d.ts b/node_modules/gcp-metadata/build/src/gcp-residency.d.ts new file mode 100644 index 0000000..f39896f --- /dev/null +++ b/node_modules/gcp-metadata/build/src/gcp-residency.d.ts @@ -0,0 +1,57 @@ +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Known paths unique to Google Compute Engine Linux instances + */ +export declare const GCE_LINUX_BIOS_PATHS: { + BIOS_DATE: string; + BIOS_VENDOR: string; +}; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. 
+ * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +export declare function isGoogleCloudServerless(): boolean; +/** + * Determines if the process is running on a Linux Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. + */ +export declare function isGoogleComputeEngineLinux(): boolean; +/** + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. + * + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. + */ +export declare function isGoogleComputeEngineMACAddress(): boolean; +/** + * Determines if the process is running on a Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. + */ +export declare function isGoogleComputeEngine(): boolean; +/** + * Determines if the process is running on Google Cloud Platform. + * + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. + */ +export declare function detectGCPResidency(): boolean; diff --git a/node_modules/gcp-metadata/build/src/gcp-residency.js b/node_modules/gcp-metadata/build/src/gcp-residency.js new file mode 100644 index 0000000..cde842b --- /dev/null +++ b/node_modules/gcp-metadata/build/src/gcp-residency.js @@ -0,0 +1,114 @@ +"use strict"; +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0; +const fs_1 = require("fs"); +const os_1 = require("os"); +/** + * Known paths unique to Google Compute Engine Linux instances + */ +exports.GCE_LINUX_BIOS_PATHS = { + BIOS_DATE: '/sys/class/dmi/id/bios_date', + BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', +}; +const GCE_MAC_ADDRESS_REGEX = /^42:01/; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +function isGoogleCloudServerless() { + /** + * `CLOUD_RUN_JOB` is used for Cloud Run Jobs + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. 
+ * + * `FUNCTION_NAME` is used in older Cloud Functions environments: + * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * + * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + */ + const isGFEnvironment = process.env.CLOUD_RUN_JOB || + process.env.FUNCTION_NAME || + process.env.K_SERVICE; + return !!isGFEnvironment; +} +exports.isGoogleCloudServerless = isGoogleCloudServerless; +/** + * Determines if the process is running on a Linux Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. + */ +function isGoogleComputeEngineLinux() { + if ((0, os_1.platform)() !== 'linux') + return false; + try { + // ensure this file exist + (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); + // ensure this file exist and matches + const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); + return /Google/.test(biosVendor); + } + catch (_a) { + return false; + } +} +exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; +/** + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. + * + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. + */ +function isGoogleComputeEngineMACAddress() { + const interfaces = (0, os_1.networkInterfaces)(); + for (const item of Object.values(interfaces)) { + if (!item) + continue; + for (const { mac } of item) { + if (GCE_MAC_ADDRESS_REGEX.test(mac)) { + return true; + } + } + } + return false; +} +exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; +/** + * Determines if the process is running on a Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. + */ +function isGoogleComputeEngine() { + return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); +} +exports.isGoogleComputeEngine = isGoogleComputeEngine; +/** + * Determines if the process is running on Google Cloud Platform. + * + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. 
+ */ +function detectGCPResidency() { + return isGoogleCloudServerless() || isGoogleComputeEngine(); +} +exports.detectGCPResidency = detectGCPResidency; +//# sourceMappingURL=gcp-residency.js.map \ No newline at end of file diff --git a/node_modules/gcp-metadata/build/src/gcp-residency.js.map b/node_modules/gcp-metadata/build/src/gcp-residency.js.map new file mode 100644 index 0000000..602eb5f --- /dev/null +++ b/node_modules/gcp-metadata/build/src/gcp-residency.js.map @@ -0,0 +1 @@ +{"version":3,"file":"gcp-residency.js","sourceRoot":"","sources":["../../src/gcp-residency.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,2BAA0C;AAC1C,2BAA+C;AAE/C;;GAEG;AACU,QAAA,oBAAoB,GAAG;IAClC,SAAS,EAAE,6BAA6B;IACxC,WAAW,EAAE,+BAA+B;CAC7C,CAAC;AAEF,MAAM,qBAAqB,GAAG,QAAQ,CAAC;AAEvC;;;;;;;;GAQG;AACH,SAAgB,uBAAuB;IACrC;;;;;;;;;;OAUG;IACH,MAAM,eAAe,GACnB,OAAO,CAAC,GAAG,CAAC,aAAa;QACzB,OAAO,CAAC,GAAG,CAAC,aAAa;QACzB,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC;IAExB,OAAO,CAAC,CAAC,eAAe,CAAC;AAC3B,CAAC;AAlBD,0DAkBC;AAED;;;;GAIG;AACH,SAAgB,0BAA0B;IACxC,IAAI,IAAA,aAAQ,GAAE,KAAK,OAAO;QAAE,OAAO,KAAK,CAAC;IAEzC,IAAI;QACF,yBAAyB;QACzB,IAAA,aAAQ,EAAC,4BAAoB,CAAC,SAAS,CAAC,CAAC;QAEzC,qCAAqC;QACrC,MAAM,UAAU,GAAG,IAAA,iBAAY,EAAC,4BAAoB,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;QAE1E,OAAO,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;KAClC;IAAC,WAAM;QACN,OAAO,KAAK,CAAC;KACd;AACH,CAAC;AAdD,gEAcC;AAED;;;;;GAKG;AACH,SAAgB,+BAA+B;IAC7C,MAAM,UAAU,GAAG,IAAA,sBAAiB,GAAE,CAAC;IAEvC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE;QAC5C,IAAI,CAAC,IAAI;YAAE,SAAS;QAEpB,KAAK,MAAM,EAAC,GAAG,EAAC,IAAI,IAAI,EAAE;YACxB,IAAI,qBAAqB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;gBACnC,OAAO,IAAI,CAAC;aACb;SACF;KACF;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAdD,0EAcC;AAED;;;;GAIG;AACH,SAAgB,qBAAqB;IACnC,OAAO,0BAA0B,EAAE,IAAI,+BAA+B,EAAE,CAAC;AAC3E,CAAC;AAFD,sDAEC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB;IAChC,OAAO,uBAAuB,EAAE,IAAI,qBAAqB,EAAE,CAAC;AAC9D,CAAC;AAFD,gDAEC"} \ No newline at end of file diff --git a/node_modules/gcp-metadata/build/src/index.d.ts b/node_modules/gcp-metadata/build/src/index.d.ts new file mode 100644 index 0000000..3de8e5d --- /dev/null +++ b/node_modules/gcp-metadata/build/src/index.d.ts @@ -0,0 +1,148 @@ +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +/// +import { OutgoingHttpHeaders } from 'http'; +export declare const BASE_PATH = "/computeMetadata/v1"; +export declare const HOST_ADDRESS = "http://169.254.169.254"; +export declare const SECONDARY_HOST_ADDRESS = "http://metadata.google.internal."; +export declare const HEADER_NAME = "Metadata-Flavor"; +export declare const HEADER_VALUE = "Google"; +export declare const HEADERS: Readonly<{ + "Metadata-Flavor": "Google"; +}>; +/** + * Metadata server detection override options. + * + * Available via `process.env.METADATA_SERVER_DETECTION`. 
+ */ +export declare const METADATA_SERVER_DETECTION: Readonly<{ + 'assume-present': "don't try to ping the metadata server, but assume it's present"; + none: "don't try to ping the metadata server, but don't try to use it either"; + 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)"; + 'ping-only': "skip the BIOS probe, and go straight to pinging"; +}>; +export interface Options { + params?: { + [index: string]: string; + }; + property?: string; + headers?: OutgoingHttpHeaders; +} +export interface MetadataAccessor { + /** + * + * @example + * + * // equivalent to `project('project-id')`; + * const metadataKey = 'project/project-id'; + */ + metadataKey: string; + params?: Options['params']; + headers?: Options['headers']; + noResponseRetries?: number; + fastFail?: boolean; +} +export type BulkResults = { + [key in T[number]['metadataKey']]: ReturnType; +}; +/** + * Obtain metadata for the current GCE instance. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const serviceAccount: {} = await instance('service-accounts/'); + * const serviceAccountEmail: string = await instance('service-accounts/default/email'); + * ``` + */ +export declare function instance(options?: string | Options): Promise; +/** + * Obtain metadata for the current GCP project. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const projectId: string = await project('project-id'); + * const numericProjectId: number = await project('numeric-project-id'); + * ``` + */ +export declare function project(options?: string | Options): Promise; +/** + * Obtain metadata for the current universe. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const universeDomain: string = await universe('universe_domain'); + * ``` + */ +export declare function universe(options?: string | Options): Promise; +/** + * Retrieve metadata items in parallel. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const data = await bulk([ + * { + * metadataKey: 'instance', + * }, + * { + * metadataKey: 'project/project-id', + * }, + * ] as const); + * + * // data.instance; + * // data['project/project-id']; + * ``` + * + * @param properties The metadata properties to retrieve + * @returns The metadata in `metadatakey:value` format + */ +export declare function bulk[], R extends BulkResults = BulkResults>(properties: T): Promise; +/** + * Determine if the metadata server is currently available. + */ +export declare function isAvailable(): Promise; +/** + * reset the memoized isAvailable() lookup. + */ +export declare function resetIsAvailableCache(): void; +/** + * A cache for the detected GCP Residency. + */ +export declare let gcpResidencyCache: boolean | null; +/** + * Detects GCP Residency. + * Caches results to reduce costs for subsequent calls. + * + * @see setGCPResidency for setting + */ +export declare function getGCPResidency(): boolean; +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + * @see getGCPResidency for getting + */ +export declare function setGCPResidency(value?: boolean | null): void; +/** + * Obtain the timeout for requests to the metadata server. 
+ * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +export declare function requestTimeout(): number; +export * from './gcp-residency'; diff --git a/node_modules/gcp-metadata/build/src/index.js b/node_modules/gcp-metadata/build/src/index.js new file mode 100644 index 0000000..347a668 --- /dev/null +++ b/node_modules/gcp-metadata/build/src/index.js @@ -0,0 +1,405 @@ +"use strict"; +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.requestTimeout = exports.setGCPResidency = exports.getGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.bulk = exports.universe = exports.project = exports.instance = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; +const gaxios_1 = require("gaxios"); +const jsonBigint = require("json-bigint"); +const gcp_residency_1 = require("./gcp-residency"); +exports.BASE_PATH = '/computeMetadata/v1'; +exports.HOST_ADDRESS = 'http://169.254.169.254'; +exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; +exports.HEADER_NAME = 'Metadata-Flavor'; +exports.HEADER_VALUE = 'Google'; +exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +/** + * Metadata server detection override options. + * + * Available via `process.env.METADATA_SERVER_DETECTION`. + */ +exports.METADATA_SERVER_DETECTION = Object.freeze({ + 'assume-present': "don't try to ping the metadata server, but assume it's present", + none: "don't try to ping the metadata server, but don't try to use it either", + 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", + 'ping-only': 'skip the BIOS probe, and go straight to pinging', +}); +/** + * Returns the base URL while taking into account the GCE_METADATA_HOST + * environment variable if it exists. + * + * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. + */ +function getBaseUrl(baseUrl) { + if (!baseUrl) { + baseUrl = + process.env.GCE_METADATA_IP || + process.env.GCE_METADATA_HOST || + exports.HOST_ADDRESS; + } + // If no scheme is provided default to HTTP: + if (!/^https?:\/\//.test(baseUrl)) { + baseUrl = `http://${baseUrl}`; + } + return new URL(exports.BASE_PATH, baseUrl).href; +} +// Accepts an options object passed from the user to the API. 
In previous +// versions of the API, it referred to a `Request` or an `Axios` request +// options object. Now it refers to an object with very limited property +// names. This is here to help ensure users don't pass invalid options when +// they upgrade from 0.4 to 0.5 to 0.8. +function validate(options) { + Object.keys(options).forEach(key => { + switch (key) { + case 'params': + case 'property': + case 'headers': + break; + case 'qs': + throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); + default: + throw new Error(`'${key}' is not a valid configuration option.`); + } + }); +} +async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { + let metadataKey = ''; + let params = {}; + let headers = {}; + if (typeof type === 'object') { + const metadataAccessor = type; + metadataKey = metadataAccessor.metadataKey; + params = metadataAccessor.params || params; + headers = metadataAccessor.headers || headers; + noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; + fastFail = metadataAccessor.fastFail || fastFail; + } + else { + metadataKey = type; + } + if (typeof options === 'string') { + metadataKey += `/${options}`; + } + else { + validate(options); + if (options.property) { + metadataKey += `/${options.property}`; + } + headers = options.headers || headers; + params = options.params || params; + } + try { + const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; + const res = await requestMethod({ + url: `${getBaseUrl()}/${metadataKey}`, + headers: { ...exports.HEADERS, ...headers }, + retryConfig: { noResponseRetries }, + params, + responseType: 'text', + timeout: requestTimeout(), + }); + // NOTE: node.js converts all incoming headers to lower case. + if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { + throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`); + } + else if (!res.data) { + throw new Error('Invalid response from the metadata service'); + } + if (typeof res.data === 'string') { + try { + return jsonBigint.parse(res.data); + } + catch (_a) { + /* ignore */ + } + } + return res.data; + } + catch (e) { + const err = e; + if (err.response && err.response.status !== 200) { + err.message = `Unsuccessful response status code. ${err.message}`; + } + throw e; + } +} +async function fastFailMetadataRequest(options) { + const secondaryOptions = { + ...options, + url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), + }; + // We race a connection between DNS/IP to metadata server. There are a couple + // reasons for this: + // + // 1. the DNS is slow in some GCP environments; by checking both, we might + // detect the runtime environment signficantly faster. + // 2. we can't just check the IP, which is tarpitted and slow to respond + // on a user's local machine. + // + // Additional logic has been added to make sure that we don't create an + // unhandled rejection in scenarios where a failure happens sometime + // after a success. + // + // Note, however, if a failure happens prior to a success, a rejection should + // occur, this is for folks running locally. 
+ // + let responded = false; + const r1 = (0, gaxios_1.request)(options) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r2; + } + else { + responded = true; + throw err; + } + }); + const r2 = (0, gaxios_1.request)(secondaryOptions) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r1; + } + else { + responded = true; + throw err; + } + }); + return Promise.race([r1, r2]); +} +/** + * Obtain metadata for the current GCE instance. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const serviceAccount: {} = await instance('service-accounts/'); + * const serviceAccountEmail: string = await instance('service-accounts/default/email'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function instance(options) { + return metadataAccessor('instance', options); +} +exports.instance = instance; +/** + * Obtain metadata for the current GCP project. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const projectId: string = await project('project-id'); + * const numericProjectId: number = await project('numeric-project-id'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function project(options) { + return metadataAccessor('project', options); +} +exports.project = project; +/** + * Obtain metadata for the current universe. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const universeDomain: string = await universe('universe_domain'); + * ``` + */ +function universe(options) { + return metadataAccessor('universe', options); +} +exports.universe = universe; +/** + * Retrieve metadata items in parallel. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const data = await bulk([ + * { + * metadataKey: 'instance', + * }, + * { + * metadataKey: 'project/project-id', + * }, + * ] as const); + * + * // data.instance; + * // data['project/project-id']; + * ``` + * + * @param properties The metadata properties to retrieve + * @returns The metadata in `metadatakey:value` format + */ +async function bulk(properties) { + const r = {}; + await Promise.all(properties.map(item => { + return (async () => { + const res = await metadataAccessor(item); + const key = item.metadataKey; + r[key] = res; + })(); + })); + return r; +} +exports.bulk = bulk; +/* + * How many times should we retry detecting GCP environment. + */ +function detectGCPAvailableRetries() { + return process.env.DETECT_GCP_RETRIES + ? Number(process.env.DETECT_GCP_RETRIES) + : 0; +} +let cachedIsAvailableResponse; +/** + * Determine if the metadata server is currently available. + */ +async function isAvailable() { + if (process.env.METADATA_SERVER_DETECTION) { + const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); + if (!(value in exports.METADATA_SERVER_DETECTION)) { + throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. 
Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); + } + switch (value) { + case 'assume-present': + return true; + case 'none': + return false; + case 'bios-only': + return getGCPResidency(); + case 'ping-only': + // continue, we want to ping the server + } + } + try { + // If a user is instantiating several GCP libraries at the same time, + // this may result in multiple calls to isAvailable(), to detect the + // runtime environment. We use the same promise for each of these calls + // to reduce the network load. + if (cachedIsAvailableResponse === undefined) { + cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), + // If the default HOST_ADDRESS has been overridden, we should not + // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in + // a non-GCP environment): + !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + } + await cachedIsAvailableResponse; + return true; + } + catch (e) { + const err = e; + if (process.env.DEBUG_AUTH) { + console.info(err); + } + if (err.type === 'request-timeout') { + // If running in a GCP environment, metadata endpoint should return + // within ms. + return false; + } + if (err.response && err.response.status === 404) { + return false; + } + else { + if (!(err.response && err.response.status === 404) && + // A warning is emitted if we see an unexpected err.code, or err.code + // is not populated: + (!err.code || + ![ + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'ENETUNREACH', + 'ENOENT', + 'ENOTFOUND', + 'ECONNREFUSED', + ].includes(err.code))) { + let code = 'UNKNOWN'; + if (err.code) + code = err.code; + process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); + } + // Failure to resolve the metadata service means that it is not available. + return false; + } + } +} +exports.isAvailable = isAvailable; +/** + * reset the memoized isAvailable() lookup. + */ +function resetIsAvailableCache() { + cachedIsAvailableResponse = undefined; +} +exports.resetIsAvailableCache = resetIsAvailableCache; +/** + * A cache for the detected GCP Residency. + */ +exports.gcpResidencyCache = null; +/** + * Detects GCP Residency. + * Caches results to reduce costs for subsequent calls. + * + * @see setGCPResidency for setting + */ +function getGCPResidency() { + if (exports.gcpResidencyCache === null) { + setGCPResidency(); + } + return exports.gcpResidencyCache; +} +exports.getGCPResidency = getGCPResidency; +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + * @see getGCPResidency for getting + */ +function setGCPResidency(value = null) { + exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); +} +exports.setGCPResidency = setGCPResidency; +/** + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +function requestTimeout() { + return getGCPResidency() ? 
0 : 3000; +} +exports.requestTimeout = requestTimeout; +__exportStar(require("./gcp-residency"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/gcp-metadata/build/src/index.js.map b/node_modules/gcp-metadata/build/src/index.js.map new file mode 100644 index 0000000..48fa58c --- /dev/null +++ b/node_modules/gcp-metadata/build/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;;;;;;;;;;;;;;AAEH,mCAA2E;AAE3E,0CAA2C;AAC3C,mDAAmD;AAEtC,QAAA,SAAS,GAAG,qBAAqB,CAAC;AAClC,QAAA,YAAY,GAAG,wBAAwB,CAAC;AACxC,QAAA,sBAAsB,GAAG,kCAAkC,CAAC;AAE5D,QAAA,WAAW,GAAG,iBAAiB,CAAC;AAChC,QAAA,YAAY,GAAG,QAAQ,CAAC;AACxB,QAAA,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAC,CAAC,mBAAW,CAAC,EAAE,oBAAY,EAAC,CAAC,CAAC;AAEpE;;;;GAIG;AACU,QAAA,yBAAyB,GAAG,MAAM,CAAC,MAAM,CAAC;IACrD,gBAAgB,EACd,gEAAgE;IAClE,IAAI,EAAE,uEAAuE;IAC7E,WAAW,EACT,4EAA4E;IAC9E,WAAW,EAAE,iDAAiD;CAC/D,CAAC,CAAC;AA2BH;;;;;GAKG;AACH,SAAS,UAAU,CAAC,OAAgB;IAClC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO;YACL,OAAO,CAAC,GAAG,CAAC,eAAe;gBAC3B,OAAO,CAAC,GAAG,CAAC,iBAAiB;gBAC7B,oBAAY,CAAC;KAChB;IACD,4CAA4C;IAC5C,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE;QACjC,OAAO,GAAG,UAAU,OAAO,EAAE,CAAC;KAC/B;IACD,OAAO,IAAI,GAAG,CAAC,iBAAS,EAAE,OAAO,CAAC,CAAC,IAAI,CAAC;AAC1C,CAAC;AAED,yEAAyE;AACzE,wEAAwE;AACxE,yEAAyE;AACzE,2EAA2E;AAC3E,wCAAwC;AACxC,SAAS,QAAQ,CAAC,OAAgB;IAChC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACjC,QAAQ,GAAG,EAAE;YACX,KAAK,QAAQ,CAAC;YACd,KAAK,UAAU,CAAC;YAChB,KAAK,SAAS;gBACZ,MAAM;YACR,KAAK,IAAI;gBACP,MAAM,IAAI,KAAK,CACb,wEAAwE,CACzE,CAAC;YACJ;gBACE,MAAM,IAAI,KAAK,CAAC,IAAI,GAAG,wCAAwC,CAAC,CAAC;SACpE;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AASD,KAAK,UAAU,gBAAgB,CAC7B,IAA+B,EAC/B,UAA4B,EAAE,EAC9B,iBAAiB,GAAG,CAAC,EACrB,QAAQ,GAAG,KAAK;IAEhB,IAAI,WAAW,GAAG,EAAE,CAAC;IACrB,IAAI,MAAM,GAAO,EAAE,CAAC;IACpB,IAAI,OAAO,GAAwB,EAAE,CAAC;IAEtC,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,MAAM,gBAAgB,GAAqB,IAAI,CAAC;QAEhD,WAAW,GAAG,gBAAgB,CAAC,WAAW,CAAC;QAC3C,MAAM,GAAG,gBAAgB,CAAC,MAAM,IAAI,MAAM,CAAC;QAC3C,OAAO,GAAG,gBAAgB,CAAC,OAAO,IAAI,OAAO,CAAC;QAC9C,iBAAiB,GAAG,gBAAgB,CAAC,iBAAiB,IAAI,iBAAiB,CAAC;QAC5E,QAAQ,GAAG,gBAAgB,CAAC,QAAQ,IAAI,QAAQ,CAAC;KAClD;SAAM;QACL,WAAW,GAAG,IAAI,CAAC;KACpB;IAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,WAAW,IAAI,IAAI,OAAO,EAAE,CAAC;KAC9B;SAAM;QACL,QAAQ,CAAC,OAAO,CAAC,CAAC;QAElB,IAAI,OAAO,CAAC,QAAQ,EAAE;YACpB,WAAW,IAAI,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC;SACvC;QAED,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,OAAO,CAAC;QACrC,MAAM,GAAG,OAAO,CAAC,MAAM,IAAI,MAAM,CAAC;KACnC;IAED,IAAI;QACF,MAAM,aAAa,GAAG,QAAQ,CAAC,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,gBAAO,CAAC;QACnE,MAAM,GAAG,GAAG,MAAM,aAAa,CAAI;YACjC,GAAG,EAAE,GAAG,UAAU,EAAE,IAAI,WAAW,EAAE;YACrC,OAAO,EAAE,EAAC,GAAG,eAAO,EAAE,GAAG,OAAO,EAAC;YACjC,WAAW,EAAE,EAAC,iBAAiB,EAAC;YAChC,MAAM;YACN,YAAY,EAAE,MAAM;YACpB,OAAO,EAAE,cAAc,EAAE;SAC1B,CAAC,CAAC;QACH,6DAA6D;QAC7D,IAAI,GAAG,CAAC,OAAO,CAAC,mBAAW,CAAC,WAAW,EAAE,CAAC,KAAK,oBAAY,EAAE;YAC3D,MAAM,IAAI,KAAK,CACb,qDAAqD,mBAAW,UAAU,CAC3E,CAAC;SACH;aAAM,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE;YACpB,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAC;SAC/D;QACD,IAAI,OAAO,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,IAAI;gBACF,OAAO,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;aACnC;YAAC,WAAM;gBACN,YAAY;aACb;SACF;QACD,OAAO,GAAG,CAAC,IAAI,CAAC;KACjB;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,GAAG,GAAG,CAAgB,CAAC;QAC7B,IAAI,GAAG,CAAC,QAAQ,IAAI,GAAG,CAAC,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC/C,GAAG,CAAC,OAAO,GAAG,sCAAsC,GAAG,CAAC,OAAO,EAAE,CAAC;SACnE;QACD,MAAM,CAAC,CAAC;KACT;AACH,CAAC;AAED,KAAK,UAAU,uBAAuB,CACpC
,OAAsB;IAEtB,MAAM,gBAAgB,GAAG;QACvB,GAAG,OAAO;QACV,GAAG,EAAE,OAAO,CAAC,GAAI,CAAC,OAAO,CAAC,UAAU,EAAE,EAAE,UAAU,CAAC,8BAAsB,CAAC,CAAC;KAC5E,CAAC;IACF,6EAA6E;IAC7E,oBAAoB;IACpB,EAAE;IACF,0EAA0E;IAC1E,yDAAyD;IACzD,wEAAwE;IACxE,gCAAgC;IAChC,EAAE;IACF,uEAAuE;IACvE,oEAAoE;IACpE,mBAAmB;IACnB,EAAE;IACF,6EAA6E;IAC7E,4CAA4C;IAC5C,EAAE;IACF,IAAI,SAAS,GAAG,KAAK,CAAC;IACtB,MAAM,EAAE,GAA4B,IAAA,gBAAO,EAAI,OAAO,CAAC;SACpD,IAAI,CAAC,GAAG,CAAC,EAAE;QACV,SAAS,GAAG,IAAI,CAAC;QACjB,OAAO,GAAG,CAAC;IACb,CAAC,CAAC;SACD,KAAK,CAAC,GAAG,CAAC,EAAE;QACX,IAAI,SAAS,EAAE;YACb,OAAO,EAAE,CAAC;SACX;aAAM;YACL,SAAS,GAAG,IAAI,CAAC;YACjB,MAAM,GAAG,CAAC;SACX;IACH,CAAC,CAAC,CAAC;IACL,MAAM,EAAE,GAA4B,IAAA,gBAAO,EAAI,gBAAgB,CAAC;SAC7D,IAAI,CAAC,GAAG,CAAC,EAAE;QACV,SAAS,GAAG,IAAI,CAAC;QACjB,OAAO,GAAG,CAAC;IACb,CAAC,CAAC;SACD,KAAK,CAAC,GAAG,CAAC,EAAE;QACX,IAAI,SAAS,EAAE;YACb,OAAO,EAAE,CAAC;SACX;aAAM;YACL,SAAS,GAAG,IAAI,CAAC;YACjB,MAAM,GAAG,CAAC;SACX;IACH,CAAC,CAAC,CAAC;IACL,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;AAChC,CAAC;AAED;;;;;;;;;;GAUG;AACH,8DAA8D;AAC9D,SAAgB,QAAQ,CAAU,OAA0B;IAC1D,OAAO,gBAAgB,CAAI,UAAU,EAAE,OAAO,CAAC,CAAC;AAClD,CAAC;AAFD,4BAEC;AAED;;;;;;;;;;GAUG;AACH,8DAA8D;AAC9D,SAAgB,OAAO,CAAU,OAA0B;IACzD,OAAO,gBAAgB,CAAI,SAAS,EAAE,OAAO,CAAC,CAAC;AACjD,CAAC;AAFD,0BAEC;AAED;;;;;;;;;GASG;AACH,SAAgB,QAAQ,CAAI,OAA0B;IACpD,OAAO,gBAAgB,CAAI,UAAU,EAAE,OAAO,CAAC,CAAC;AAClD,CAAC;AAFD,4BAEC;AAED;;;;;;;;;;;;;;;;;;;;;;GAsBG;AACI,KAAK,UAAU,IAAI,CAGxB,UAAa;IACb,MAAM,CAAC,GAAG,EAAoB,CAAC;IAE/B,MAAM,OAAO,CAAC,GAAG,CACf,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACpB,OAAO,CAAC,KAAK,IAAI,EAAE;YACjB,MAAM,GAAG,GAAG,MAAM,gBAAgB,CAAC,IAAI,CAAC,CAAC;YACzC,MAAM,GAAG,GAAG,IAAI,CAAC,WAA6B,CAAC;YAE/C,CAAC,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;QACf,CAAC,CAAC,EAAE,CAAC;IACP,CAAC,CAAC,CACH,CAAC;IAEF,OAAO,CAAM,CAAC;AAChB,CAAC;AAlBD,oBAkBC;AAED;;GAEG;AACH,SAAS,yBAAyB;IAChC,OAAO,OAAO,CAAC,GAAG,CAAC,kBAAkB;QACnC,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC;QACxC,CAAC,CAAC,CAAC,CAAC;AACR,CAAC;AAED,IAAI,yBAAuD,CAAC;AAE5D;;GAEG;AACI,KAAK,UAAU,WAAW;IAC/B,IAAI,OAAO,CAAC,GAAG,CAAC,yBAAyB,EAAE;QACzC,MAAM,KAAK,GACT,OAAO,CAAC,GAAG,CAAC,yBAAyB,CAAC,IAAI,EAAE,CAAC,iBAAiB,EAAE,CAAC;QAEnE,IAAI,CAAC,CAAC,KAAK,IAAI,iCAAyB,CAAC,EAAE;YACzC,MAAM,IAAI,UAAU,CAClB,6DAA6D,KAAK,0BAA0B,MAAM,CAAC,IAAI,CACrG,iCAAyB,CAC1B,CAAC,IAAI,CAAC,MAAM,CAAC,cAAc,CAC7B,CAAC;SACH;QAED,QAAQ,KAA+C,EAAE;YACvD,KAAK,gBAAgB;gBACnB,OAAO,IAAI,CAAC;YACd,KAAK,MAAM;gBACT,OAAO,KAAK,CAAC;YACf,KAAK,WAAW;gBACd,OAAO,eAAe,EAAE,CAAC;YAC3B,KAAK,WAAW,CAAC;YACjB,uCAAuC;SACxC;KACF;IAED,IAAI;QACF,qEAAqE;QACrE,oEAAoE;QACpE,uEAAuE;QACvE,8BAA8B;QAC9B,IAAI,yBAAyB,KAAK,SAAS,EAAE;YAC3C,yBAAyB,GAAG,gBAAgB,CAC1C,UAAU,EACV,SAAS,EACT,yBAAyB,EAAE;YAC3B,iEAAiE;YACjE,oEAAoE;YACpE,0BAA0B;YAC1B,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAChE,CAAC;SACH;QACD,MAAM,yBAAyB,CAAC;QAChC,OAAO,IAAI,CAAC;KACb;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,GAAG,GAAG,CAAiC,CAAC;QAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnB;QAED,IAAI,GAAG,CAAC,IAAI,KAAK,iBAAiB,EAAE;YAClC,mEAAmE;YACnE,aAAa;YACb,OAAO,KAAK,CAAC;SACd;QACD,IAAI,GAAG,CAAC,QAAQ,IAAI,GAAG,CAAC,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC/C,OAAO,KAAK,CAAC;SACd;aAAM;YACL,IACE,CAAC,CAAC,GAAG,CAAC,QAAQ,IAAI,GAAG,CAAC,QAAQ,CAAC,MAAM,KAAK,GAAG,CAAC;gBAC9C,qEAAqE;gBACrE,oBAAoB;gBACpB,CAAC,CAAC,GAAG,CAAC,IAAI;oBACR,CAAC;wBACC,WAAW;wBACX,cAAc;wBACd,aAAa;wBACb,QAAQ;wBACR,WAAW;wBACX,cAAc;qBACf,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,EACvB;gBACA,IAAI,IAAI,GAAG,SAAS,CAAC;gBACrB,IAAI,GAAG,CAAC,IAAI;oBAAE,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;gBAC9B,OAAO,CA
AC,WAAW,CACjB,+BAA+B,GAAG,CAAC,OAAO,WAAW,IAAI,EAAE,EAC3D,uBAAuB,CACxB,CAAC;aACH;YAED,0EAA0E;YAC1E,OAAO,KAAK,CAAC;SACd;KACF;AACH,CAAC;AAnFD,kCAmFC;AAED;;GAEG;AACH,SAAgB,qBAAqB;IACnC,yBAAyB,GAAG,SAAS,CAAC;AACxC,CAAC;AAFD,sDAEC;AAED;;GAEG;AACQ,QAAA,iBAAiB,GAAmB,IAAI,CAAC;AAEpD;;;;;GAKG;AACH,SAAgB,eAAe;IAC7B,IAAI,yBAAiB,KAAK,IAAI,EAAE;QAC9B,eAAe,EAAE,CAAC;KACnB;IAED,OAAO,yBAAkB,CAAC;AAC5B,CAAC;AAND,0CAMC;AAED;;;;;;GAMG;AACH,SAAgB,eAAe,CAAC,QAAwB,IAAI;IAC1D,yBAAiB,GAAG,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAA,kCAAkB,GAAE,CAAC;AACpE,CAAC;AAFD,0CAEC;AAED;;;;;;;;GAQG;AACH,SAAgB,cAAc;IAC5B,OAAO,eAAe,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;AACtC,CAAC;AAFD,wCAEC;AAED,kDAAgC"} \ No newline at end of file diff --git a/node_modules/gcp-metadata/package.json b/node_modules/gcp-metadata/package.json new file mode 100644 index 0000000..8962809 --- /dev/null +++ b/node_modules/gcp-metadata/package.json @@ -0,0 +1,70 @@ +{ + "name": "gcp-metadata", + "version": "6.1.0", + "description": "Get the metadata from a Google Cloud Platform environment", + "repository": "googleapis/gcp-metadata", + "main": "./build/src/index.js", + "types": "./build/src/index.d.ts", + "files": [ + "build/src" + ], + "scripts": { + "compile": "cross-env NODE_OPTIONS=--max-old-space-size=8192 tsc -p .", + "fix": "gts fix", + "pretest": "npm run compile", + "prepare": "npm run compile", + "samples-test": "npm link && cd samples/ && npm link ../ && npm test && cd ../", + "presystem-test": "npm run compile", + "system-test": "mocha build/system-test --timeout 600000", + "test": "c8 mocha --timeout=5000 build/test", + "docs": "compodoc src/", + "lint": "gts check", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "clean": "gts clean", + "precompile": "gts clean" + }, + "keywords": [ + "google cloud platform", + "google cloud", + "google", + "app engine", + "compute engine", + "metadata server", + "metadata" + ], + "author": "Stephen Sawchuk", + "license": "Apache-2.0", + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "devDependencies": { + "@compodoc/compodoc": "^1.1.10", + "@google-cloud/functions": "^3.0.0", + "@types/json-bigint": "^1.0.0", + "@types/mocha": "^9.0.0", + "@types/ncp": "^2.0.1", + "@types/node": "^20.0.0", + "@types/sinon": "^17.0.0", + "@types/tmp": "0.2.6", + "@types/uuid": "^9.0.0", + "c8": "^8.0.0", + "cross-env": "^7.0.3", + "gcbuild": "^1.3.4", + "gcx": "^1.0.0", + "gts": "^5.0.0", + "linkinator": "^4.0.0", + "mocha": "^8.0.0", + "ncp": "^2.0.0", + "nock": "^13.0.0", + "sinon": "^17.0.0", + "tmp": "^0.2.0", + "typescript": "^5.1.6", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14" + } +} diff --git a/node_modules/google-auth-library/CHANGELOG.md b/node_modules/google-auth-library/CHANGELOG.md new file mode 100644 index 0000000..22516d2 --- /dev/null +++ b/node_modules/google-auth-library/CHANGELOG.md @@ -0,0 +1,1290 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/google-auth-library?activeTab=versions + +## [9.7.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.6.3...v9.7.0) (2024-03-12) + + +### Features + +* `PassThrough` AuthClient ([#1771](https://github.com/googleapis/google-auth-library-nodejs/issues/1771)) ([0003bee](https://github.com/googleapis/google-auth-library-nodejs/commit/0003bee317dd8e99b553857edfffeb4a47a4af26)) + + +### Bug Fixes + +* **deps:** Update dependency @googleapis/iam to v15 
([#1772](https://github.com/googleapis/google-auth-library-nodejs/issues/1772)) ([f45f975](https://github.com/googleapis/google-auth-library-nodejs/commit/f45f9753a7c83bc04616a1bdbaf687b3f38a17d2)) +* Making aws request signer get a new session token each time security credentials are requested. ([#1765](https://github.com/googleapis/google-auth-library-nodejs/issues/1765)) ([6a6e496](https://github.com/googleapis/google-auth-library-nodejs/commit/6a6e49634863f61487688724d0d20632e03f0299)) + +## [9.6.3](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.6.2...v9.6.3) (2024-02-06) + + +### Bug Fixes + +* Always sign with `scopes` on Non-Default Universes ([#1752](https://github.com/googleapis/google-auth-library-nodejs/issues/1752)) ([f3d3a03](https://github.com/googleapis/google-auth-library-nodejs/commit/f3d3a03dbce42a400c11457131dd1fabc206826a)) + +## [9.6.2](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.6.1...v9.6.2) (2024-02-02) + + +### Bug Fixes + +* Allow Get Universe Without Credentials ([#1748](https://github.com/googleapis/google-auth-library-nodejs/issues/1748)) ([696db72](https://github.com/googleapis/google-auth-library-nodejs/commit/696db72bb8644739768d20375d670813d4490714)) + +## [9.6.1](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.6.0...v9.6.1) (2024-02-01) + + +### Bug Fixes + +* Universe Domain Resolution ([#1745](https://github.com/googleapis/google-auth-library-nodejs/issues/1745)) ([a4f9f9c](https://github.com/googleapis/google-auth-library-nodejs/commit/a4f9f9c65853a37e6e83861c5d22533dba774037)) + +## [9.6.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.5.0...v9.6.0) (2024-01-29) + + +### Features + +* Open More Endpoints for Customization ([#1721](https://github.com/googleapis/google-auth-library-nodejs/issues/1721)) ([effbf87](https://github.com/googleapis/google-auth-library-nodejs/commit/effbf87f6f0fd11a0cb1c749dad81737926dc436)) +* Use self-signed JWTs when non-default Universe Domains ([#1722](https://github.com/googleapis/google-auth-library-nodejs/issues/1722)) ([7e9876e](https://github.com/googleapis/google-auth-library-nodejs/commit/7e9876e2496b073220ca270368da7e9522da88f9)) + + +### Bug Fixes + +* Revert Missing `WORKFORCE_AUDIENCE_PATTERN` ([#1740](https://github.com/googleapis/google-auth-library-nodejs/issues/1740)) ([422de68](https://github.com/googleapis/google-auth-library-nodejs/commit/422de68d8d9ea66e6bf1fea923f61c8af0842420)) + +## [9.5.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.4.2...v9.5.0) (2024-01-25) + + +### Features + +* Improve Universe Domain Ergonomics ([#1732](https://github.com/googleapis/google-auth-library-nodejs/issues/1732)) ([eec82f5](https://github.com/googleapis/google-auth-library-nodejs/commit/eec82f5f48a250744b5c3200ef247c3eae184e2f)) + + +### Bug Fixes + +* **deps:** Update dependency @googleapis/iam to v14 ([#1725](https://github.com/googleapis/google-auth-library-nodejs/issues/1725)) ([594bf2c](https://github.com/googleapis/google-auth-library-nodejs/commit/594bf2cc808c03733274d6b08d92f1d4b12dd630)) +* Typos in samples ([#1728](https://github.com/googleapis/google-auth-library-nodejs/issues/1728)) ([058a503](https://github.com/googleapis/google-auth-library-nodejs/commit/058a5035e3e4df35663c6b3adef2dda617271849)) + +## [9.4.2](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.4.1...v9.4.2) (2024-01-04) + + +### Bug Fixes + +* **deps:** Update dependency @googleapis/iam to v13 
([#1710](https://github.com/googleapis/google-auth-library-nodejs/issues/1710)) ([7ae4aae](https://github.com/googleapis/google-auth-library-nodejs/commit/7ae4aaea0311de1a3e10f4e86a1ef93ff00656b5)) +* Expand credentials type ([#1719](https://github.com/googleapis/google-auth-library-nodejs/issues/1719)) ([846c8e9](https://github.com/googleapis/google-auth-library-nodejs/commit/846c8e96cc36feb05148c16e12127e805e388189)) + +## [9.4.1](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.4.0...v9.4.1) (2023-12-01) + + +### Bug Fixes + +* Support 404 When `GaxiosError` != `GaxiosError` ([#1707](https://github.com/googleapis/google-auth-library-nodejs/issues/1707)) ([704674f](https://github.com/googleapis/google-auth-library-nodejs/commit/704674fe14750d31e7d3a58d6f9b2387c89fb184)) + +## [9.4.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.3.0...v9.4.0) (2023-11-30) + + +### Features + +* Update gcp metadata dependency ([#1703](https://github.com/googleapis/google-auth-library-nodejs/issues/1703)) ([615bdd9](https://github.com/googleapis/google-auth-library-nodejs/commit/615bdd92d1eb1a2a5b8d8c2675fd53350d5fbc21)) + +## [9.3.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.2.0...v9.3.0) (2023-11-29) + + +### Features + +* Add impersonated signer ([#1694](https://github.com/googleapis/google-auth-library-nodejs/issues/1694)) ([cb78a1b](https://github.com/googleapis/google-auth-library-nodejs/commit/cb78a1bfb4104e05be85877fee56d703f749e525)) +* Retrieve `universe_domain` for Compute clients ([#1692](https://github.com/googleapis/google-auth-library-nodejs/issues/1692)) ([a735ec5](https://github.com/googleapis/google-auth-library-nodejs/commit/a735ec5f5e280a1c47f0f4d5e99acf228d5a320a)) + + +### Bug Fixes + +* Verifier algorithm ([#1696](https://github.com/googleapis/google-auth-library-nodejs/issues/1696)) ([c5080a0](https://github.com/googleapis/google-auth-library-nodejs/commit/c5080a0d56b52b90b5d26cfc5f7343afb58199a4)) + +## [9.2.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.1.0...v9.2.0) (2023-10-26) + + +### Features + +* Unify Base `AuthClient` Options ([#1663](https://github.com/googleapis/google-auth-library-nodejs/issues/1663)) ([5ac6705](https://github.com/googleapis/google-auth-library-nodejs/commit/5ac67052f6c19e93c3e8c4e1636fad4737fcee08)) + + +### Bug Fixes + +* **deps:** Update dependency @googleapis/iam to v12 ([#1671](https://github.com/googleapis/google-auth-library-nodejs/issues/1671)) ([4f94ffe](https://github.com/googleapis/google-auth-library-nodejs/commit/4f94ffe474ee41b560813b81e8d621be848d38d0)) +* DOS security risks ([#1668](https://github.com/googleapis/google-auth-library-nodejs/issues/1668)) ([b25d4f5](https://github.com/googleapis/google-auth-library-nodejs/commit/b25d4f54f88c52c8496ef65f5ab2a75122100f2c)) +* Increase max asset size ([#1682](https://github.com/googleapis/google-auth-library-nodejs/issues/1682)) ([edb9401](https://github.com/googleapis/google-auth-library-nodejs/commit/edb94018cadd2ad796ccec5496e3bfcbade39c7f)) +* Remove broken source maps ([#1669](https://github.com/googleapis/google-auth-library-nodejs/issues/1669)) ([56cb3ad](https://github.com/googleapis/google-auth-library-nodejs/commit/56cb3ad24c5e4b6952169b88d0a43e89cbf1252e)) + +## [9.1.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v9.0.0...v9.1.0) (2023-10-02) + + +### Features + +* Byoid metrics ([#1595](https://github.com/googleapis/google-auth-library-nodejs/issues/1595)) 
([fe09d6b](https://github.com/googleapis/google-auth-library-nodejs/commit/fe09d6b4eb5a8215d21564a97bcc7ea5beb570bf)) +* Expose `gcp-metadata` ([#1655](https://github.com/googleapis/google-auth-library-nodejs/issues/1655)) ([d76140f](https://github.com/googleapis/google-auth-library-nodejs/commit/d76140f253c3903a22ed9087623be05e79a220f9)) +* Expose Compute's Service Account Email ([#1656](https://github.com/googleapis/google-auth-library-nodejs/issues/1656)) ([ef29297](https://github.com/googleapis/google-auth-library-nodejs/commit/ef29297215eb907c511627543391db9b43c1eac5)) +* Extend `universe_domain` support ([#1633](https://github.com/googleapis/google-auth-library-nodejs/issues/1633)) ([4ee02da](https://github.com/googleapis/google-auth-library-nodejs/commit/4ee02da4f6934eeb9550b8b659d1abda85194e6b)) +* Support Instantiating OAuth2Client with Credentials ([#1652](https://github.com/googleapis/google-auth-library-nodejs/issues/1652)) ([4339d64](https://github.com/googleapis/google-auth-library-nodejs/commit/4339d6410da7b792f96d31f62800507313c25430)) + + +### Bug Fixes + +* **deps:** Update dependency @google-cloud/storage to v7 ([#1629](https://github.com/googleapis/google-auth-library-nodejs/issues/1629)) ([8075b6f](https://github.com/googleapis/google-auth-library-nodejs/commit/8075b6fbb347067ec231134fb4388ab9bcd1bde0)) +* **deps:** Update dependency @googleapis/iam to v11 ([#1622](https://github.com/googleapis/google-auth-library-nodejs/issues/1622)) ([03c0ac9](https://github.com/googleapis/google-auth-library-nodejs/commit/03c0ac9950217ec08ac9ad66eaaa4e9a9e67d423)) +* **deps:** Update dependency google-auth-library to v9 ([#1618](https://github.com/googleapis/google-auth-library-nodejs/issues/1618)) ([1873fef](https://github.com/googleapis/google-auth-library-nodejs/commit/1873fefcdf4df00ff40b0f311e40dcdd402f799e)) +* **deps:** Update dependency puppeteer to v21 ([#1627](https://github.com/googleapis/google-auth-library-nodejs/issues/1627)) ([dd04fbd](https://github.com/googleapis/google-auth-library-nodejs/commit/dd04fbdeb17e457a09b06a4abc2aedf511f355ea)) +* Workforce Audience Pattern ([#1654](https://github.com/googleapis/google-auth-library-nodejs/issues/1654)) ([77eeaed](https://github.com/googleapis/google-auth-library-nodejs/commit/77eeaed1d66c89e855a15db0bb6b9dbd96179c8f)) + +## [9.0.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.9.0...v9.0.0) (2023-07-20) + + +### ⚠ BREAKING CHANGES + +* **deps:** upgrade node-gtoken to 7.0.0 ([#1590](https://github.com/googleapis/google-auth-library-nodejs/issues/1590)) +* make transporter attribute type more generic ([#1406](https://github.com/googleapis/google-auth-library-nodejs/issues/1406)) +* migrate to Node 14 ([#1582](https://github.com/googleapis/google-auth-library-nodejs/issues/1582)) +* remove arrify and fast-text-encoding ([#1583](https://github.com/googleapis/google-auth-library-nodejs/issues/1583)) + +### Features + +* Make transporter attribute type more generic ([#1406](https://github.com/googleapis/google-auth-library-nodejs/issues/1406)) ([dfac525](https://github.com/googleapis/google-auth-library-nodejs/commit/dfac5259fa573f25960949ae1c2f98bc78962717)) + + +### Bug Fixes + +* **deps:** Update dependency @googleapis/iam to v10 ([#1588](https://github.com/googleapis/google-auth-library-nodejs/issues/1588)) ([f95a153](https://github.com/googleapis/google-auth-library-nodejs/commit/f95a153409735a1f4c470fd7de18bf9ab5d5e771)) + + +### Build System + +* Remove arrify and fast-text-encoding 
([#1583](https://github.com/googleapis/google-auth-library-nodejs/issues/1583)) ([d736da3](https://github.com/googleapis/google-auth-library-nodejs/commit/d736da3fa5536650ae6a3aebbcae408254ebd035)) + + +### Miscellaneous Chores + +* **deps:** Upgrade node-gtoken to 7.0.0 ([#1590](https://github.com/googleapis/google-auth-library-nodejs/issues/1590)) ([8738df9](https://github.com/googleapis/google-auth-library-nodejs/commit/8738df9e24c62213b5a78e34a70cdcf456a794fa)) +* Migrate to Node 14 ([#1582](https://github.com/googleapis/google-auth-library-nodejs/issues/1582)) ([6004dca](https://github.com/googleapis/google-auth-library-nodejs/commit/6004dca8d7e7aca7e570b56afd84d3c7f5d40242)) + +## [8.9.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.8.0...v8.9.0) (2023-06-29) + + +### Features + +* Adds universe_domain field to base external client ([#1548](https://github.com/googleapis/google-auth-library-nodejs/issues/1548)) ([7412d7c](https://github.com/googleapis/google-auth-library-nodejs/commit/7412d7c55c688601199b5201b29cfbe63c4c7a70)) +* Utilize `gcp-metadata`'s GCP Residency Check ([#1513](https://github.com/googleapis/google-auth-library-nodejs/issues/1513)) ([43377ac](https://github.com/googleapis/google-auth-library-nodejs/commit/43377ac7af1ece14b82b053b504449f257d11af6)) + + +### Bug Fixes + +* **deps:** Update dependency @googleapis/iam to v8 ([#1581](https://github.com/googleapis/google-auth-library-nodejs/issues/1581)) ([7373b75](https://github.com/googleapis/google-auth-library-nodejs/commit/7373b75c2205876f38ec2697f23758176a327fc7)) +* **deps:** Update dependency puppeteer to v20 ([#1550](https://github.com/googleapis/google-auth-library-nodejs/issues/1550)) ([3563c16](https://github.com/googleapis/google-auth-library-nodejs/commit/3563c16e2376729e399fefc0b1fad7bff78c296e)) +* IdTokenClient expiry_date check ([#1555](https://github.com/googleapis/google-auth-library-nodejs/issues/1555)) ([efcdef1](https://github.com/googleapis/google-auth-library-nodejs/commit/efcdef190fd94537d79087e7fcb26e5925a46173)) +* KeepAlive sample ([#1561](https://github.com/googleapis/google-auth-library-nodejs/issues/1561)) ([e3ab824](https://github.com/googleapis/google-auth-library-nodejs/commit/e3ab8247761b0a61f25e08a1b8fd6b200a4be200)) +* Make universeDomain private ([#1552](https://github.com/googleapis/google-auth-library-nodejs/issues/1552)) ([73b63d5](https://github.com/googleapis/google-auth-library-nodejs/commit/73b63d56fad8da033307e1be501a8846a5311211)) +* Remove auth lib version from x-goog-api-client header ([#1558](https://github.com/googleapis/google-auth-library-nodejs/issues/1558)) ([ce6013c](https://github.com/googleapis/google-auth-library-nodejs/commit/ce6013c6132ba2b82a1f8cba0a15c4710536204c)) + +## [8.8.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.7.0...v8.8.0) (2023-04-25) + + +### Features + +* Add External Account Authorized User client type ([#1530](https://github.com/googleapis/google-auth-library-nodejs/issues/1530)) ([06d4ef3](https://github.com/googleapis/google-auth-library-nodejs/commit/06d4ef3cc9ba1651af5b1f90c02b231862822ba2)) +* Document External Account Authorized User Credentials ([#1540](https://github.com/googleapis/google-auth-library-nodejs/issues/1540)) ([c6138be](https://github.com/googleapis/google-auth-library-nodejs/commit/c6138be12753c68d7645843451e1e3f9146e515a)) + + +### Bug Fixes + +* **deps:** Update `gcp-metadata` to v5.2.0 ([#1502](https://github.com/googleapis/google-auth-library-nodejs/issues/1502)) 
([2562d11](https://github.com/googleapis/google-auth-library-nodejs/commit/2562d1192e0f89a3232897b8e27f24a14d5222f2)) +* **deps:** Update dependency @googleapis/iam to v4 ([#1482](https://github.com/googleapis/google-auth-library-nodejs/issues/1482)) ([00d6135](https://github.com/googleapis/google-auth-library-nodejs/commit/00d6135f35a1aa193d50fad6b3ec28a7fda9df66)) +* **deps:** Update dependency @googleapis/iam to v5 ([#1526](https://github.com/googleapis/google-auth-library-nodejs/issues/1526)) ([a1f9835](https://github.com/googleapis/google-auth-library-nodejs/commit/a1f9835fe155722206046d6bb5b56f9e53f2fe9a)) +* **deps:** Update dependency @googleapis/iam to v6 ([#1536](https://github.com/googleapis/google-auth-library-nodejs/issues/1536)) ([86a4de8](https://github.com/googleapis/google-auth-library-nodejs/commit/86a4de82c0de3efeb4b9b05a6ef34bd98cce398c)) +* **deps:** Update dependency @googleapis/iam to v7 ([#1538](https://github.com/googleapis/google-auth-library-nodejs/issues/1538)) ([fdb67dc](https://github.com/googleapis/google-auth-library-nodejs/commit/fdb67dc634f7fa2fcf8977d6d488cc91e9a7cccb)) +* Do not call metadata server if security creds and region are retrievable through environment vars ([#1493](https://github.com/googleapis/google-auth-library-nodejs/issues/1493)) ([d4de941](https://github.com/googleapis/google-auth-library-nodejs/commit/d4de9412e12f1f6f23f2a7c0d176dc5d2543e607)) +* Removing 3pi config URL validation ([#1517](https://github.com/googleapis/google-auth-library-nodejs/issues/1517)) ([a278d19](https://github.com/googleapis/google-auth-library-nodejs/commit/a278d19a0b211b13e5cf5176d40128e704b55780)) +* Removing aws url validation ([#1531](https://github.com/googleapis/google-auth-library-nodejs/issues/1531)) ([f4d9335](https://github.com/googleapis/google-auth-library-nodejs/commit/f4d933579cb5b9e50adf6e679a73cc78388ad8f8)) + +## [8.7.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.6.0...v8.7.0) (2022-11-08) + + +### Features + +* Introduce environment variable for quota project ([#1478](https://github.com/googleapis/google-auth-library-nodejs/issues/1478)) ([8706abc](https://github.com/googleapis/google-auth-library-nodejs/commit/8706abc64bb7d7b4336597589abb011150015a8c)) + + +### Bug Fixes + +* **deps:** Update dependency puppeteer to v19 ([#1476](https://github.com/googleapis/google-auth-library-nodejs/issues/1476)) ([86b258d](https://github.com/googleapis/google-auth-library-nodejs/commit/86b258da699810ead624419333a1a7f998f1b376)) +* Validate url domain for aws metadata urls ([#1484](https://github.com/googleapis/google-auth-library-nodejs/issues/1484)) ([6dc4e58](https://github.com/googleapis/google-auth-library-nodejs/commit/6dc4e583dfd3aa3030dfbf959ee1c68a259abe2f)) + +## [8.6.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.5.2...v8.6.0) (2022-10-06) + + +### Features + +* Adding validation for psc endpoints ([#1473](https://github.com/googleapis/google-auth-library-nodejs/issues/1473)) ([4bbd13f](https://github.com/googleapis/google-auth-library-nodejs/commit/4bbd13fbf9081e004209d0ffc336648cff0c529e)) +* **samples:** Auth samples ([#1444](https://github.com/googleapis/google-auth-library-nodejs/issues/1444)) ([137883a](https://github.com/googleapis/google-auth-library-nodejs/commit/137883aff56c9e847abb6445c89a76a27536fe26)) + +## [8.5.2](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.5.1...v8.5.2) (2022-09-22) + + +### Bug Fixes + +* **deps:** Update dependency puppeteer to v17 
([#1452](https://github.com/googleapis/google-auth-library-nodejs/issues/1452)) ([b317b59](https://github.com/googleapis/google-auth-library-nodejs/commit/b317b5963c18a598fceb85d4f32cc8cd64bb9b7b)) +* **deps:** Update dependency puppeteer to v18 ([#1471](https://github.com/googleapis/google-auth-library-nodejs/issues/1471)) ([9a2a918](https://github.com/googleapis/google-auth-library-nodejs/commit/9a2a9188c674c41a6a67095c3b102a7dc545fff5)) + +## [8.5.1](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.5.0...v8.5.1) (2022-08-31) + + +### Bug Fixes + +* do not use #private ([#1454](https://github.com/googleapis/google-auth-library-nodejs/issues/1454)) ([6c30274](https://github.com/googleapis/google-auth-library-nodejs/commit/6c302746c9eb4778619dec5f2f5f6e940af8086a)) + +## [8.5.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.4.0...v8.5.0) (2022-08-31) + + +### Features + +* Support Not Requiring `projectId` When Not Required ([#1448](https://github.com/googleapis/google-auth-library-nodejs/issues/1448)) ([b37489b](https://github.com/googleapis/google-auth-library-nodejs/commit/b37489b6bc17645d3ea23fbceb2326adb296240b)) + + +### Bug Fixes + +* add hashes to requirements.txt ([#1544](https://github.com/googleapis/google-auth-library-nodejs/issues/1544)) ([#1449](https://github.com/googleapis/google-auth-library-nodejs/issues/1449)) ([54afa8e](https://github.com/googleapis/google-auth-library-nodejs/commit/54afa8ef184c8a68c9930d67d850b7334c28ecaf)) +* remove `projectId` check for `signBlob` calls ([6c04661](https://github.com/googleapis/google-auth-library-nodejs/commit/6c04661c6950532a8239cb95f0509a9d5368ffcc)) + +## [8.4.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.3.0...v8.4.0) (2022-08-23) + + +### Features + +* adding configurable token lifespan support ([#1441](https://github.com/googleapis/google-auth-library-nodejs/issues/1441)) ([178e3b8](https://github.com/googleapis/google-auth-library-nodejs/commit/178e3b83104f5a050f09e17d522d36c8feca632c)) + + +### Bug Fixes + +* **functions:** clarify auth comments ([#1427](https://github.com/googleapis/google-auth-library-nodejs/issues/1427)) ([7e06732](https://github.com/googleapis/google-auth-library-nodejs/commit/7e067327634281bba948c9cc6bc99c7ab860f827)) + +## [8.3.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.2.0...v8.3.0) (2022-08-19) + + +### Features + +* Add `generateIdToken` support for `Impersonated` Clients ([#1439](https://github.com/googleapis/google-auth-library-nodejs/issues/1439)) ([4ace981](https://github.com/googleapis/google-auth-library-nodejs/commit/4ace981ba1d37a9a00201a1c91e1b79c8cdb5cec)) + +## [8.2.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.1.1...v8.2.0) (2022-08-11) + + +### Features + +* adds Pluggable Auth support ([#1437](https://github.com/googleapis/google-auth-library-nodejs/issues/1437)) ([ed7ef7a](https://github.com/googleapis/google-auth-library-nodejs/commit/ed7ef7a5d1fa6bf5d06bdaab278052fd3930fb7f)) + +## [8.1.1](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.1.0...v8.1.1) (2022-07-08) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v15 ([#1424](https://github.com/googleapis/google-auth-library-nodejs/issues/1424)) ([1462f2c](https://github.com/googleapis/google-auth-library-nodejs/commit/1462f2c533da22b0a07130b25c41df046d4d713d)) + +## [8.1.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.0.3...v8.1.0) (2022-06-30) + + +### 
Features + +* handle impersonated ADC ([#1425](https://github.com/googleapis/google-auth-library-nodejs/issues/1425)) ([835be89](https://github.com/googleapis/google-auth-library-nodejs/commit/835be89687c2dff19f34e8b55645d3d611339e14)) + +## [8.0.3](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.0.2...v8.0.3) (2022-06-17) + + +### Bug Fixes + +* **deps:** update dependency @googleapis/iam to v3 ([#1421](https://github.com/googleapis/google-auth-library-nodejs/issues/1421)) ([0dc8857](https://github.com/googleapis/google-auth-library-nodejs/commit/0dc88572958520ddd4834aa982d41b98851895d9)) + +### [8.0.2](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.0.1...v8.0.2) (2022-04-27) + + +### Bug Fixes + +* Fixing Implementation of GoogleAuth.sign() for external account credentials ([#1397](https://github.com/googleapis/google-auth-library-nodejs/issues/1397)) ([b0ddb75](https://github.com/googleapis/google-auth-library-nodejs/commit/b0ddb7512fb9ed1e51b6874b7376d7e1f26be644)) + +### [8.0.1](https://github.com/googleapis/google-auth-library-nodejs/compare/v8.0.0...v8.0.1) (2022-04-22) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v5 ([#1398](https://github.com/googleapis/google-auth-library-nodejs/issues/1398)) ([cbd7d4f](https://github.com/googleapis/google-auth-library-nodejs/commit/cbd7d4f471046afa05ff4539c9a93bba998b318c)) +* **deps:** update dependency google-auth-library to v8 ([#1399](https://github.com/googleapis/google-auth-library-nodejs/issues/1399)) ([9a8be63](https://github.com/googleapis/google-auth-library-nodejs/commit/9a8be636eaea979979426558dca40def9c0e569e)) + +## [8.0.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v7.14.1...v8.0.0) (2022-04-20) + + +### ⚠ BREAKING CHANGES + +* Set Node v12 to minimum supported version & Upgrade TypeScript (#1392) +* remove deprecated DeprecatedGetClientOptions (#1393) + +### Code Refactoring + +* remove deprecated DeprecatedGetClientOptions ([#1393](https://github.com/googleapis/google-auth-library-nodejs/issues/1393)) ([9c02941](https://github.com/googleapis/google-auth-library-nodejs/commit/9c02941e52514f8e3b07fedb01439fc313046063)) + + +### Build System + +* Set Node v12 to minimum supported version & Upgrade TypeScript ([#1392](https://github.com/googleapis/google-auth-library-nodejs/issues/1392)) ([b7bcedb](https://github.com/googleapis/google-auth-library-nodejs/commit/b7bcedb9e4ec24217a861016f3378460af7598c7)) + +### [7.14.1](https://github.com/googleapis/google-auth-library-nodejs/compare/v7.14.0...v7.14.1) (2022-03-09) + + +### Bug Fixes + +* **serverless:** clean up ID token example ([#1380](https://github.com/googleapis/google-auth-library-nodejs/issues/1380)) ([db27f1b](https://github.com/googleapis/google-auth-library-nodejs/commit/db27f1bc31efaa9df28da2e0a1229ee3ebea0751)) + +## [7.14.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v7.13.0...v7.14.0) (2022-02-22) + + +### Features + +* Add AWS Session Token to Metadata Requests ([#1363](https://github.com/googleapis/google-auth-library-nodejs/issues/1363)) ([9ea3e98](https://github.com/googleapis/google-auth-library-nodejs/commit/9ea3e98582e8a69dedef89952ae08d64c49f48ef)) + + +### Bug Fixes + +* Rename `auth` to `authClient` & Use Generics for `AuthClient` ([#1371](https://github.com/googleapis/google-auth-library-nodejs/issues/1371)) ([8373000](https://github.com/googleapis/google-auth-library-nodejs/commit/8373000b2240cb694e9492f849e5cc7e13c89b1a)) + +## 
[7.13.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v7.12.0...v7.13.0) (2022-02-17) + + +### Features + +* Support instantiating `GoogleAuth` with an `AuthClient` ([#1364](https://github.com/googleapis/google-auth-library-nodejs/issues/1364)) ([8839b5b](https://github.com/googleapis/google-auth-library-nodejs/commit/8839b5b12531ae4966b38795ed818ad138eb326a)) + +## [7.12.0](https://github.com/googleapis/google-auth-library-nodejs/compare/v7.11.0...v7.12.0) (2022-02-09) + + +### Features + +* Export `AuthClient` ([#1361](https://github.com/googleapis/google-auth-library-nodejs/issues/1361)) ([88f42ec](https://github.com/googleapis/google-auth-library-nodejs/commit/88f42eca1a02ab5768e02538f2dc639d196de9fb)) + +## [7.11.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.10.4...v7.11.0) (2021-12-15) + + +### Features + +* adds README, samples and integration tests for downscoping with CAB ([#1311](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1311)) ([4549116](https://www.github.com/googleapis/google-auth-library-nodejs/commit/454911637eca6a1272a729b2b2dfcf690c53fe29)) + +### [7.10.4](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.10.3...v7.10.4) (2021-12-13) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v13 ([#1334](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1334)) ([e05548d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/e05548dfb431638d618aa8b846d0944541387033)) + +### [7.10.3](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.10.2...v7.10.3) (2021-11-29) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v12 ([#1325](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1325)) ([110ddc2](https://www.github.com/googleapis/google-auth-library-nodejs/commit/110ddc245b768888a88d8c7211f0b0391326cc10)) + +### [7.10.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.10.1...v7.10.2) (2021-11-03) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v11 ([#1312](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1312)) ([b3ba9ac](https://www.github.com/googleapis/google-auth-library-nodejs/commit/b3ba9ac834de86022a78ac540995fa35857d6670)) + +### [7.10.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.10.0...v7.10.1) (2021-10-14) + + +### Bug Fixes + +* **security:** explicitly update keypair dependency ([94401a6](https://www.github.com/googleapis/google-auth-library-nodejs/commit/94401a6b73eeaf370aeaf9cbf92f23f4fc7bde9b)) + +## [7.10.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.9.2...v7.10.0) (2021-09-28) + + +### Features + +* add workforce config support. 
([#1251](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1251)) ([fe29e38](https://www.github.com/googleapis/google-auth-library-nodejs/commit/fe29e384820f1c97ca62478c55813aad3f8ecbea)) + +### [7.9.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.9.1...v7.9.2) (2021-09-16) + + +### Bug Fixes + +* **deps:** update dependency @googleapis/iam to v2 ([#1253](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1253)) ([59b8243](https://www.github.com/googleapis/google-auth-library-nodejs/commit/59b82436d6b9b68b6ae0e0e81d4b797d964acae2)) + +### [7.9.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.9.0...v7.9.1) (2021-09-02) + + +### Bug Fixes + +* **build:** switch to main branch ([#1249](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1249)) ([2c72de4](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2c72de4ef7c07ddb4586094faf117715ab50143e)) + +## [7.9.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.8.0...v7.9.0) (2021-09-02) + + +### Features + +* wire up implementation of DownscopedClient. ([#1232](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1232)) ([baa1318](https://www.github.com/googleapis/google-auth-library-nodejs/commit/baa13185836bb299b769c034bfb7d37231eea8d1)) + +## [7.8.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.7.0...v7.8.0) (2021-08-30) + + +### Features + +* use self-signed JWTs if alwaysUseJWTAccessWithScope is true ([#1196](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1196)) ([ad3f652](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ad3f652e8e859d8d8a69dfe9df01d001862fe0ae)) + +## [7.7.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.6.2...v7.7.0) (2021-08-27) + + +### Features + +* add refreshHandler callback to OAuth 2.0 client to handle token refresh ([#1213](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1213)) ([2fcab77](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2fcab77a1fae85489829f22ec95cc66b0b284342)) + +### [7.6.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.6.1...v7.6.2) (2021-08-17) + + +### Bug Fixes + +* validate token_url and service_account_impersonation_url ([#1229](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1229)) ([0360bb7](https://www.github.com/googleapis/google-auth-library-nodejs/commit/0360bb722aaa082c36c1e1919bf5df27efbe15b3)) + +### [7.6.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.6.0...v7.6.1) (2021-08-13) + + +### Bug Fixes + +* use updated variable name for self-signed JWTs ([#1233](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1233)) ([ef41fe5](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ef41fe59b423125c607e3ad20896a35f12f5365b)) + +## [7.6.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.5.0...v7.6.0) (2021-08-10) + + +### Features + +* add GoogleAuth.sign() support to external account client ([#1227](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1227)) ([1ca3b73](https://www.github.com/googleapis/google-auth-library-nodejs/commit/1ca3b733427d951ed624e1129fca510d84d5d0fe)) + +## [7.5.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.4.1...v7.5.0) (2021-08-04) + + +### Features + +* Adds support for STS response not returning 
expires_in field. ([#1216](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1216)) ([24bb456](https://www.github.com/googleapis/google-auth-library-nodejs/commit/24bb4568820c2692b1b3ff29835a38fdb3f28c9e)) + +### [7.4.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.4.0...v7.4.1) (2021-07-29) + + +### Bug Fixes + +* **downscoped-client:** bug fixes for downscoped client implementation. ([#1219](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1219)) ([4fbe67e](https://www.github.com/googleapis/google-auth-library-nodejs/commit/4fbe67e08bce3f31193d3bb7b93c4cc1251e66a2)) + +## [7.4.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.3.0...v7.4.0) (2021-07-29) + + +### Features + +* **impersonated:** add impersonated credentials auth ([#1207](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1207)) ([ab1cd31](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ab1cd31e07d45424f614e0401d1068df2fbd914c)) + +## [7.3.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.2.0...v7.3.0) (2021-07-02) + + +### Features + +* add useJWTAccessWithScope and defaultServicePath variable ([#1204](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1204)) ([79e100e](https://www.github.com/googleapis/google-auth-library-nodejs/commit/79e100e9ddc64f34e34d0e91c8188f1818e33a1c)) + +## [7.2.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.1.2...v7.2.0) (2021-06-30) + + +### Features + +* Implement DownscopedClient#getAccessToken() and unit test ([#1201](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1201)) ([faa6677](https://www.github.com/googleapis/google-auth-library-nodejs/commit/faa6677fe72c8fc671a2190abe45897ac58cc42e)) + +### [7.1.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.1.1...v7.1.2) (2021-06-10) + + +### Bug Fixes + +* use iam client library to setup test ([#1173](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1173)) ([74ac5db](https://www.github.com/googleapis/google-auth-library-nodejs/commit/74ac5db59f9eff8fa4f3bdb6acc0647a1a4f491f)) + +### [7.1.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.1.0...v7.1.1) (2021-06-02) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v10 ([#1182](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1182)) ([003e3ee](https://www.github.com/googleapis/google-auth-library-nodejs/commit/003e3ee5d8aeb749c07a4a4db2b75a5882988cc3)) + +## [7.1.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.0.4...v7.1.0) (2021-05-21) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#1174](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1174)) ([f377adc](https://www.github.com/googleapis/google-auth-library-nodejs/commit/f377adc01ca16687bb905aa14f6c62a23e90aaa9)) +* add detection for Cloud Run ([#1177](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1177)) ([4512363](https://www.github.com/googleapis/google-auth-library-nodejs/commit/4512363cf712dff5f178af0e7022c258775dfec7)) + +### [7.0.4](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.0.3...v7.0.4) (2021-04-06) + + +### Bug Fixes + +* do not suppress external project ID determination errors ([#1153](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1153)) 
([6c1c91d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/6c1c91dac6c31d762b03774a385d780a824fce97)) + +### [7.0.3](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.0.2...v7.0.3) (2021-03-23) + + +### Bug Fixes + +* support AWS_DEFAULT_REGION for determining AWS region ([#1149](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1149)) ([9ae2d30](https://www.github.com/googleapis/google-auth-library-nodejs/commit/9ae2d30c15c9bce3cae70ccbe6e227c096005695)) + +### [7.0.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.0.1...v7.0.2) (2021-02-10) + + +### Bug Fixes + +* expose `BaseExternalAccountClient` and `BaseExternalAccountClientOptions` ([#1142](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1142)) ([1d62c04](https://www.github.com/googleapis/google-auth-library-nodejs/commit/1d62c04dfa117b6a81e8c78385dc72792369cf21)) + +### [7.0.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v7.0.0...v7.0.1) (2021-02-09) + + +### Bug Fixes + +* **deps:** update dependency google-auth-library to v7 ([#1140](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1140)) ([9c717f7](https://www.github.com/googleapis/google-auth-library-nodejs/commit/9c717f70ca155b24edd5511b6038679db25b85b7)) + +## [7.0.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.6...v7.0.0) (2021-02-08) + + +### ⚠ BREAKING CHANGES + +* integrates external_accounts with `GoogleAuth` and ADC (#1052) +* workload identity federation support (#1131) + +### Features + +* adds service account impersonation to `ExternalAccountClient` ([#1041](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1041)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* adds text/json credential_source support to IdentityPoolClients ([#1059](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1059)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* defines `ExternalAccountClient` used to instantiate external account clients ([#1050](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1050)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* defines `IdentityPoolClient` used for K8s and Azure workloads ([#1042](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1042)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* defines ExternalAccountClient abstract class for external_account credentials ([#1030](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1030)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* get AWS region from environment variable ([#1067](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1067)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* implements AWS signature version 4 for signing requests ([#1047](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1047)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* implements the OAuth token exchange 
spec based on rfc8693 ([#1026](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1026)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* integrates external_accounts with `GoogleAuth` and ADC ([#1052](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1052)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) +* workload identity federation support ([#1131](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1131)) ([997f124](https://www.github.com/googleapis/google-auth-library-nodejs/commit/997f124a5c02dfa44879a759bf701a9fa4c3ba90)) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v6 ([#1129](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1129)) ([5240fb0](https://www.github.com/googleapis/google-auth-library-nodejs/commit/5240fb0e7ba5503d562659a0d1d7c952bc44ce0e)) +* **deps:** update dependency puppeteer to v7 ([#1134](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1134)) ([02d0d73](https://www.github.com/googleapis/google-auth-library-nodejs/commit/02d0d73a5f0d2fc7de9b13b160e4e7074652f9d0)) + +### [6.1.6](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.5...v6.1.6) (2021-01-27) + + +### Bug Fixes + +* call addSharedMetadataHeaders even when token has not expired ([#1116](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1116)) ([aad043d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/aad043d20df3f1e44f56c58a21f15000b6fe970d)) + +### [6.1.5](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.4...v6.1.5) (2021-01-22) + + +### Bug Fixes + +* support PEM and p12 when using factory ([#1120](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1120)) ([c2ead4c](https://www.github.com/googleapis/google-auth-library-nodejs/commit/c2ead4cc7650f100b883c9296fce628f17085992)) + +### [6.1.4](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.3...v6.1.4) (2020-12-22) + + +### Bug Fixes + +* move accessToken to headers instead of parameter ([#1108](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1108)) ([67b0cc3](https://www.github.com/googleapis/google-auth-library-nodejs/commit/67b0cc3077860a1583bcf18ce50aeff58bbb5496)) + +### [6.1.3](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.2...v6.1.3) (2020-10-22) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v4 ([#1086](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1086)) ([f2678ff](https://www.github.com/googleapis/google-auth-library-nodejs/commit/f2678ff5f8f5a0ee33924278b58e0a6e3122cb12)) + +### [6.1.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.1...v6.1.2) (2020-10-19) + + +### Bug Fixes + +* update gcp-metadata to catch a json-bigint security fix ([#1078](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1078)) ([125fe09](https://www.github.com/googleapis/google-auth-library-nodejs/commit/125fe0924a2206ebb0c83ece9947524e7b135803)) + +### [6.1.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.1.0...v6.1.1) (2020-10-06) + + +### Bug Fixes + +* **deps:** upgrade gtoken ([#1064](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1064)) 
([9116f24](https://www.github.com/googleapis/google-auth-library-nodejs/commit/9116f247486d6376feca505bbfa42a91d5e579e2)) + +## [6.1.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.8...v6.1.0) (2020-09-22) + + +### Features + +* default self-signed JWTs ([#1054](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1054)) ([b4d139d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/b4d139d9ee27f886ca8cc5478615c052700fff48)) + +### [6.0.8](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.7...v6.0.8) (2020-08-13) + + +### Bug Fixes + +* **deps:** roll back dependency google-auth-library to ^6.0.6 ([#1033](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1033)) ([eb54ee9](https://www.github.com/googleapis/google-auth-library-nodejs/commit/eb54ee9369d9e5a01d164ccf7f826858d44827fd)) + +### [6.0.7](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.6...v6.0.7) (2020-08-11) + + +### Bug Fixes + +* migrate token info API to not pass token in query string ([#991](https://www.github.com/googleapis/google-auth-library-nodejs/issues/991)) ([a7e5701](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a7e5701a8394d79fe93d28794467747a23cf9ff4)) + +### [6.0.6](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.5...v6.0.6) (2020-07-30) + + +### Bug Fixes + +* **types:** include scope in credentials type ([#1007](https://www.github.com/googleapis/google-auth-library-nodejs/issues/1007)) ([a2b7d23](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a2b7d23caa5bf253c7c0756396f1b58216182089)) + +### [6.0.5](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.4...v6.0.5) (2020-07-13) + + +### Bug Fixes + +* **deps:** update dependency lru-cache to v6 ([#995](https://www.github.com/googleapis/google-auth-library-nodejs/issues/995)) ([3c07566](https://www.github.com/googleapis/google-auth-library-nodejs/commit/3c07566f0384611227030e9b381fc6e6707e526b)) + +### [6.0.4](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.3...v6.0.4) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#993](https://www.github.com/googleapis/google-auth-library-nodejs/issues/993)) ([ad12ceb](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ad12ceb3309b7db7394fe1fe1d5e7b2e4901141d)) + +### [6.0.3](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.2...v6.0.3) (2020-07-06) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v5 ([#986](https://www.github.com/googleapis/google-auth-library-nodejs/issues/986)) ([7cfe6f2](https://www.github.com/googleapis/google-auth-library-nodejs/commit/7cfe6f200b9c04fe4805d1b1e3a2e03a9668e551)) + +### [6.0.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.1...v6.0.2) (2020-06-16) + + +### Bug Fixes + +* **deps:** update dependency puppeteer to v4 ([#976](https://www.github.com/googleapis/google-auth-library-nodejs/issues/976)) ([9ddfb9b](https://www.github.com/googleapis/google-auth-library-nodejs/commit/9ddfb9befedd10d6c82cbf799c1afea9ba10d444)) +* **tsc:** audience property is not mandatory on verifyIdToken ([#972](https://www.github.com/googleapis/google-auth-library-nodejs/issues/972)) ([17a7e24](https://www.github.com/googleapis/google-auth-library-nodejs/commit/17a7e247cdb798ddf3173cf44ab762665cbce0a1)) +* **types:** add locale property to idtoken 
([#974](https://www.github.com/googleapis/google-auth-library-nodejs/issues/974)) ([ebf9bed](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ebf9beda30f251da6adb9ec0bf943019ea0171c5)), closes [#973](https://www.github.com/googleapis/google-auth-library-nodejs/issues/973) + +### [6.0.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v6.0.0...v6.0.1) (2020-05-21) + + +### Bug Fixes + +* **deps:** update dependency google-auth-library to v6 ([#930](https://www.github.com/googleapis/google-auth-library-nodejs/issues/930)) ([81fdabe](https://www.github.com/googleapis/google-auth-library-nodejs/commit/81fdabe6fb7f0b8c5114c0d835680a28def822e2)) +* apache license URL ([#468](https://www.github.com/googleapis/google-auth-library-nodejs/issues/468)) ([#936](https://www.github.com/googleapis/google-auth-library-nodejs/issues/936)) ([53831cf](https://www.github.com/googleapis/google-auth-library-nodejs/commit/53831cf72f6669d13692c5665fef5062dc8f6c1a)) +* **deps:** update dependency puppeteer to v3 ([#944](https://www.github.com/googleapis/google-auth-library-nodejs/issues/944)) ([4d6fba0](https://www.github.com/googleapis/google-auth-library-nodejs/commit/4d6fba034cb0e70092656e9aff1ba419fdfca880)) +* fixing tsc error caused by @types/node update ([#965](https://www.github.com/googleapis/google-auth-library-nodejs/issues/965)) ([b94edb0](https://www.github.com/googleapis/google-auth-library-nodejs/commit/b94edb0716572593e4e9cb8a9b9bbfa567f71625)) +* gcp-metadata now warns rather than throwing ([#956](https://www.github.com/googleapis/google-auth-library-nodejs/issues/956)) ([89e16c2](https://www.github.com/googleapis/google-auth-library-nodejs/commit/89e16c2401101d086a8f9d05b8f0771b5c74157c)) + +## [6.0.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.10.1...v6.0.0) (2020-03-26) + + +### ⚠ BREAKING CHANGES + +* typescript@3.7.x introduced some breaking changes in +generated code. 
+* require node 10 in engines field (#926) +* remove deprecated methods (#906) + +### Features + +* require node 10 in engines field ([#926](https://www.github.com/googleapis/google-auth-library-nodejs/issues/926)) ([d89c59a](https://www.github.com/googleapis/google-auth-library-nodejs/commit/d89c59a316e9ca5b8c351128ee3e2d91e9729d5c)) + + +### Bug Fixes + +* do not warn for SDK creds ([#905](https://www.github.com/googleapis/google-auth-library-nodejs/issues/905)) ([9536840](https://www.github.com/googleapis/google-auth-library-nodejs/commit/9536840f88e77f747bbbc2c1b5b4289018fc23c9)) +* use iamcredentials API to sign blobs ([#908](https://www.github.com/googleapis/google-auth-library-nodejs/issues/908)) ([7b8e4c5](https://www.github.com/googleapis/google-auth-library-nodejs/commit/7b8e4c52e31bb3d448c3ff8c05002188900eaa04)) +* **deps:** update dependency gaxios to v3 ([#917](https://www.github.com/googleapis/google-auth-library-nodejs/issues/917)) ([1f4bf61](https://www.github.com/googleapis/google-auth-library-nodejs/commit/1f4bf6128a0dcf22cfe1ec492b2192f513836cb2)) +* **deps:** update dependency gcp-metadata to v4 ([#918](https://www.github.com/googleapis/google-auth-library-nodejs/issues/918)) ([d337131](https://www.github.com/googleapis/google-auth-library-nodejs/commit/d337131d009cc1f8182f7a1f8a9034433ee3fbf7)) +* **types:** add additional fields to TokenInfo ([#907](https://www.github.com/googleapis/google-auth-library-nodejs/issues/907)) ([5b48eb8](https://www.github.com/googleapis/google-auth-library-nodejs/commit/5b48eb86c108c47d317a0eb96b47c0cae86f98cb)) + + +### Build System + +* update to latest gts and TypeScript ([#927](https://www.github.com/googleapis/google-auth-library-nodejs/issues/927)) ([e11e18c](https://www.github.com/googleapis/google-auth-library-nodejs/commit/e11e18cb33eb60a666980d061c54bb8891cdd242)) + + +### Miscellaneous Chores + +* remove deprecated methods ([#906](https://www.github.com/googleapis/google-auth-library-nodejs/issues/906)) ([f453fb7](https://www.github.com/googleapis/google-auth-library-nodejs/commit/f453fb7d8355e6dc74800b18d6f43c4e91d4acc9)) + +### [5.10.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.10.0...v5.10.1) (2020-02-25) + + +### Bug Fixes + +* if GCF environment detected, increase library timeout ([#899](https://www.github.com/googleapis/google-auth-library-nodejs/issues/899)) ([2577ff2](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2577ff28bf22dfc58bd09e7365471c16f359f109)) + +## [5.10.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.9.2...v5.10.0) (2020-02-20) + + +### Features + +* support for verifying ES256 and retrieving IAP public keys ([#887](https://www.github.com/googleapis/google-auth-library-nodejs/issues/887)) ([a98e386](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a98e38678dc4a5e963356378c75c658e36dccd01)) + + +### Bug Fixes + +* **docs:** correct links in README ([f6a3194](https://www.github.com/googleapis/google-auth-library-nodejs/commit/f6a3194ff6df97d4fd833ae69ec80c05eab46e7b)), closes [#891](https://www.github.com/googleapis/google-auth-library-nodejs/issues/891) + +### [5.9.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.9.1...v5.9.2) (2020-01-28) + + +### Bug Fixes + +* populate credentials.refresh_token if provided ([#881](https://www.github.com/googleapis/google-auth-library-nodejs/issues/881)) 
([63c4637](https://www.github.com/googleapis/google-auth-library-nodejs/commit/63c4637c57e4113a7b01bf78933a8bff0356c104)) + +### [5.9.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.9.0...v5.9.1) (2020-01-16) + + +### Bug Fixes + +* ensures GCE metadata sets email field for ID tokens ([#874](https://www.github.com/googleapis/google-auth-library-nodejs/issues/874)) ([e45b73d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/e45b73dbb22e1c2d8115882006a21337c7d9bd63)) + +## [5.9.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.8.0...v5.9.0) (2020-01-14) + + +### Features + +* add methods for fetching and using id tokens ([#867](https://www.github.com/googleapis/google-auth-library-nodejs/issues/867)) ([8036f1a](https://www.github.com/googleapis/google-auth-library-nodejs/commit/8036f1a51d1a103b08daf62c7ce372c9f68cd9d4)) +* export LoginTicket and TokenPayload ([#870](https://www.github.com/googleapis/google-auth-library-nodejs/issues/870)) ([539ea5e](https://www.github.com/googleapis/google-auth-library-nodejs/commit/539ea5e804386b79ecf469838fff19465aeb2ca6)) + +## [5.8.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.7.0...v5.8.0) (2020-01-06) + + +### Features + +* cache results of getEnv() ([#857](https://www.github.com/googleapis/google-auth-library-nodejs/issues/857)) ([d4545a9](https://www.github.com/googleapis/google-auth-library-nodejs/commit/d4545a9001184fac0b67e7073e463e3efd345037)) + + +### Bug Fixes + +* **deps:** update dependency jws to v4 ([#851](https://www.github.com/googleapis/google-auth-library-nodejs/issues/851)) ([71366d4](https://www.github.com/googleapis/google-auth-library-nodejs/commit/71366d43406047ce9e1d818d59a14191fb678e3a)) + +## [5.7.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.6.1...v5.7.0) (2019-12-10) + + +### Features + +* make x-goog-user-project work for additional auth clients ([#848](https://www.github.com/googleapis/google-auth-library-nodejs/issues/848)) ([46af865](https://www.github.com/googleapis/google-auth-library-nodejs/commit/46af865172103c6f28712d78b30c2291487cbe86)) + +### [5.6.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.6.0...v5.6.1) (2019-12-05) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([#845](https://www.github.com/googleapis/google-auth-library-nodejs/issues/845)) ([a9c6e92](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a9c6e9284efe8102974c57c9824ed6275d743c7a)) +* **docs:** improve types and docs for generateCodeVerifierAsync ([#840](https://www.github.com/googleapis/google-auth-library-nodejs/issues/840)) ([04dae9c](https://www.github.com/googleapis/google-auth-library-nodejs/commit/04dae9c271f0099025188489c61fd245d482832b)) + +## [5.6.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.5.1...v5.6.0) (2019-12-02) + + +### Features + +* populate x-goog-user-project for requestAsync ([#837](https://www.github.com/googleapis/google-auth-library-nodejs/issues/837)) ([5a068fb](https://www.github.com/googleapis/google-auth-library-nodejs/commit/5a068fb8f5a3827ab70404f1d9699a97f962bdad)) +* set x-goog-user-project header, with quota_project from default credentials ([#829](https://www.github.com/googleapis/google-auth-library-nodejs/issues/829)) ([3240d16](https://www.github.com/googleapis/google-auth-library-nodejs/commit/3240d16f05171781fe6d70d64c476bceb25805a5)) + + +### Bug Fixes + +* **deps:** update dependency puppeteer 
to v2 ([#821](https://www.github.com/googleapis/google-auth-library-nodejs/issues/821)) ([2c04117](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2c0411708761cc7debdda1af1e593d82cb4aed31)) +* **docs:** add jsdoc-region-tag plugin ([#826](https://www.github.com/googleapis/google-auth-library-nodejs/issues/826)) ([558677f](https://www.github.com/googleapis/google-auth-library-nodejs/commit/558677fd90d3451e9ac4bf6d0b98907e3313f287)) +* expand on x-goog-user-project to handle auth.getClient() ([#831](https://www.github.com/googleapis/google-auth-library-nodejs/issues/831)) ([3646b7f](https://www.github.com/googleapis/google-auth-library-nodejs/commit/3646b7f9deb296aaff602dd2168ce93f014ce840)) +* use quota_project_id field instead of quota_project ([#832](https://www.github.com/googleapis/google-auth-library-nodejs/issues/832)) ([8933966](https://www.github.com/googleapis/google-auth-library-nodejs/commit/8933966659f3b07f5454a2756fa52d92fea147d2)) + +### [5.5.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.5.0...v5.5.1) (2019-10-22) + + +### Bug Fixes + +* **deps:** update gaxios dependency ([#817](https://www.github.com/googleapis/google-auth-library-nodejs/issues/817)) ([6730698](https://www.github.com/googleapis/google-auth-library-nodejs/commit/6730698b876eb52889acfead33bc4af52a8a7ba5)) +* don't append x-goog-api-client multiple times ([#820](https://www.github.com/googleapis/google-auth-library-nodejs/issues/820)) ([a46b271](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a46b271947b635377eacbdfcd22ae363ce9260a1)) + +## [5.5.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.4.1...v5.5.0) (2019-10-14) + + +### Features + +* **refresh:** add forceRefreshOnFailure flag for refreshing token on error ([#790](https://www.github.com/googleapis/google-auth-library-nodejs/issues/790)) ([54cf477](https://www.github.com/googleapis/google-auth-library-nodejs/commit/54cf4770f487fd1db48f2444c86109ca97608ed1)) + +### [5.4.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.4.0...v5.4.1) (2019-10-10) + + +### Bug Fixes + +* **deps:** updats to gcp-metadata with debug option ([#811](https://www.github.com/googleapis/google-auth-library-nodejs/issues/811)) ([744e3e8](https://www.github.com/googleapis/google-auth-library-nodejs/commit/744e3e8fea223eb4fb115ef0a4d36ad88fc6921a)) + +## [5.4.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.3.0...v5.4.0) (2019-10-08) + + +### Features + +* do not deprecate refreshAccessToken ([#804](https://www.github.com/googleapis/google-auth-library-nodejs/issues/804)) ([f05de11](https://www.github.com/googleapis/google-auth-library-nodejs/commit/f05de11)) + +## [5.3.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.2.2...v5.3.0) (2019-09-27) + + +### Features + +* if token expires soon, force refresh ([#794](https://www.github.com/googleapis/google-auth-library-nodejs/issues/794)) ([fecd4f4](https://www.github.com/googleapis/google-auth-library-nodejs/commit/fecd4f4)) + +### [5.2.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.2.1...v5.2.2) (2019-09-17) + + +### Bug Fixes + +* **deps:** update to gcp-metadata and address envDetect performance issues ([#787](https://www.github.com/googleapis/google-auth-library-nodejs/issues/787)) ([651b5d4](https://www.github.com/googleapis/google-auth-library-nodejs/commit/651b5d4)) + +### 
[5.2.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.2.0...v5.2.1) (2019-09-06) + + +### Bug Fixes + +* **deps:** nock@next has types that work with our libraries ([#783](https://www.github.com/googleapis/google-auth-library-nodejs/issues/783)) ([a253709](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a253709)) +* **docs:** fix variable name in README.md ([#782](https://www.github.com/googleapis/google-auth-library-nodejs/issues/782)) ([d8c70b9](https://www.github.com/googleapis/google-auth-library-nodejs/commit/d8c70b9)) + +## [5.2.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.1.2...v5.2.0) (2019-08-09) + + +### Features + +* populate x-goog-api-client header for auth ([#772](https://www.github.com/googleapis/google-auth-library-nodejs/issues/772)) ([526dcf6](https://www.github.com/googleapis/google-auth-library-nodejs/commit/526dcf6)) + +### [5.1.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.1.1...v5.1.2) (2019-08-05) + + +### Bug Fixes + +* **deps:** upgrade to gtoken 4.x ([#763](https://www.github.com/googleapis/google-auth-library-nodejs/issues/763)) ([a1fcc25](https://www.github.com/googleapis/google-auth-library-nodejs/commit/a1fcc25)) + +### [5.1.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.1.0...v5.1.1) (2019-07-29) + + +### Bug Fixes + +* **deps:** update dependency google-auth-library to v5 ([#759](https://www.github.com/googleapis/google-auth-library-nodejs/issues/759)) ([e32a12b](https://www.github.com/googleapis/google-auth-library-nodejs/commit/e32a12b)) + +## [5.1.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v5.0.0...v5.1.0) (2019-07-24) + + +### Features + +* **types:** expose ProjectIdCallback interface ([#753](https://www.github.com/googleapis/google-auth-library-nodejs/issues/753)) ([5577f0d](https://www.github.com/googleapis/google-auth-library-nodejs/commit/5577f0d)) + +## [5.0.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.6...v5.0.0) (2019-07-23) + + +### ⚠ BREAKING CHANGES + +* getOptions() no longer accepts GoogleAuthOptions (#749) + +### Code Refactoring + +* getOptions() no longer accepts GoogleAuthOptions ([#749](https://www.github.com/googleapis/google-auth-library-nodejs/issues/749)) ([ba58e3b](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ba58e3b)) + +### [4.2.6](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.5...v4.2.6) (2019-07-23) + + +### Bug Fixes + +* use FUNCTION_TARGET to detect GCF 10 and above ([#748](https://www.github.com/googleapis/google-auth-library-nodejs/issues/748)) ([ca17685](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ca17685)) + +### [4.2.5](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.4...v4.2.5) (2019-06-26) + + +### Bug Fixes + +* **docs:** make anchors work in jsdoc ([#742](https://www.github.com/googleapis/google-auth-library-nodejs/issues/742)) ([7901456](https://www.github.com/googleapis/google-auth-library-nodejs/commit/7901456)) + +### [4.2.4](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.3...v4.2.4) (2019-06-25) + + +### Bug Fixes + +* only require fast-text-encoding when needed ([#740](https://www.github.com/googleapis/google-auth-library-nodejs/issues/740)) ([04fcd77](https://www.github.com/googleapis/google-auth-library-nodejs/commit/04fcd77)) + +### 
[4.2.3](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.2...v4.2.3) (2019-06-24) + + +### Bug Fixes + +* feature detection to check for browser ([#738](https://www.github.com/googleapis/google-auth-library-nodejs/issues/738)) ([83a5ba5](https://www.github.com/googleapis/google-auth-library-nodejs/commit/83a5ba5)) + +### [4.2.2](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.1...v4.2.2) (2019-06-18) + + +### Bug Fixes + +* **compute:** correctly specify scopes when fetching token ([#735](https://www.github.com/googleapis/google-auth-library-nodejs/issues/735)) ([4803e3c](https://www.github.com/googleapis/google-auth-library-nodejs/commit/4803e3c)) + +### [4.2.1](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.2.0...v4.2.1) (2019-06-14) + + +### Bug Fixes + +* **docs:** move to new client docs URL ([#733](https://www.github.com/googleapis/google-auth-library-nodejs/issues/733)) ([cfbbe2a](https://www.github.com/googleapis/google-auth-library-nodejs/commit/cfbbe2a)) + +## [4.2.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.1.0...v4.2.0) (2019-06-05) + + +### Bug Fixes + +* pad base64 strings for base64js ([#722](https://www.github.com/googleapis/google-auth-library-nodejs/issues/722)) ([81e0a23](https://www.github.com/googleapis/google-auth-library-nodejs/commit/81e0a23)) + + +### Features + +* make both crypto implementations support sign ([#727](https://www.github.com/googleapis/google-auth-library-nodejs/issues/727)) ([e445fb3](https://www.github.com/googleapis/google-auth-library-nodejs/commit/e445fb3)) + +## [4.1.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v4.0.0...v4.1.0) (2019-05-29) + + +### Bug Fixes + +* **deps:** update dependency google-auth-library to v4 ([#705](https://www.github.com/googleapis/google-auth-library-nodejs/issues/705)) ([2b13344](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2b13344)) + + +### Features + +* use X-Goog-Api-Key header ([#719](https://www.github.com/googleapis/google-auth-library-nodejs/issues/719)) ([35471d0](https://www.github.com/googleapis/google-auth-library-nodejs/commit/35471d0)) + +## [4.0.0](https://www.github.com/googleapis/google-auth-library-nodejs/compare/v3.1.2...v4.0.0) (2019-05-08) + + +### Bug Fixes + +* **deps:** update dependency arrify to v2 ([#684](https://www.github.com/googleapis/google-auth-library-nodejs/issues/684)) ([1757ee2](https://www.github.com/googleapis/google-auth-library-nodejs/commit/1757ee2)) +* **deps:** update dependency gaxios to v2 ([#681](https://www.github.com/googleapis/google-auth-library-nodejs/issues/681)) ([770ad2f](https://www.github.com/googleapis/google-auth-library-nodejs/commit/770ad2f)) +* **deps:** update dependency gcp-metadata to v2 ([#701](https://www.github.com/googleapis/google-auth-library-nodejs/issues/701)) ([be20528](https://www.github.com/googleapis/google-auth-library-nodejs/commit/be20528)) +* **deps:** update dependency gtoken to v3 ([#702](https://www.github.com/googleapis/google-auth-library-nodejs/issues/702)) ([2c538e5](https://www.github.com/googleapis/google-auth-library-nodejs/commit/2c538e5)) +* re-throw original exception and preserve message in compute client ([#668](https://www.github.com/googleapis/google-auth-library-nodejs/issues/668)) ([dffd1cc](https://www.github.com/googleapis/google-auth-library-nodejs/commit/dffd1cc)) +* surface original stack trace and message with errors 
([#651](https://www.github.com/googleapis/google-auth-library-nodejs/issues/651)) ([8fb65eb](https://www.github.com/googleapis/google-auth-library-nodejs/commit/8fb65eb)) +* throw on missing refresh token in all cases ([#670](https://www.github.com/googleapis/google-auth-library-nodejs/issues/670)) ([0a02946](https://www.github.com/googleapis/google-auth-library-nodejs/commit/0a02946)) +* throw when adc cannot acquire a projectId ([#658](https://www.github.com/googleapis/google-auth-library-nodejs/issues/658)) ([ba48164](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ba48164)) +* **deps:** update dependency semver to v6 ([#655](https://www.github.com/googleapis/google-auth-library-nodejs/issues/655)) ([ec56c88](https://www.github.com/googleapis/google-auth-library-nodejs/commit/ec56c88)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#686](https://www.github.com/googleapis/google-auth-library-nodejs/issues/686)) ([377d5c6](https://www.github.com/googleapis/google-auth-library-nodejs/commit/377d5c6)) + + +### Features + +* support scopes on compute credentials ([#642](https://www.github.com/googleapis/google-auth-library-nodejs/issues/642)) ([1811b7f](https://www.github.com/googleapis/google-auth-library-nodejs/commit/1811b7f)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#686) + +## v3.1.2 + +03-22-2019 15:38 PDT + +### Implementation Changes +- fix: getCredential(): load credentials with getClient() ([#648](https://github.com/google/google-auth-library-nodejs/pull/648)) + +### Internal / Testing Changes +- chore: publish to npm using wombat ([#645](https://github.com/google/google-auth-library-nodejs/pull/645)) + +## v3.1.1 + +03-18-2019 08:32 PDT + +### Bug Fixes +- fix: Avoid loading fast-text-encoding if not in browser environment ([#627](https://github.com/google/google-auth-library-nodejs/pull/627)) + +### Dependencies +- fix(deps): update dependency gcp-metadata to v1 ([#632](https://github.com/google/google-auth-library-nodejs/pull/632)) + +### Documentation +- docs: update links in contrib guide ([#630](https://github.com/google/google-auth-library-nodejs/pull/630)) + +### Internal / Testing Changes +- build: use per-repo publish token ([#641](https://github.com/google/google-auth-library-nodejs/pull/641)) +- build: Add docuploader credentials to node publish jobs ([#639](https://github.com/google/google-auth-library-nodejs/pull/639)) +- build: use node10 to run samples-test, system-test etc ([#638](https://github.com/google/google-auth-library-nodejs/pull/638)) +- build: update release configuration +- chore(deps): update dependency @types/lru-cache to v5 ([#635](https://github.com/google/google-auth-library-nodejs/pull/635)) +- chore(deps): update dependency mocha to v6 +- chore: fix lint ([#631](https://github.com/google/google-auth-library-nodejs/pull/631)) +- build: use linkinator for docs test ([#628](https://github.com/google/google-auth-library-nodejs/pull/628)) +- chore(deps): update dependency @types/tmp to ^0.0.34 ([#629](https://github.com/google/google-auth-library-nodejs/pull/629)) +- build: create docs test npm scripts ([#625](https://github.com/google/google-auth-library-nodejs/pull/625)) +- build: test using @grpc/grpc-js in CI ([#624](https://github.com/google/google-auth-library-nodejs/pull/624)) + +## v3.1.0 + +02-08-2019 08:29 PST + +### Bug fixes +- fix: use key file when fetching project id ([#618](https://github.com/googleapis/google-auth-library-nodejs/pull/618)) +- fix: Throw error if there is no 
refresh token despite the necessity of refreshing ([#605](https://github.com/googleapis/google-auth-library-nodejs/pull/605)) + +### New Features +- feat: allow passing constructor options to getClient ([#611](https://github.com/googleapis/google-auth-library-nodejs/pull/611)) + +### Documentation +- docs: update contributing path in README ([#621](https://github.com/googleapis/google-auth-library-nodejs/pull/621)) +- chore: move CONTRIBUTING.md to root ([#619](https://github.com/googleapis/google-auth-library-nodejs/pull/619)) +- docs: add lint/fix example to contributing guide ([#615](https://github.com/googleapis/google-auth-library-nodejs/pull/615)) +- docs: use the People API for samples ([#609](https://github.com/googleapis/google-auth-library-nodejs/pull/609)) + +### Internal / Testing Changes +- chore(deps): update dependency typescript to ~3.3.0 ([#612](https://github.com/googleapis/google-auth-library-nodejs/pull/612)) +- chore(deps): update dependency eslint-config-prettier to v4 ([#604](https://github.com/googleapis/google-auth-library-nodejs/pull/604)) +- build: ignore googleapis.com in doc link check ([#602](https://github.com/googleapis/google-auth-library-nodejs/pull/602)) +- chore(deps): update dependency karma to v4 ([#603](https://github.com/googleapis/google-auth-library-nodejs/pull/603)) + +## v3.0.1 + +01-16-2019 21:04 PST + +### Bug Fixes +- fix(deps): upgrade to the latest gaxios ([#596](https://github.com/googleapis/google-auth-library-nodejs/pull/596)) + +## v3.0.0 + +01-16-2019 10:00 PST + +Welcome to 3.0 🎉 This release has it all. New features, bug fixes, breaking changes, performance improvements - something for everyone! The biggest addition to this release is support for the browser via Webpack. + +**This release has breaking changes.** This release has a few breaking changes. These changes are unlikely to affect most clients. + +#### BREAKING: Migration from `axios` to `gaxios` +The 2.0 version of this library used the [axios](https://github.com/axios/axios) library for making HTTP requests. In the 3.0 release, this has been replaced by a *mostly* API compatible library [gaxios](https://github.com/JustinBeckwith/gaxios). The new request library natively supports proxies, and comes with a smaller dependency chain. While this is mostly an implementation detail, the `request` method was directly exposed via the `GoogleAuth.request` and `OAuth2Client.request` methods. The gaxios library aims to provide an API compatible implementation of axios, but that can never be 100% promised. If you run into bugs or differences that cause issues - please do let us know. + +#### BREAKING: `generateCodeVerifier` is now `generateCodeVerifierAsync` +The `OAuth2Client.generateCodeVerifier` method has been replaced by the `OAuth2Client.generateCodeVerifierAsync` method. It has changed from a synchronous method to an asynchronous method to support async browser crypto APIs required for Webpack support. + +#### BREAKING: `verifySignedJwtWithCerts` is now `verifySignedJwtWithCertsAsync` +The `OAuth2Client.verifySignedJwtWithCerts` method has been replaced by the `OAuth2Client.verifySignedJwtWithCertsAsync` method. It has changed from a synchronous method to an asynchronous method to support async browser crypto APIs required for Webpack support. 
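+
+Code that previously called the synchronous methods now needs to `await` the result. Below is a minimal sketch of the new call pattern for the code-verifier helper, assuming a plain `OAuth2Client` with no special configuration:
+
+```js
+const {OAuth2Client} = require('google-auth-library');
+
+async function main() {
+  const oAuth2Client = new OAuth2Client();
+  // generateCodeVerifierAsync replaces the synchronous generateCodeVerifier;
+  // it resolves with the generated code verifier (and code challenge).
+  const codes = await oAuth2Client.generateCodeVerifierAsync();
+  console.log(codes.codeVerifier, codes.codeChallenge);
+}
+
+main().catch(console.error);
+```
+
+`verifySignedJwtWithCertsAsync` follows the same pattern: the verification result that was previously returned directly is now delivered through the returned promise.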
+ + +### New Features +- feat: make it webpackable ([#371](https://github.com/google/google-auth-library-nodejs/pull/371)) + +### Bug Fixes +- fix: accept lowercase env vars ([#578](https://github.com/google/google-auth-library-nodejs/pull/578)) + +### Dependencies +- chore(deps): update gtoken ([#592](https://github.com/google/google-auth-library-nodejs/pull/592)) +- fix(deps): upgrade to gcp-metadata v0.9.3 ([#586](https://github.com/google/google-auth-library-nodejs/pull/586)) + +### Documentation +- docs: update bug report link ([#585](https://github.com/google/google-auth-library-nodejs/pull/585)) +- docs: clarify access and refresh token docs ([#577](https://github.com/google/google-auth-library-nodejs/pull/577)) + +### Internal / Testing Changes +- refactor(deps): use `gaxios` for HTTP requests instead of `axios` ([#593](https://github.com/google/google-auth-library-nodejs/pull/593)) +- fix: some browser fixes ([#590](https://github.com/google/google-auth-library-nodejs/pull/590)) +- chore(deps): update dependency ts-loader to v5 ([#588](https://github.com/google/google-auth-library-nodejs/pull/588)) +- chore(deps): update dependency karma to v3 ([#587](https://github.com/google/google-auth-library-nodejs/pull/587)) +- build: check broken links in generated docs ([#579](https://github.com/google/google-auth-library-nodejs/pull/579)) +- chore(deps): drop unused dep on typedoc ([#583](https://github.com/google/google-auth-library-nodejs/pull/583)) +- build: add browser test running on Kokoro ([#584](https://github.com/google/google-auth-library-nodejs/pull/584)) +- test: improve samples and add tests ([#576](https://github.com/google/google-auth-library-nodejs/pull/576)) + +## v2.0.2 + +12-16-2018 10:48 PST + +### Fixes +- fix(types): export GCPEnv type ([#569](https://github.com/google/google-auth-library-nodejs/pull/569)) +- fix: use post for token revocation ([#524](https://github.com/google/google-auth-library-nodejs/pull/524)) + +### Dependencies +- fix(deps): update dependency lru-cache to v5 ([#541](https://github.com/google/google-auth-library-nodejs/pull/541)) + +### Documentation +- docs: add ref docs again ([#553](https://github.com/google/google-auth-library-nodejs/pull/553)) +- docs: clean up the readme ([#554](https://github.com/google/google-auth-library-nodejs/pull/554)) + +### Internal / Testing Changes +- chore(deps): update dependency @types/sinon to v7 ([#568](https://github.com/google/google-auth-library-nodejs/pull/568)) +- refactor: use execa for install tests, run eslint on samples ([#559](https://github.com/google/google-auth-library-nodejs/pull/559)) +- chore(build): inject yoshi automation key ([#566](https://github.com/google/google-auth-library-nodejs/pull/566)) +- chore: update nyc and eslint configs ([#565](https://github.com/google/google-auth-library-nodejs/pull/565)) +- chore: fix publish.sh permission +x ([#563](https://github.com/google/google-auth-library-nodejs/pull/563)) +- fix(build): fix Kokoro release script ([#562](https://github.com/google/google-auth-library-nodejs/pull/562)) +- build: add Kokoro configs for autorelease ([#561](https://github.com/google/google-auth-library-nodejs/pull/561)) +- chore: always nyc report before calling codecov ([#557](https://github.com/google/google-auth-library-nodejs/pull/557)) +- chore: nyc ignore build/test by default ([#556](https://github.com/google/google-auth-library-nodejs/pull/556)) +- chore(build): update the prettier and renovate config 
([#552](https://github.com/google/google-auth-library-nodejs/pull/552)) +- chore: update license file ([#551](https://github.com/google/google-auth-library-nodejs/pull/551)) +- fix(build): fix system key decryption ([#547](https://github.com/google/google-auth-library-nodejs/pull/547)) +- chore(deps): update dependency typescript to ~3.2.0 ([#546](https://github.com/google/google-auth-library-nodejs/pull/546)) +- chore(deps): unpin sinon ([#544](https://github.com/google/google-auth-library-nodejs/pull/544)) +- refactor: drop non-required modules ([#542](https://github.com/google/google-auth-library-nodejs/pull/542)) +- chore: add synth.metadata ([#537](https://github.com/google/google-auth-library-nodejs/pull/537)) +- fix: Pin @types/sinon to last compatible version ([#538](https://github.com/google/google-auth-library-nodejs/pull/538)) +- chore(deps): update dependency gts to ^0.9.0 ([#531](https://github.com/google/google-auth-library-nodejs/pull/531)) +- chore: update eslintignore config ([#530](https://github.com/google/google-auth-library-nodejs/pull/530)) +- chore: drop contributors from multiple places ([#528](https://github.com/google/google-auth-library-nodejs/pull/528)) +- chore: use latest npm on Windows ([#527](https://github.com/google/google-auth-library-nodejs/pull/527)) +- chore: update CircleCI config ([#523](https://github.com/google/google-auth-library-nodejs/pull/523)) +- chore: include build in eslintignore ([#516](https://github.com/google/google-auth-library-nodejs/pull/516)) + +## v2.0.1 + +### Implementation Changes +- fix: verifyIdToken will never return null ([#488](https://github.com/google/google-auth-library-nodejs/pull/488)) +- Update the url to application default credentials ([#470](https://github.com/google/google-auth-library-nodejs/pull/470)) +- Update omitted parameter 'hd' ([#467](https://github.com/google/google-auth-library-nodejs/pull/467)) + +### Dependencies +- chore(deps): update dependency nock to v10 ([#501](https://github.com/google/google-auth-library-nodejs/pull/501)) +- chore(deps): update dependency sinon to v7 ([#502](https://github.com/google/google-auth-library-nodejs/pull/502)) +- chore(deps): update dependency typescript to v3.1.3 ([#503](https://github.com/google/google-auth-library-nodejs/pull/503)) +- chore(deps): update dependency gh-pages to v2 ([#499](https://github.com/google/google-auth-library-nodejs/pull/499)) +- chore(deps): update dependency typedoc to ^0.13.0 ([#497](https://github.com/google/google-auth-library-nodejs/pull/497)) + +### Documentation +- docs: Remove code format from Application Default Credentials ([#483](https://github.com/google/google-auth-library-nodejs/pull/483)) +- docs: replace google/ with googleapis/ in URIs ([#472](https://github.com/google/google-auth-library-nodejs/pull/472)) +- Fix typo in readme ([#469](https://github.com/google/google-auth-library-nodejs/pull/469)) +- Update samples and docs for 2.0 ([#459](https://github.com/google/google-auth-library-nodejs/pull/459)) + +### Internal / Testing Changes +- chore: update issue templates ([#509](https://github.com/google/google-auth-library-nodejs/pull/509)) +- chore: remove old issue template ([#507](https://github.com/google/google-auth-library-nodejs/pull/507)) +- build: run tests on node11 ([#506](https://github.com/google/google-auth-library-nodejs/pull/506)) +- chore(build): drop hard rejection and update gts in the kitchen test ([#504](https://github.com/google/google-auth-library-nodejs/pull/504)) +- chores(build): do not collect 
sponge.xml from windows builds ([#500](https://github.com/google/google-auth-library-nodejs/pull/500)) +- chores(build): run codecov on continuous builds ([#495](https://github.com/google/google-auth-library-nodejs/pull/495)) +- chore: update new issue template ([#494](https://github.com/google/google-auth-library-nodejs/pull/494)) +- build: fix codecov uploading on Kokoro ([#490](https://github.com/google/google-auth-library-nodejs/pull/490)) +- test: move kitchen sink tests to system-test ([#489](https://github.com/google/google-auth-library-nodejs/pull/489)) +- Update kokoro config ([#482](https://github.com/google/google-auth-library-nodejs/pull/482)) +- fix: export additional typescript types ([#479](https://github.com/google/google-auth-library-nodejs/pull/479)) +- Don't publish sourcemaps ([#478](https://github.com/google/google-auth-library-nodejs/pull/478)) +- test: remove appveyor config ([#477](https://github.com/google/google-auth-library-nodejs/pull/477)) +- Enable prefer-const in the eslint config ([#473](https://github.com/google/google-auth-library-nodejs/pull/473)) +- Enable no-var in eslint ([#471](https://github.com/google/google-auth-library-nodejs/pull/471)) +- Update CI config ([#468](https://github.com/google/google-auth-library-nodejs/pull/468)) +- Retry npm install in CI ([#465](https://github.com/google/google-auth-library-nodejs/pull/465)) +- Update Kokoro config ([#462](https://github.com/google/google-auth-library-nodejs/pull/462)) + +## v2.0.0 + +Well hello 2.0 🎉 **This release has multiple breaking changes**. It also has a lot of bug fixes. + +### Breaking Changes + +#### Support for node.js 4.x and 9.x has been dropped +These versions of node.js are no longer supported. + +#### The `getRequestMetadata` method has been deprecated +The `getRequestMetadata` method has been deprecated on the `IAM`, `OAuth2`, `JWT`, and `JWTAccess` classes. The `getRequestHeaders` method should be used instead. The methods have a subtle difference: the `getRequestMetadata` method returns an object with a headers property, which contains the authorization header. The `getRequestHeaders` method simply returns the headers. + +##### Old code +```js +const client = await auth.getClient(); +const res = await client.getRequestMetadata(); +const headers = res.headers; +``` + +##### New code +```js +const client = await auth.getClient(); +const headers = await client.getRequestHeaders(); +``` + +#### The `createScopedRequired` method has been deprecated +The `createScopedRequired` method has been deprecated on multiple classes. The `createScopedRequired` and `createScoped` methods on the `JWT` class were largely in place to help inform clients when scopes were required in an application default credential scenario. Instead of checking if scopes are required after creating the client, instead scopes should just be passed either into the `GoogleAuth.getClient` method, or directly into the `JWT` constructor. 
+ +##### Old code +```js +auth.getApplicationDefault(function(err, authClient) { + if (err) { + return callback(err); + } + if (authClient.createScopedRequired && authClient.createScopedRequired()) { + authClient = authClient.createScoped([ + 'https://www.googleapis.com/auth/cloud-platform' + ]); + } + callback(null, authClient); +}); +``` + +##### New code +```js +const client = await auth.getClient({ + scopes: ['https://www.googleapis.com/auth/cloud-platform'] +}); +``` + +#### Deprecate `refreshAccessToken` + +_Note: `refreshAccessToken` is no longer deprecated._ + +`getAccessToken`, `getRequestMetadata`, and `request` methods will all refresh the token if needed automatically. + +You should not need to invoke `refreshAccessToken` directly except in [certain edge-cases](https://github.com/googleapis/google-auth-library-nodejs/issues/575). + +### Features +- Set private_key_id in JWT access token header like other google auth libraries. (#450) + +### Bug Fixes +- fix: support HTTPS proxies (#405) +- fix: export missing interfaces (#437) +- fix: Use new auth URIs (#434) +- docs: Fix broken link (#423) +- fix: surface file read streams (#413) +- fix: prevent unhandled rejections by avoid .catch (#404) +- fix: use gcp-metadata for compute credentials (#409) +- Add Code of Conduct +- fix: Warn when using user credentials from the Cloud SDK (#399) +- fix: use `Buffer.from` instead of `new Buffer` (#400) +- fix: Fix link format in README.md (#385) + +### Breaking changes +- chore: deprecate getRequestMetadata (#414) +- fix: deprecate the `createScopedRequired` methods (#410) +- fix: drop support for node.js 4.x and 9.x (#417) +- fix: deprecate the `refreshAccessToken` methods (#411) +- fix: deprecate the `getDefaultProjectId` method (#402) +- fix: drop support for node.js 4 (#401) + +### Build / Test changes +- Run synth to make build tools consistent (#455) +- Add a package.json for samples and cleanup README (#454) +- chore(deps): update dependency typedoc to ^0.12.0 (#453) +- chore: move examples => samples + synth (#448) +- chore(deps): update dependency nyc to v13 (#452) +- chore(deps): update dependency pify to v4 (#447) +- chore(deps): update dependency assert-rejects to v1 (#446) +- chore: ignore package-lock.json (#445) +- chore: update renovate config (#442) +- chore(deps): lock file maintenance (#443) +- chore: remove greenkeeper badge (#440) +- test: throw on deprecation +- chore: add intelli-espower-loader for running tests (#430) +- chore(deps): update dependency typescript to v3 (#432) +- chore(deps): lock file maintenance (#431) +- test: use strictEqual in tests (#425) +- chore(deps): lock file maintenance (#428) +- chore: Configure Renovate (#424) +- chore: Update gts to the latest version 🚀 (#422) +- chore: update gcp-metadata for isAvailable fix (#420) +- refactor: use assert.reject in the tests (#415) +- refactor: cleanup types for certificates (#412) +- test: run tests with hard-rejection (#397) +- cleanup: straighten nested try-catch (#394) +- test: getDefaultProjectId should prefer config (#388) +- chore(package): Update gts to the latest version 🚀 (#387) +- chore(package): update sinon to version 6.0.0 (#386) + +## Upgrading to 1.x +The `1.x` release includes a variety of bug fixes, new features, and breaking changes. Please take care, and see [the release notes](https://github.com/googleapis/google-auth-library-nodejs/releases/tag/v1.0.0) for a list of breaking changes, and the upgrade guide. 
diff --git a/node_modules/google-auth-library/LICENSE b/node_modules/google-auth-library/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/node_modules/google-auth-library/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/google-auth-library/README.md b/node_modules/google-auth-library/README.md new file mode 100644 index 0000000..884cbbc --- /dev/null +++ b/node_modules/google-auth-library/README.md @@ -0,0 +1,1293 @@ +[//]: # "This README.md file is auto-generated, all changes to this file will be lost." +[//]: # "To regenerate it, use `python -m synthtool`." +Google Cloud Platform logo + +# [Google Auth Library: Node.js Client](https://github.com/googleapis/google-auth-library-nodejs) + +[![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) +[![npm version](https://img.shields.io/npm/v/google-auth-library.svg)](https://www.npmjs.org/package/google-auth-library) + + + + +This is Google's officially supported [node.js](http://nodejs.org/) client library for using OAuth 2.0 authorization and authentication with Google APIs. + + +A comprehensive list of changes in each version may be found in +[the CHANGELOG](https://github.com/googleapis/google-auth-library-nodejs/blob/main/CHANGELOG.md). + +* [Google Auth Library Node.js Client API Reference][client-docs] +* [Google Auth Library Documentation][product-docs] +* [github.com/googleapis/google-auth-library-nodejs](https://github.com/googleapis/google-auth-library-nodejs) + +Read more about the client libraries for Cloud APIs, including the older +Google APIs Client Libraries, in [Client Libraries Explained][explained]. + +[explained]: https://cloud.google.com/apis/docs/client-libraries-explained + +**Table of contents:** + + +* [Quickstart](#quickstart) + + * [Installing the client library](#installing-the-client-library) + +* [Samples](#samples) +* [Versioning](#versioning) +* [Contributing](#contributing) +* [License](#license) + +## Quickstart + +### Installing the client library + +```bash +npm install google-auth-library +``` + +## Ways to authenticate +This library provides a variety of ways to authenticate to your Google services. +- [Application Default Credentials](#choosing-the-correct-credential-type-automatically) - Use Application Default Credentials when you use a single identity for all users in your application. Especially useful for applications running on Google Cloud. Application Default Credentials also support workload identity federation to access Google Cloud resources from non-Google Cloud platforms. +- [OAuth 2](#oauth2) - Use OAuth2 when you need to perform actions on behalf of the end user. +- [JSON Web Tokens](#json-web-tokens) - Use JWT when you are using a single identity for all users. Especially useful for server->server or server->API communication. +- [Google Compute](#compute) - Directly use a service account on Google Cloud Platform. Useful for server->server or server->API communication. +- [Workload Identity Federation](#workload-identity-federation) - Use workload identity federation to access Google Cloud resources from Amazon Web Services (AWS), Microsoft Azure or any identity provider that supports OpenID Connect (OIDC). 
+- [Workforce Identity Federation](#workforce-identity-federation) - Use workforce identity federation to access Google Cloud resources using an external identity provider (IdP) to authenticate and authorize a workforce—a group of users, such as employees, partners, and contractors—using IAM, so that the users can access Google Cloud services. +- [Impersonated Credentials Client](#impersonated-credentials-client) - access protected resources on behalf of another service account. +- [Downscoped Client](#downscoped-client) - Use Downscoped Client with Credential Access Boundary to generate a short-lived credential with downscoped, restricted IAM permissions that can be used for Cloud Storage. + +## Application Default Credentials +This library provides an implementation of [Application Default Credentials](https://cloud.google.com/docs/authentication/getting-started) for Node.js. The [Application Default Credentials](https://cloud.google.com/docs/authentication/getting-started) provide a simple way to get authorization credentials for use in calling Google APIs. + +They are best suited for cases when the call needs to have the same identity and authorization level for the application independent of the user. This is the recommended approach to authorize calls to Cloud APIs, particularly when you're building an application that uses Google Cloud Platform. + +Application Default Credentials also support workload identity federation to access Google Cloud resources from non-Google Cloud platforms including Amazon Web Services (AWS), Microsoft Azure or any identity provider that supports OpenID Connect (OIDC). Workload identity federation is recommended for non-Google Cloud environments as it avoids the need to download, manage and store service account private keys locally; see [Workload Identity Federation](#workload-identity-federation). + +#### Download your Service Account Credentials JSON file + +To use Application Default Credentials, you first need to download a set of JSON credentials for your project. Go to **APIs & Auth** > **Credentials** in the [Google Developers Console](https://console.cloud.google.com/) and select **Service account** from the **Add credentials** dropdown. + +> This file is your *only copy* of these credentials. It should never be +> committed with your source code, and should be stored securely. + +Once downloaded, store the path to this file in the `GOOGLE_APPLICATION_CREDENTIALS` environment variable. + +#### Enable the API you want to use + +Before making your API call, you must be sure the API you're calling has been enabled. Go to **APIs & Auth** > **APIs** in the [Google Developers Console](https://console.cloud.google.com/) and enable the APIs you'd like to call. For the example below, you must enable the `DNS API`. + + +#### Choosing the correct credential type automatically + +Rather than manually creating an OAuth2 client, JWT client, or Compute client, the auth library can create the correct credential type for you, depending upon the environment your code is running under. + +For example, a JWT auth client will be created when your code is running on your local developer machine, and a Compute client will be created when the same code is running on Google Cloud Platform. If you need a specific set of scopes, you can pass those in the form of a string or an array to the `GoogleAuth` constructor. + +The code below shows how to retrieve a default credential type, depending upon the runtime environment. 
+ +```js +const {GoogleAuth} = require('google-auth-library'); + +/** +* Instead of specifying the type of client you'd like to use (JWT, OAuth2, etc) +* this library will automatically choose the right client based on the environment. +*/ +async function main() { + const auth = new GoogleAuth({ + scopes: 'https://www.googleapis.com/auth/cloud-platform' + }); + const client = await auth.getClient(); + const projectId = await auth.getProjectId(); + const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + const res = await client.request({ url }); + console.log(res.data); +} + +main().catch(console.error); +``` + +## OAuth2 + +This library comes with an [OAuth2](https://developers.google.com/identity/protocols/OAuth2) client that allows you to retrieve an access token, and will refresh the token and retry the request seamlessly if you also provide an `expiry_date` and the token is expired. The basics of Google's OAuth2 implementation are explained in the [Google Authorization and Authentication documentation](https://developers.google.com/accounts/docs/OAuth2Login). + +In the following examples, you may need a `CLIENT_ID`, `CLIENT_SECRET` and `REDIRECT_URL`. You can find these pieces of information by going to the [Developer Console](https://console.cloud.google.com/), clicking your project > APIs & auth > credentials. + +For more information about OAuth2 and how it works, [see here](https://developers.google.com/identity/protocols/OAuth2). + +#### A complete OAuth2 example + +Let's take a look at a complete example. + +``` js +const {OAuth2Client} = require('google-auth-library'); +const http = require('http'); +const url = require('url'); +const open = require('open'); +const destroyer = require('server-destroy'); + +// Download your OAuth2 configuration from the Google Developers Console +const keys = require('./oauth2.keys.json'); + +/** +* Start by acquiring a pre-authenticated oAuth2 client. +*/ +async function main() { + const oAuth2Client = await getAuthenticatedClient(); + // Make a simple request to the People API using our pre-authenticated client. The `request()` method + // takes a GaxiosOptions object. Visit https://github.com/JustinBeckwith/gaxios. + const url = 'https://people.googleapis.com/v1/people/me?personFields=names'; + const res = await oAuth2Client.request({url}); + console.log(res.data); + + // After acquiring an access_token, you may want to check on the audience, expiration, + // or original scopes requested. You can do that with the `getTokenInfo` method. + const tokenInfo = await oAuth2Client.getTokenInfo( + oAuth2Client.credentials.access_token + ); + console.log(tokenInfo); +} + +/** +* Create a new OAuth2Client, and go through the OAuth2 content +* workflow. Return the full client to the callback. +*/ +function getAuthenticatedClient() { + return new Promise((resolve, reject) => { + // create an oAuth client to authorize the API call. Secrets are kept in a `keys.json` file, + // which should be downloaded from the Google Developers Console. + const oAuth2Client = new OAuth2Client( + keys.web.client_id, + keys.web.client_secret, + keys.web.redirect_uris[0] + ); + + // Generate the url that will be used for the consent dialog. + const authorizeUrl = oAuth2Client.generateAuthUrl({ + access_type: 'offline', + scope: 'https://www.googleapis.com/auth/userinfo.profile', + }); + + // Open an http server to accept the oauth callback. 
In this simple example, the + // only request to our webserver is to /oauth2callback?code= + const server = http + .createServer(async (req, res) => { + try { + if (req.url.indexOf('/oauth2callback') > -1) { + // acquire the code from the querystring, and close the web server. + const qs = new url.URL(req.url, 'http://localhost:3000') + .searchParams; + const code = qs.get('code'); + console.log(`Code is ${code}`); + res.end('Authentication successful! Please return to the console.'); + server.destroy(); + + // Now that we have the code, use that to acquire tokens. + const r = await oAuth2Client.getToken(code); + // Make sure to set the credentials on the OAuth2 client. + oAuth2Client.setCredentials(r.tokens); + console.info('Tokens acquired.'); + resolve(oAuth2Client); + } + } catch (e) { + reject(e); + } + }) + .listen(3000, () => { + // open the browser to the authorize url to start the workflow + open(authorizeUrl, {wait: false}).then(cp => cp.unref()); + }); + destroyer(server); + }); +} + +main().catch(console.error); +``` + +#### Handling token events + +This library will automatically obtain an `access_token`, and automatically refresh the `access_token` if a `refresh_token` is present. The `refresh_token` is only returned on the [first authorization](https://github.com/googleapis/google-api-nodejs-client/issues/750#issuecomment-304521450), so if you want to make sure you store it safely. An easy way to make sure you always store the most recent tokens is to use the `tokens` event: + +```js +const client = await auth.getClient(); + +client.on('tokens', (tokens) => { + if (tokens.refresh_token) { + // store the refresh_token in my database! + console.log(tokens.refresh_token); + } + console.log(tokens.access_token); +}); + +const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; +const res = await client.request({ url }); +// The `tokens` event would now be raised if this was the first request +``` + +#### Retrieve access token +With the code returned, you can ask for an access token as shown below: + +``` js +const tokens = await oauth2Client.getToken(code); +// Now tokens contains an access_token and an optional refresh_token. Save them. +oauth2Client.setCredentials(tokens); +``` + +#### Obtaining a new Refresh Token +If you need to obtain a new `refresh_token`, ensure the call to `generateAuthUrl` sets the `access_type` to `offline`. The refresh token will only be returned for the first authorization by the user. To force consent, set the `prompt` property to `consent`: + +```js +// Generate the url that will be used for the consent dialog. +const authorizeUrl = oAuth2Client.generateAuthUrl({ + // To get a refresh token, you MUST set access_type to `offline`. + access_type: 'offline', + // set the appropriate scopes + scope: 'https://www.googleapis.com/auth/userinfo.profile', + // A refresh token is only returned the first time the user + // consents to providing access. For illustration purposes, + // setting the prompt to 'consent' will force this consent + // every time, forcing a refresh_token to be returned. + prompt: 'consent' +}); +``` + +#### Checking `access_token` information +After obtaining and storing an `access_token`, at a later time you may want to go check the expiration date, +original scopes, or audience for the token. To get the token info, you can use the `getTokenInfo` method: + +```js +// after acquiring an oAuth2Client... 
+const tokenInfo = await oAuth2Client.getTokenInfo('my-access-token'); + +// take a look at the scopes originally provisioned for the access token +console.log(tokenInfo.scopes); +``` + +This method will throw if the token is invalid. + +## JSON Web Tokens +The Google Developers Console provides a `.json` file that you can use to configure a JWT auth client and authenticate your requests, for example when using a service account. + +``` js +const {JWT} = require('google-auth-library'); +const keys = require('./jwt.keys.json'); + +async function main() { + const client = new JWT({ + email: keys.client_email, + key: keys.private_key, + scopes: ['https://www.googleapis.com/auth/cloud-platform'], + }); + const url = `https://dns.googleapis.com/dns/v1/projects/${keys.project_id}`; + const res = await client.request({url}); + console.log(res.data); +} + +main().catch(console.error); +``` + +The parameters for the JWT auth client including how to use it with a `.pem` file are explained in [samples/jwt.js](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/jwt.js). + +#### Loading credentials from environment variables +Instead of loading credentials from a key file, you can also provide them using an environment variable and the `GoogleAuth.fromJSON()` method. This is particularly convenient for systems that deploy directly from source control (Heroku, App Engine, etc). + +Start by exporting your credentials: + +``` +$ export CREDS='{ + "type": "service_account", + "project_id": "your-project-id", + "private_key_id": "your-private-key-id", + "private_key": "your-private-key", + "client_email": "your-client-email", + "client_id": "your-client-id", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://accounts.google.com/o/oauth2/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "your-cert-url" +}' +``` +Now you can create a new client from the credentials: + +```js +const {auth} = require('google-auth-library'); + +// load the environment variable with our keys +const keysEnvVar = process.env['CREDS']; +if (!keysEnvVar) { + throw new Error('The $CREDS environment variable was not found!'); +} +const keys = JSON.parse(keysEnvVar); + +async function main() { + // load the JWT or UserRefreshClient from the keys + const client = auth.fromJSON(keys); + client.scopes = ['https://www.googleapis.com/auth/cloud-platform']; + const url = `https://dns.googleapis.com/dns/v1/projects/${keys.project_id}`; + const res = await client.request({url}); + console.log(res.data); +} + +main().catch(console.error); +``` + +#### Using a Proxy +You can set the `HTTPS_PROXY` or `https_proxy` environment variables to proxy HTTPS requests. When `HTTPS_PROXY` or `https_proxy` are set, they will be used to proxy SSL requests that do not have an explicit proxy configuration option present. + +## Compute +If your application is running on Google Cloud Platform, you can authenticate using the default service account or by specifying a specific service account. + +**Note**: In most cases, you will want to use [Application Default Credentials](#choosing-the-correct-credential-type-automatically). Direct use of the `Compute` class is for very specific scenarios. + +``` js +const {auth, Compute} = require('google-auth-library'); + +async function main() { + const client = new Compute({ + // Specifying the service account email is optional. 
+ serviceAccountEmail: 'my-service-account@example.com' + }); + const projectId = await auth.getProjectId(); + const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + const res = await client.request({url}); + console.log(res.data); +} + +main().catch(console.error); +``` + +## Workload Identity Federation + +Using workload identity federation, your application can access Google Cloud resources from Amazon Web Services (AWS), Microsoft Azure or any identity provider that supports OpenID Connect (OIDC). + +Traditionally, applications running outside Google Cloud have used service account keys to access Google Cloud resources. Using identity federation, you can allow your workload to impersonate a service account. +This lets you access Google Cloud resources directly, eliminating the maintenance and security burden associated with service account keys. + +### Accessing resources from AWS + +In order to access Google Cloud resources from Amazon Web Services (AWS), the following requirements are needed: +- A workload identity pool needs to be created. +- AWS needs to be added as an identity provider in the workload identity pool (The Google [organization policy](https://cloud.google.com/iam/docs/manage-workload-identity-pools-providers#restrict) needs to allow federation from AWS). +- Permission to impersonate a service account needs to be granted to the external identity. + +Follow the detailed [instructions](https://cloud.google.com/iam/docs/access-resources-aws) on how to configure workload identity federation from AWS. + +After configuring the AWS provider to impersonate a service account, a credential configuration file needs to be generated. +Unlike service account credential files, the generated credential configuration file will only contain non-sensitive metadata to instruct the library on how to retrieve external subject tokens and exchange them for service account access tokens. +The configuration file can be generated by using the [gcloud CLI](https://cloud.google.com/sdk/). + +To generate the AWS workload identity configuration, run the following command: + +```bash +# Generate an AWS configuration file. +gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$AWS_PROVIDER_ID \ + --service-account $SERVICE_ACCOUNT_EMAIL \ + --aws \ + --output-file /path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$AWS_PROVIDER_ID`: The AWS provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. + +This will generate the configuration file in the specified output file. + +If you want to use the AWS IMDSv2 flow, you can add the field below to the credential_source in your AWS ADC configuration file: +"imdsv2_session_token_url": "http://169.254.169.254/latest/api/token" +The gcloud create-cred-config command will be updated to support this soon. + +You can now [start using the Auth library](#using-external-identities) to call Google Cloud resources from AWS. + +### Access resources from Microsoft Azure + +In order to access Google Cloud resources from Microsoft Azure, the following requirements are needed: +- A workload identity pool needs to be created. 
+- Azure needs to be added as an identity provider in the workload identity pool (The Google [organization policy](https://cloud.google.com/iam/docs/manage-workload-identity-pools-providers#restrict) needs to allow federation from Azure). +- The Azure tenant needs to be configured for identity federation. +- Permission to impersonate a service account needs to be granted to the external identity. + +Follow the detailed [instructions](https://cloud.google.com/iam/docs/access-resources-azure) on how to configure workload identity federation from Microsoft Azure. + +After configuring the Azure provider to impersonate a service account, a credential configuration file needs to be generated. +Unlike service account credential files, the generated credential configuration file will only contain non-sensitive metadata to instruct the library on how to retrieve external subject tokens and exchange them for service account access tokens. +The configuration file can be generated by using the [gcloud CLI](https://cloud.google.com/sdk/). + +To generate the Azure workload identity configuration, run the following command: + +```bash +# Generate an Azure configuration file. +gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$AZURE_PROVIDER_ID \ + --service-account $SERVICE_ACCOUNT_EMAIL \ + --azure \ + --output-file /path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$AZURE_PROVIDER_ID`: The Azure provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. + +This will generate the configuration file in the specified output file. + +You can now [start using the Auth library](#using-external-identities) to call Google Cloud resources from Azure. + +### Accessing resources from an OIDC identity provider + +In order to access Google Cloud resources from an identity provider that supports [OpenID Connect (OIDC)](https://openid.net/connect/), the following requirements are needed: +- A workload identity pool needs to be created. +- An OIDC identity provider needs to be added in the workload identity pool (The Google [organization policy](https://cloud.google.com/iam/docs/manage-workload-identity-pools-providers#restrict) needs to allow federation from the identity provider). +- Permission to impersonate a service account needs to be granted to the external identity. + +Follow the detailed [instructions](https://cloud.google.com/iam/docs/access-resources-oidc) on how to configure workload identity federation from an OIDC identity provider. + +After configuring the OIDC provider to impersonate a service account, a credential configuration file needs to be generated. +Unlike service account credential files, the generated credential configuration file will only contain non-sensitive metadata to instruct the library on how to retrieve external subject tokens and exchange them for service account access tokens. +The configuration file can be generated by using the [gcloud CLI](https://cloud.google.com/sdk/). + +For OIDC providers, the Auth library can retrieve OIDC tokens either from a local file location (file-sourced credentials) or from a local server (URL-sourced credentials). 
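+
+However the subject token is sourced, the generated configuration file is consumed like any other credential file. The snippet below is a minimal sketch: the path and scopes are placeholders, and it assumes a library version whose `GoogleAuth` accepts external account configurations through the `keyFile` option (pointing `GOOGLE_APPLICATION_CREDENTIALS` at the file, as described under [Using External Identities](#using-external-identities), works as well).
+
+```js
+const {GoogleAuth} = require('google-auth-library');
+
+async function main() {
+  const auth = new GoogleAuth({
+    // Placeholder path to the configuration generated by the gcloud command above.
+    keyFile: '/path/to/generated/config.json',
+    scopes: 'https://www.googleapis.com/auth/cloud-platform',
+  });
+  const client = await auth.getClient();
+  const projectId = await auth.getProjectId();
+  const res = await client.request({
+    url: `https://storage.googleapis.com/storage/v1/b?project=${projectId}`,
+  });
+  console.log(res.data);
+}
+
+main().catch(console.error);
+```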
+ +**File-sourced credentials** +For file-sourced credentials, a background process needs to be continuously refreshing the file location with a new OIDC token prior to expiration. +For tokens with one hour lifetimes, the token needs to be updated in the file every hour. The token can be stored directly as plain text or in JSON format. + +To generate a file-sourced OIDC configuration, run the following command: + +```bash +# Generate an OIDC configuration file for file-sourced credentials. +gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$OIDC_PROVIDER_ID \ + --service-account $SERVICE_ACCOUNT_EMAIL \ + --credential-source-file $PATH_TO_OIDC_ID_TOKEN \ + # Optional arguments for file types. Default is "text": + # --credential-source-type "json" \ + # Optional argument for the field that contains the OIDC credential. + # This is required for json. + # --credential-source-field-name "id_token" \ + --output-file /path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$OIDC_PROVIDER_ID`: The OIDC provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. +- `$PATH_TO_OIDC_ID_TOKEN`: The file path where the OIDC token will be retrieved from. + +This will generate the configuration file in the specified output file. + +**URL-sourced credentials** +For URL-sourced credentials, a local server needs to host a GET endpoint to return the OIDC token. The response can be in plain text or JSON. +Additional required request headers can also be specified. + +To generate a URL-sourced OIDC workload identity configuration, run the following command: + +```bash +# Generate an OIDC configuration file for URL-sourced credentials. +gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$OIDC_PROVIDER_ID \ + --service-account $SERVICE_ACCOUNT_EMAIL \ + --credential-source-url $URL_TO_GET_OIDC_TOKEN \ + --credential-source-headers $HEADER_KEY=$HEADER_VALUE \ + # Optional arguments for file types. Default is "text": + # --credential-source-type "json" \ + # Optional argument for the field that contains the OIDC credential. + # This is required for json. + # --credential-source-field-name "id_token" \ + --output-file /path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$OIDC_PROVIDER_ID`: The OIDC provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. +- `$URL_TO_GET_OIDC_TOKEN`: The URL of the local server endpoint to call to retrieve the OIDC token. +- `$HEADER_KEY` and `$HEADER_VALUE`: The additional header key/value pairs to pass along the GET request to `$URL_TO_GET_OIDC_TOKEN`, e.g. `Metadata-Flavor=Google`. + +#### Using External Account Authorized User workforce credentials + +[External account authorized user credentials](https://cloud.google.com/iam/docs/workforce-obtaining-short-lived-credentials#browser-based-sign-in) allow you to sign in with a web browser to an external identity provider account via the +gcloud CLI and create a configuration for the auth library to use. 
+ +To generate an external account authorized user workforce identity configuration, run the following command: + +```bash +gcloud auth application-default login --login-config=$LOGIN_CONFIG +``` + +Where the following variable needs to be substituted: +- `$LOGIN_CONFIG`: The login config file generated with the cloud console or + [gcloud iam workforce-pools create-login-config](https://cloud.google.com/sdk/gcloud/reference/iam/workforce-pools/create-login-config) + +This will open a browser flow for you to sign in via the configured third party identity provider +and then will store the external account authorized user configuration at the well known ADC location. +The auth library will then use the provided refresh token from the configuration to generate and refresh +an access token to call Google Cloud services. + +Note that the default lifetime of the refresh token is one hour, after which a new configuration will need to be generated from the gcloud CLI. +The lifetime can be modified by changing the [session duration of the workforce pool](https://cloud.google.com/iam/docs/reference/rest/v1/locations.workforcePools), and can be set as high as 12 hours. + +#### Using Executable-sourced credentials with OIDC and SAML + +**Executable-sourced credentials** +For executable-sourced credentials, a local executable is used to retrieve the 3rd party token. +The executable must handle providing a valid, unexpired OIDC ID token or SAML assertion in JSON format +to stdout. + +To use executable-sourced credentials, the `GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES` +environment variable must be set to `1`. + +To generate an executable-sourced workload identity configuration, run the following command: + +```bash +# Generate a configuration file for executable-sourced credentials. +gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$PROVIDER_ID \ + --service-account=$SERVICE_ACCOUNT_EMAIL \ + --subject-token-type=$SUBJECT_TOKEN_TYPE \ + # The absolute path for the program, including arguments. + # e.g. --executable-command="/path/to/command --foo=bar" + --executable-command=$EXECUTABLE_COMMAND \ + # Optional argument for the executable timeout. Defaults to 30s. + # --executable-timeout-millis=$EXECUTABLE_TIMEOUT \ + # Optional argument for the absolute path to the executable output file. + # See below on how this argument impacts the library behaviour. + # --executable-output-file=$EXECUTABLE_OUTPUT_FILE \ + --output-file /path/to/generated/config.json +``` +Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$PROVIDER_ID`: The OIDC or SAML provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. +- `$SUBJECT_TOKEN_TYPE`: The subject token type. +- `$EXECUTABLE_COMMAND`: The full command to run, including arguments. Must be an absolute path to the program. + +The `--executable-timeout-millis` flag is optional. This is the duration for which +the auth library will wait for the executable to finish, in milliseconds. +Defaults to 30 seconds when not provided. The maximum allowed value is 2 minutes. +The minimum is 5 seconds. + +The `--executable-output-file` flag is optional. If provided, the file path must +point to the 3PI credential response generated by the executable. This is useful +for caching the credentials. 
By specifying this path, the Auth libraries will first +check for its existence before running the executable. By caching the executable JSON +response to this file, it improves performance as it avoids the need to run the executable +until the cached credentials in the output file are expired. The executable must +handle writing to this file - the auth libraries will only attempt to read from +this location. The format of contents in the file should match the JSON format +expected by the executable shown below. + +To retrieve the 3rd party token, the library will call the executable +using the command specified. The executable's output must adhere to the response format +specified below. It must output the response to stdout. + +A sample successful executable OIDC response: +```json +{ + "version": 1, + "success": true, + "token_type": "urn:ietf:params:oauth:token-type:id_token", + "id_token": "HEADER.PAYLOAD.SIGNATURE", + "expiration_time": 1620499962 +} +``` + +A sample successful executable SAML response: +```json +{ + "version": 1, + "success": true, + "token_type": "urn:ietf:params:oauth:token-type:saml2", + "saml_response": "...", + "expiration_time": 1620499962 +} +``` +For successful responses, the `expiration_time` field is only required +when an output file is specified in the credential configuration. + +A sample executable error response: +```json +{ + "version": 1, + "success": false, + "code": "401", + "message": "Caller not authorized." +} +``` +These are all required fields for an error response. The code and message +fields will be used by the library as part of the thrown exception. + +Response format fields summary: +* `version`: The version of the JSON output. Currently, only version 1 is supported. +* `success`: The status of the response. When true, the response must contain the 3rd party token + and token type. The response must also contain the expiration time if an output file was specified in the credential configuration. + The executable must also exit with exit code 0. + When false, the response must contain the error code and message fields and exit with a non-zero value. +* `token_type`: The 3rd party subject token type. Must be *urn:ietf:params:oauth:token-type:jwt*, +*urn:ietf:params:oauth:token-type:id_token*, or *urn:ietf:params:oauth:token-type:saml2*. +* `id_token`: The 3rd party OIDC token. +* `saml_response`: The 3rd party SAML response. +* `expiration_time`: The 3rd party subject token expiration time in seconds (unix epoch time). +* `code`: The error code string. +* `message`: The error message. + +All response types must include both the `version` and `success` fields. +* Successful responses must include the `token_type` and one of +`id_token` or `saml_response`. The `expiration_time` field must also be present if an output file was specified in +the credential configuration. +* Error responses must include both the `code` and `message` fields. + +The library will populate the following environment variables when the executable is run: +* `GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE`: The audience field from the credential configuration. Always present. +* `GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL`: The service account email. Only present when service account impersonation is used. +* `GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE`: The output file location from the credential configuration. Only present when specified in the credential configuration. +* `GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE`: This expected subject token type. Always present. 
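+
+For illustration only, a minimal Node.js executable along these lines might look like the sketch below. The token file path is a hypothetical placeholder, and a real executable must return a valid, unexpired OIDC ID token or SAML assertion obtained from your identity provider.
+
+```js
+#!/usr/bin/env node
+// Hypothetical executable for executable-sourced credentials. It assumes some
+// other process keeps a fresh OIDC ID token in /tmp/oidc-token (placeholder).
+const fs = require('fs');
+
+// This variable is populated by the auth library when it runs the executable.
+const tokenType = process.env.GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE ||
+  'urn:ietf:params:oauth:token-type:id_token';
+
+const response = {
+  version: 1,
+  success: true,
+  token_type: tokenType,
+  id_token: fs.readFileSync('/tmp/oidc-token', 'utf8').trim(),
+  // Only required when an output file is configured; shown here as one hour ahead.
+  expiration_time: Math.floor(Date.now() / 1000) + 3600,
+};
+
+// The response must be written to stdout.
+process.stdout.write(JSON.stringify(response));
+```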
+ +These environment variables can be used by the executable to avoid hard-coding these values. + +##### Security considerations +The following security practices are highly recommended: +* Access to the script should be restricted as it will be displaying credentials to stdout. This ensures that rogue processes do not gain access to the script. +* The configuration file should not be modifiable. Write access should be restricted to avoid processes modifying the executable command portion. + +Given the complexity of using executable-sourced credentials, it is recommended to use +the existing supported mechanisms (file-sourced/URL-sourced) for providing 3rd party +credentials unless they do not meet your specific requirements. + +You can now [use the Auth library](#using-external-identities) to call Google Cloud +resources from an OIDC or SAML provider. + +#### Configurable Token Lifetime +When creating a credential configuration with workload identity federation using service account impersonation, you can provide an optional argument to configure the service account access token lifetime. + +To generate the configuration with configurable token lifetime, run the following command (this example uses an AWS configuration, but the token lifetime can be configured for all workload identity federation providers): + +```bash +# Generate an AWS configuration file with configurable token lifetime. +gcloud iam workload-identity-pools create-cred-config \ + projects/$PROJECT_NUMBER/locations/global/workloadIdentityPools/$POOL_ID/providers/$AWS_PROVIDER_ID \ + --service-account $SERVICE_ACCOUNT_EMAIL \ + --aws \ + --output-file /path/to/generated/config.json \ + --service-account-token-lifetime-seconds $TOKEN_LIFETIME +``` + + Where the following variables need to be substituted: +- `$PROJECT_NUMBER`: The Google Cloud project number. +- `$POOL_ID`: The workload identity pool ID. +- `$AWS_PROVIDER_ID`: The AWS provider ID. +- `$SERVICE_ACCOUNT_EMAIL`: The email of the service account to impersonate. +- `$TOKEN_LIFETIME`: The desired lifetime duration of the service account access token in seconds. + +The `service-account-token-lifetime-seconds` flag is optional. If not provided, this defaults to one hour. +The minimum allowed value is 600 (10 minutes) and the maximum allowed value is 43200 (12 hours). +If a lifetime greater than one hour is required, the service account must be added as an allowed value in an Organization Policy that enforces the `constraints/iam.allowServiceAccountCredentialLifetimeExtension` constraint. + +Note that configuring a short lifetime (e.g. 10 minutes) will result in the library initiating the entire token exchange flow every 10 minutes, which will call the 3rd party token provider even if the 3rd party token is not expired. + +## Workforce Identity Federation + +[Workforce identity federation](https://cloud.google.com/iam/docs/workforce-identity-federation) lets you use an +external identity provider (IdP) to authenticate and authorize a workforce—a group of users, such as employees, +partners, and contractors—using IAM, so that the users can access Google Cloud services. Workforce identity federation +extends Google Cloud's identity capabilities to support syncless, attribute-based single sign on. 
+ +With workforce identity federation, your workforce can access Google Cloud resources using an external +identity provider (IdP) that supports OpenID Connect (OIDC) or SAML 2.0 such as Azure Active Directory (Azure AD), +Active Directory Federation Services (AD FS), Okta, and others. + +### Accessing resources using an OIDC or SAML 2.0 identity provider + +In order to access Google Cloud resources from an identity provider that supports [OpenID Connect (OIDC)](https://openid.net/connect/), +the following requirements are needed: +- A workforce identity pool needs to be created. +- An OIDC or SAML 2.0 identity provider needs to be added in the workforce pool. + +Follow the detailed [instructions](https://cloud.google.com/iam/docs/configuring-workforce-identity-federation) on how +to configure workforce identity federation. + +After configuring an OIDC or SAML 2.0 provider, a credential configuration +file needs to be generated. The generated credential configuration file contains non-sensitive metadata to instruct the +library on how to retrieve external subject tokens and exchange them for GCP access tokens. +The configuration file can be generated by using the [gcloud CLI](https://cloud.google.com/sdk/). + +The Auth library can retrieve external subject tokens from a local file location +(file-sourced credentials), from a local server (URL-sourced credentials) or by calling an executable +(executable-sourced credentials). + +**File-sourced credentials** +For file-sourced credentials, a background process needs to be continuously refreshing the file +location with a new subject token prior to expiration. For tokens with one hour lifetimes, the token +needs to be updated in the file every hour. The token can be stored directly as plain text or in +JSON format. + +To generate a file-sourced OIDC configuration, run the following command: + +```bash +# Generate an OIDC configuration file for file-sourced credentials. +gcloud iam workforce-pools create-cred-config \ + locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID \ + --subject-token-type=urn:ietf:params:oauth:token-type:id_token \ + --credential-source-file=$PATH_TO_OIDC_ID_TOKEN \ + --workforce-pool-user-project=$WORKFORCE_POOL_USER_PROJECT \ + # Optional arguments for file types. Default is "text": + # --credential-source-type "json" \ + # Optional argument for the field that contains the OIDC credential. + # This is required for json. + # --credential-source-field-name "id_token" \ + --output-file=/path/to/generated/config.json +``` +Where the following variables need to be substituted: +- `$WORKFORCE_POOL_ID`: The workforce pool ID. +- `$PROVIDER_ID`: The provider ID. +- `$PATH_TO_OIDC_ID_TOKEN`: The file path used to retrieve the OIDC token. +- `$WORKFORCE_POOL_USER_PROJECT`: The project number associated with the [workforce pools user project](https://cloud.google.com/iam/docs/workforce-identity-federation#workforce-pools-user-project). + +To generate a file-sourced SAML configuration, run the following command: + +```bash +# Generate a SAML configuration file for file-sourced credentials. 
+gcloud iam workforce-pools create-cred-config \ + locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID \ + --credential-source-file=$PATH_TO_SAML_ASSERTION \ + --subject-token-type=urn:ietf:params:oauth:token-type:saml2 \ + --workforce-pool-user-project=$WORKFORCE_POOL_USER_PROJECT \ + --output-file=/path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$WORKFORCE_POOL_ID`: The workforce pool ID. +- `$PROVIDER_ID`: The provider ID. +- `$PATH_TO_SAML_ASSERTION`: The file path used to retrieve the base64-encoded SAML assertion. +- `$WORKFORCE_POOL_USER_PROJECT`: The project number associated with the [workforce pools user project](https://cloud.google.com/iam/docs/workforce-identity-federation#workforce-pools-user-project). + +These commands generate the configuration file in the specified output file. + +**URL-sourced credentials** +For URL-sourced credentials, a local server needs to host a GET endpoint to return the OIDC token. +The response can be in plain text or JSON. Additional required request headers can also be +specified. + +To generate a URL-sourced OIDC workforce identity configuration, run the following command: + +```bash +# Generate an OIDC configuration file for URL-sourced credentials. +gcloud iam workforce-pools create-cred-config \ + locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID \ + --subject-token-type=urn:ietf:params:oauth:token-type:id_token \ + --credential-source-url=$URL_TO_RETURN_OIDC_ID_TOKEN \ + --credential-source-headers $HEADER_KEY=$HEADER_VALUE \ + --workforce-pool-user-project=$WORKFORCE_POOL_USER_PROJECT \ + --output-file=/path/to/generated/config.json +``` + +Where the following variables need to be substituted: +- `$WORKFORCE_POOL_ID`: The workforce pool ID. +- `$PROVIDER_ID`: The provider ID. +- `$URL_TO_RETURN_OIDC_ID_TOKEN`: The URL of the local server endpoint. +- `$HEADER_KEY` and `$HEADER_VALUE`: The additional header key/value pairs to pass along the GET request to + `$URL_TO_GET_OIDC_TOKEN`, e.g. `Metadata-Flavor=Google`. +- `$WORKFORCE_POOL_USER_PROJECT`: The project number associated with the [workforce pools user project](https://cloud.google.com/iam/docs/workforce-identity-federation#workforce-pools-user-project). + +To generate a URL-sourced SAML configuration, run the following command: + +```bash +# Generate a SAML configuration file for file-sourced credentials. +gcloud iam workforce-pools create-cred-config \ + locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID \ + --subject-token-type=urn:ietf:params:oauth:token-type:saml2 \ + --credential-source-url=$URL_TO_GET_SAML_ASSERTION \ + --credential-source-headers $HEADER_KEY=$HEADER_VALUE \ + --workforce-pool-user-project=$WORKFORCE_POOL_USER_PROJECT \ + --output-file=/path/to/generated/config.json +``` + +These commands generate the configuration file in the specified output file. + +Where the following variables need to be substituted: +- `$WORKFORCE_POOL_ID`: The workforce pool ID. +- `$PROVIDER_ID`: The provider ID. +- `$URL_TO_GET_SAML_ASSERTION`: The URL of the local server endpoint. +- `$HEADER_KEY` and `$HEADER_VALUE`: The additional header key/value pairs to pass along the GET request to + `$URL_TO_GET_SAML_ASSERTION`, e.g. `Metadata-Flavor=Google`. +- `$WORKFORCE_POOL_USER_PROJECT`: The project number associated with the [workforce pools user project](https://cloud.google.com/iam/docs/workforce-identity-federation#workforce-pools-user-project). 
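+
+For local experimentation, the endpoint behind a URL-sourced configuration can be as small as the illustrative Express handler below. The port, path, header check, and token file are placeholder assumptions; a production endpoint must return a valid, unexpired OIDC token or SAML assertion obtained from your identity provider.
+
+```js
+const express = require('express');
+const fs = require('fs');
+
+const app = express();
+
+// Placeholder endpoint matching a --credential-source-url such as
+// http://localhost:5000/token.
+app.get('/token', (req, res) => {
+  // Optionally require the headers configured via --credential-source-headers,
+  // e.g. Metadata-Flavor=Google.
+  if (req.get('Metadata-Flavor') !== 'Google') {
+    return res.status(403).send('Missing required header.');
+  }
+  // Plain text is the default response type; returning JSON works together with
+  // --credential-source-type=json and --credential-source-field-name=id_token.
+  res.json({id_token: fs.readFileSync('/tmp/oidc-token', 'utf8').trim()});
+});
+
+app.listen(5000, () => console.log('Token endpoint listening on port 5000'));
+```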
+ +### Using Executable-sourced workforce credentials with OIDC and SAML + +**Executable-sourced credentials** +For executable-sourced credentials, a local executable is used to retrieve the 3rd party token. +The executable must handle providing a valid, unexpired OIDC ID token or SAML assertion in JSON format +to stdout. + +To use executable-sourced credentials, the `GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES` +environment variable must be set to `1`. + +To generate an executable-sourced workforce identity configuration, run the following command: + +```bash +# Generate a configuration file for executable-sourced credentials. +gcloud iam workforce-pools create-cred-config \ + locations/global/workforcePools/$WORKFORCE_POOL_ID/providers/$PROVIDER_ID \ + --subject-token-type=$SUBJECT_TOKEN_TYPE \ + # The absolute path for the program, including arguments. + # e.g. --executable-command="/path/to/command --foo=bar" + --executable-command=$EXECUTABLE_COMMAND \ + # Optional argument for the executable timeout. Defaults to 30s. + # --executable-timeout-millis=$EXECUTABLE_TIMEOUT \ + # Optional argument for the absolute path to the executable output file. + # See below on how this argument impacts the library behaviour. + # --executable-output-file=$EXECUTABLE_OUTPUT_FILE \ + --workforce-pool-user-project=$WORKFORCE_POOL_USER_PROJECT \ + --output-file /path/to/generated/config.json +``` +Where the following variables need to be substituted: +- `$WORKFORCE_POOL_ID`: The workforce pool ID. +- `$PROVIDER_ID`: The provider ID. +- `$SUBJECT_TOKEN_TYPE`: The subject token type. +- `$EXECUTABLE_COMMAND`: The full command to run, including arguments. Must be an absolute path to the program. +- `$WORKFORCE_POOL_USER_PROJECT`: The project number associated with the [workforce pools user project](https://cloud.google.com/iam/docs/workforce-identity-federation#workforce-pools-user-project). + +The `--executable-timeout-millis` flag is optional. This is the duration for which +the auth library will wait for the executable to finish, in milliseconds. +Defaults to 30 seconds when not provided. The maximum allowed value is 2 minutes. +The minimum is 5 seconds. + +The `--executable-output-file` flag is optional. If provided, the file path must +point to the 3rd party credential response generated by the executable. This is useful +for caching the credentials. By specifying this path, the Auth libraries will first +check for its existence before running the executable. By caching the executable JSON +response to this file, it improves performance as it avoids the need to run the executable +until the cached credentials in the output file are expired. The executable must +handle writing to this file - the auth libraries will only attempt to read from +this location. The format of contents in the file should match the JSON format +expected by the executable shown below. + +To retrieve the 3rd party token, the library will call the executable +using the command specified. The executable's output must adhere to the response format +specified below. It must output the response to stdout. + +Refer to the [using executable-sourced credentials with Workload Identity Federation](#using-executable-sourced-credentials-with-oidc-and-saml) +above for the executable response specification. + +##### Security considerations +The following security practices are highly recommended: +* Access to the script should be restricted as it will be displaying credentials to stdout. 
This ensures that rogue processes do not gain access to the script.
+* The configuration file should not be modifiable. Write access should be restricted to avoid processes modifying the executable command portion.
+
+Given the complexity of using executable-sourced credentials, it is recommended to use
+the existing supported mechanisms (file-sourced/URL-sourced) for providing 3rd party
+credentials unless they do not meet your specific requirements.
+
+You can now [use the Auth library](#using-external-identities) to call Google Cloud
+resources from an OIDC or SAML provider.
+
+### Using External Identities
+
+External identities (AWS, Azure and OIDC-based providers) can be used with `Application Default Credentials`.
+In order to use external identities with Application Default Credentials, you need to generate the JSON credentials configuration file for your external identity as described above.
+Once generated, store the path to this file in the `GOOGLE_APPLICATION_CREDENTIALS` environment variable.
+
+```bash
+export GOOGLE_APPLICATION_CREDENTIALS=/path/to/config.json
+```
+
+The library can now automatically choose the right type of client and initialize credentials from the context provided in the configuration file.
+
+```js
+const {GoogleAuth} = require('google-auth-library');
+
+async function main() {
+  const auth = new GoogleAuth({
+    scopes: 'https://www.googleapis.com/auth/cloud-platform'
+  });
+  const client = await auth.getClient();
+  const projectId = await auth.getProjectId();
+  // List all buckets in a project.
+  const url = `https://storage.googleapis.com/storage/v1/b?project=${projectId}`;
+  const res = await client.request({ url });
+  console.log(res.data);
+}
+
+main().catch(console.error);
+```
+
+When using external identities with Application Default Credentials in Node.js, the `roles/browser` role needs to be granted to the service account.
+The `Cloud Resource Manager API` should also be enabled on the project.
+This is needed since the library will try to auto-discover the project ID from the current environment using the impersonated credential.
+To avoid this requirement, the project ID can be explicitly specified on initialization.
+
+```js
+const auth = new GoogleAuth({
+  scopes: 'https://www.googleapis.com/auth/cloud-platform',
+  // Pass the project ID explicitly to avoid the need to grant `roles/browser` to the service account
+  // or enable Cloud Resource Manager API on the project.
+  projectId: 'CLOUD_RESOURCE_PROJECT_ID',
+});
+```
+
+You can also explicitly initialize external account clients using the generated configuration file.
+
+```js
+const {ExternalAccountClient} = require('google-auth-library');
+const jsonConfig = require('/path/to/config.json');
+
+async function main() {
+  const client = ExternalAccountClient.fromJSON(jsonConfig);
+  client.scopes = ['https://www.googleapis.com/auth/cloud-platform'];
+  // Replace with your Google Cloud project ID.
+  const projectId = 'YOUR_PROJECT_ID';
+  // List all buckets in a project.
+  const url = `https://storage.googleapis.com/storage/v1/b?project=${projectId}`;
+  const res = await client.request({url});
+  console.log(res.data);
+}
+
+main().catch(console.error);
+```
+
+#### Security Considerations
+Note that this library does not perform any validation on the `token_url`, `token_info_url`, or `service_account_impersonation_url` fields of the credential configuration. It is not recommended to use a credential configuration that you did not generate with the gcloud CLI unless you verify that the URL fields point to a googleapis.com domain.
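+
+As a precaution, such a check can be done in a few lines before handing a configuration to the library. The helper below is an illustrative sketch, not part of the library; the configuration path is a placeholder.
+
+```js
+// Illustrative sanity check: verify that the endpoint fields of a credential
+// configuration point at googleapis.com before using it.
+function assertGoogleApisUrls(config) {
+  const fields = [
+    'token_url',
+    'token_info_url',
+    'service_account_impersonation_url',
+  ];
+  for (const field of fields) {
+    if (!config[field]) continue;
+    const {hostname} = new URL(config[field]);
+    if (hostname !== 'googleapis.com' && !hostname.endsWith('.googleapis.com')) {
+      throw new Error(`Unexpected host for ${field}: ${hostname}`);
+    }
+  }
+}
+
+// Example usage with a placeholder path:
+assertGoogleApisUrls(require('/path/to/config.json'));
+```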
+
+## Working with ID Tokens
+### Fetching ID Tokens
+If your application is running on Cloud Run or Cloud Functions, or using Cloud Identity-Aware
+Proxy (IAP), you will need to fetch an ID token to access your application. For
+this, use the method `getIdTokenClient` on the `GoogleAuth` client.
+
+For invoking Cloud Run services, your service account will need the
+[`Cloud Run Invoker`](https://cloud.google.com/run/docs/authenticating/service-to-service)
+IAM permission.
+
+For invoking Cloud Functions, your service account will need the
+[`Function Invoker`](https://cloud.google.com/functions/docs/securing/authenticating#function-to-function)
+IAM permission.
+
+``` js
+// Make a request to a protected Cloud Run service.
+const {GoogleAuth} = require('google-auth-library');
+
+async function main() {
+  const url = 'https://cloud-run-1234-uc.a.run.app';
+  const auth = new GoogleAuth();
+  const client = await auth.getIdTokenClient(url);
+  const res = await client.request({url});
+  console.log(res.data);
+}
+
+main().catch(console.error);
+```
+
+A complete example can be found in [`samples/idtokens-serverless.js`](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idtokens-serverless.js).
+
+For invoking Cloud Identity-Aware Proxy, you will need to pass the Client ID
+used when you set up your protected resource as the target audience.
+
+``` js
+// Make a request to a protected Cloud Identity-Aware Proxy (IAP) resource
+const {GoogleAuth} = require('google-auth-library');
+
+async function main() {
+  const targetAudience = 'iap-client-id';
+  const url = 'https://iap-url.com';
+  const auth = new GoogleAuth();
+  const client = await auth.getIdTokenClient(targetAudience);
+  const res = await client.request({url});
+  console.log(res.data);
+}
+
+main().catch(console.error);
+```
+
+A complete example can be found in [`samples/idtokens-iap.js`](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idtokens-iap.js).
+
+### Verifying ID Tokens
+
+If you've [secured your IAP app with signed headers](https://cloud.google.com/iap/docs/signed-headers-howto),
+you can use this library to verify the IAP header:
+
+```js
+const {OAuth2Client} = require('google-auth-library');
+// Expected audience for App Engine.
+const expectedAudience = `/projects/your-project-number/apps/your-project-id`;
+// IAP issuer
+const issuers = ['https://cloud.google.com/iap'];
+// `idToken` is the signed JWT from the `x-goog-iap-jwt-assertion` request header.
+// Verify the token. OAuth2Client throws an Error if verification fails.
+const oAuth2Client = new OAuth2Client();
+const response = await oAuth2Client.getIapPublicKeys();
+const ticket = await oAuth2Client.verifySignedJwtWithCertsAsync(
+  idToken,
+  response.pubkeys,
+  expectedAudience,
+  issuers
+);
+
+// Print out the info contained in the IAP ID token.
+console.log(ticket);
+```
+
+A complete example can be found in [`samples/verifyIdToken-iap.js`](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/verifyIdToken-iap.js).
+
+## Impersonated Credentials Client
+
+Google Cloud Impersonated credentials are used for [Creating short-lived service account credentials](https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials).
+
+This provides authentication for applications where local credentials impersonate a remote service account using the [IAM Credentials API](https://cloud.google.com/iam/docs/reference/credentials/rest).
+
+An Impersonated Credentials Client is instantiated with a `sourceClient`.
This +client should use credentials that have the "Service Account Token Creator" role (`roles/iam.serviceAccountTokenCreator`), +and should authenticate with the `https://www.googleapis.com/auth/cloud-platform`, or `https://www.googleapis.com/auth/iam` scopes. + +`sourceClient` is used by the Impersonated +Credentials Client to impersonate a target service account with a specified +set of scopes. + +### Sample Usage + +```javascript +const { GoogleAuth, Impersonated } = require('google-auth-library'); +const { SecretManagerServiceClient } = require('@google-cloud/secret-manager'); + +async function main() { + + // Acquire source credentials: + const auth = new GoogleAuth(); + const client = await auth.getClient(); + + // Impersonate new credentials: + let targetClient = new Impersonated({ + sourceClient: client, + targetPrincipal: 'impersonated-account@projectID.iam.gserviceaccount.com', + lifetime: 30, + delegates: [], + targetScopes: ['https://www.googleapis.com/auth/cloud-platform'] + }); + + // Get impersonated credentials: + const authHeaders = await targetClient.getRequestHeaders(); + // Do something with `authHeaders.Authorization`. + + // Use impersonated credentials: + const url = 'https://www.googleapis.com/storage/v1/b?project=anotherProjectID' + const resp = await targetClient.request({ url }); + for (const bucket of resp.data.items) { + console.log(bucket.name); + } + + // Use impersonated credentials with google-cloud client library + // Note: this works only with certain cloud client libraries utilizing gRPC + // e.g., SecretManager, KMS, AIPlatform + // will not currently work with libraries using REST, e.g., Storage, Compute + const smClient = new SecretManagerServiceClient({ + projectId: anotherProjectID, + auth: { + getClient: () => targetClient, + }, + }); + const secretName = 'projects/anotherProjectNumber/secrets/someProjectName/versions/1'; + const [accessResponse] = await smClient.accessSecretVersion({ + name: secretName, + }); + + const responsePayload = accessResponse.payload.data.toString('utf8'); + // Do something with the secret contained in `responsePayload`. +}; + +main(); +``` + +## Downscoped Client + +[Downscoping with Credential Access Boundaries](https://cloud.google.com/iam/docs/downscoping-short-lived-credentials) is used to restrict the Identity and Access Management (IAM) permissions that a short-lived credential can use. + +The `DownscopedClient` class can be used to produce a downscoped access token from a +`CredentialAccessBoundary` and a source credential. The Credential Access Boundary specifies which resources the newly created credential can access, as well as an upper bound on the permissions that are available on each resource. Using downscoped credentials ensures tokens in flight always have the least privileges, e.g. Principle of Least Privilege. + +> Notice: Only Cloud Storage supports Credential Access Boundaries for now. + +### Sample Usage +There are two entities needed to generate and use credentials generated from +Downscoped Client with Credential Access Boundaries. + +- Token broker: This is the entity with elevated permissions. This entity has the permissions needed to generate downscoped tokens. The common pattern of usage is to have a token broker with elevated access generate these downscoped credentials from higher access source credentials and pass the downscoped short-lived access tokens to a token consumer via some secure authenticated channel for limited access to Google Cloud Storage resources. 
+
+``` js
+const {GoogleAuth, DownscopedClient} = require('google-auth-library');
+// Define CAB rules which will restrict the downscoped token to have readonly
+// access to objects starting with "customer-a" in bucket "bucket_name".
+const cabRules = {
+  accessBoundary: {
+    accessBoundaryRules: [
+      {
+        availableResource: `//storage.googleapis.com/projects/_/buckets/bucket_name`,
+        availablePermissions: ['inRole:roles/storage.objectViewer'],
+        availabilityCondition: {
+          expression:
+            `resource.name.startsWith('projects/_/buckets/` +
+            `bucket_name/objects/customer-a')`
+        }
+      },
+    ],
+  },
+};
+
+// This will use ADC to get the credentials used for the downscoped client.
+const googleAuth = new GoogleAuth({
+  scopes: ['https://www.googleapis.com/auth/cloud-platform']
+});
+
+// Obtain an authenticated client via ADC.
+const client = await googleAuth.getClient();
+
+// Use the client and the CAB rules to create a DownscopedClient.
+const cabClient = new DownscopedClient(client, cabRules);
+
+// Refresh the tokens.
+const refreshedAccessToken = await cabClient.getAccessToken();
+
+// These will need to be passed to the token consumer.
+const access_token = refreshedAccessToken.token;
+const expiry_date = refreshedAccessToken.expirationTime;
+```
+
+A token broker can be set up on a server in a private network. Various workloads
+(token consumers) in the same network will send authenticated requests to that broker for downscoped tokens to access or modify specific Google Cloud Storage buckets.
+
+The broker will instantiate downscoped credentials instances that can be used to generate short-lived downscoped access tokens, which will be passed to the token consumer.
+
+- Token consumer: This is the consumer of the downscoped tokens. This entity does not have the direct ability to generate access tokens and instead relies on the token broker to provide it with downscoped tokens to run operations on GCS buckets. It is assumed that the downscoped token consumer may have its own mechanism to authenticate itself with the token broker.
+
+``` js
+const {OAuth2Client} = require('google-auth-library');
+const {Storage} = require('@google-cloud/storage');
+
+// Create the OAuth credentials (the consumer).
+const oauth2Client = new OAuth2Client();
+// We are defining a refresh handler instead of a one-time access
+// token/expiry pair.
+// This will allow the consumer to obtain new downscoped tokens on
+// demand every time a token is expired, without any additional code
+// changes.
+oauth2Client.refreshHandler = async () => {
+  // The common pattern of usage is to have a token broker pass the
+  // downscoped short-lived access tokens to a token consumer via some
+  // secure authenticated channel.
+  const refreshedAccessToken = await cabClient.getAccessToken();
+  return {
+    access_token: refreshedAccessToken.token,
+    expiry_date: refreshedAccessToken.expirationTime,
+  };
+};
+
+// Use the consumer client to define storageOptions and create a GCS object.
+const storageOptions = {
+  projectId: 'my_project_id',
+  authClient: oauth2Client,
+};
+
+const storage = new Storage(storageOptions);
+
+async function main() {
+  const downloadFile = await storage
+    .bucket('bucket_name')
+    .file('customer-a-data.txt')
+    .download();
+  console.log(downloadFile.toString('utf8'));
+}
+
+main().catch(console.error);
+```
+
+
+## Samples
+
+Samples are in the [`samples/`](https://github.com/googleapis/google-auth-library-nodejs/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample.
+ +| Sample | Source Code | Try it | +| --------------------------- | --------------------------------- | ------ | +| Adc | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/adc.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/adc.js,samples/README.md) | +| Authenticate Explicit | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/authenticateExplicit.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/authenticateExplicit.js,samples/README.md) | +| Authenticate Implicit With Adc | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/authenticateImplicitWithAdc.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/authenticateImplicitWithAdc.js,samples/README.md) | +| Compute | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/compute.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/compute.js,samples/README.md) | +| Credentials | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/credentials.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/credentials.js,samples/README.md) | +| Downscopedclient | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/downscopedclient.js,samples/README.md) | +| Headers | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/headers.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/headers.js,samples/README.md) | +| Id Token From Impersonated Credentials | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idTokenFromImpersonatedCredentials.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/idTokenFromImpersonatedCredentials.js,samples/README.md) | +| Id Token From Metadata Server | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idTokenFromMetadataServer.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/idTokenFromMetadataServer.js,samples/README.md) | +| Id Token From Service Account | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idTokenFromServiceAccount.js) | 
[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/idTokenFromServiceAccount.js,samples/README.md) | +| ID Tokens for Identity-Aware Proxy (IAP) | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idtokens-iap.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/idtokens-iap.js,samples/README.md) | +| ID Tokens for Serverless | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/idtokens-serverless.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/idtokens-serverless.js,samples/README.md) | +| Jwt | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/jwt.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/jwt.js,samples/README.md) | +| Keepalive | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/keepalive.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/keepalive.js,samples/README.md) | +| Keyfile | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/keyfile.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/keyfile.js,samples/README.md) | +| Oauth2-code Verifier | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/oauth2-codeVerifier.js,samples/README.md) | +| Oauth2 | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/oauth2.js,samples/README.md) | +| Sign Blob | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/signBlob.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/signBlob.js,samples/README.md) | +| Sign Blob Impersonated | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/signBlobImpersonated.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/signBlobImpersonated.js,samples/README.md) | +| Verify Google Id Token | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/verifyGoogleIdToken.js) | [![Open in 
Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/verifyGoogleIdToken.js,samples/README.md) | +| Verifying ID Tokens from Identity-Aware Proxy (IAP) | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/verifyIdToken-iap.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/verifyIdToken-iap.js,samples/README.md) | +| Verify Id Token | [source code](https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/verifyIdToken.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-auth-library-nodejs&page=editor&open_in_editor=samples/verifyIdToken.js,samples/README.md) | + + + +The [Google Auth Library Node.js Client API Reference][client-docs] documentation +also contains samples. + +## Supported Node.js Versions + +Our client libraries follow the [Node.js release schedule](https://github.com/nodejs/release#release-schedule). +Libraries are compatible with all current _active_ and _maintenance_ versions of +Node.js. +If you are using an end-of-life version of Node.js, we recommend that you update +as soon as possible to an actively supported LTS version. + +Google's client libraries support legacy versions of Node.js runtimes on a +best-efforts basis with the following warnings: + +* Legacy versions are not tested in continuous integration. +* Some security patches and features cannot be backported. +* Dependencies cannot be kept up-to-date. + +Client libraries targeting some end-of-life versions of Node.js are available, and +can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). +The dist-tags follow the naming convention `legacy-(version)`. +For example, `npm install google-auth-library@legacy-8` installs client libraries +for versions compatible with Node.js 8. + +## Versioning + +This library follows [Semantic Versioning](http://semver.org/). + + + +This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways +unless absolutely necessary (e.g. because of critical security issues) or with +an extensive deprecation period. Issues and requests against **stable** libraries +are addressed with the highest priority. + + + + + + +More Information: [Google Cloud Platform Launch Stages][launch_stages] + +[launch_stages]: https://cloud.google.com/terms/launch-stages + +## Contributing + +Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/google-auth-library-nodejs/blob/main/CONTRIBUTING.md). + +Please note that this `README.md`, the `samples/README.md`, +and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) +are generated from a central template. To edit one of these files, make an edit +to its templates in +[directory](https://github.com/googleapis/synthtool). 
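
For quick reference alongside the vendored README excerpt above, a minimal usage sketch of the library being added in this diff follows. It assumes Application Default Credentials are already configured in the environment (for example via `gcloud auth application-default login` or a service-account key file), and the Cloud DNS URL is only a placeholder endpoint — the same `auth.getProjectId()` / `client.request({ url })` pattern documented in the `getProjectId()` comments further down in this diff.

```js
// Minimal sketch: authenticate with Application Default Credentials (ADC)
// and issue an authenticated request. Assumes ADC is configured and the
// target API is enabled for the project; the Cloud DNS URL below is just a
// placeholder endpoint.
const { GoogleAuth } = require('google-auth-library');

async function main() {
  const auth = new GoogleAuth({
    scopes: 'https://www.googleapis.com/auth/cloud-platform',
  });
  const client = await auth.getClient();      // picks the right AuthClient for the environment
  const projectId = await auth.getProjectId();
  const res = await client.request({
    url: `https://dns.googleapis.com/dns/v1/projects/${projectId}`,
  });
  console.log(res.data);
}

main().catch(console.error);
```

The external-account clients introduced below (such as `AwsClient`, which extends `BaseExternalAccountClient`) expose the same `getAccessToken()` / `request()` surface, layering their own subject-token retrieval and STS token exchange underneath it.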
+ +## License + +Apache Version 2.0 + +See [LICENSE](https://github.com/googleapis/google-auth-library-nodejs/blob/main/LICENSE) + +[client-docs]: https://cloud.google.com/nodejs/docs/reference/google-auth-library/latest +[product-docs]: https://cloud.google.com/docs/authentication/ +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png +[projects]: https://console.cloud.google.com/project +[billing]: https://support.google.com/cloud/answer/6293499#enable-billing + +[auth]: https://cloud.google.com/docs/authentication/getting-started diff --git a/node_modules/google-auth-library/build/src/auth/authclient.d.ts b/node_modules/google-auth-library/build/src/auth/authclient.d.ts new file mode 100644 index 0000000..84c9d5e --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/authclient.d.ts @@ -0,0 +1,179 @@ +/// +import { EventEmitter } from 'events'; +import { Gaxios, GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { Transporter } from '../transporters'; +import { Credentials } from './credentials'; +import { GetAccessTokenResponse, Headers } from './oauth2client'; +import { OriginalAndCamel } from '../util'; +/** + * Base auth configurations (e.g. from JWT or `.json` files) with conventional + * camelCased options. + * + * @privateRemarks + * + * This interface is purposely not exported so that it can be removed once + * {@link https://github.com/microsoft/TypeScript/issues/50715} has been + * resolved. Then, we can use {@link OriginalAndCamel} to shrink this interface. + * + * Tracking: {@link https://github.com/googleapis/google-auth-library-nodejs/issues/1686} + */ +interface AuthJSONOptions { + /** + * The project ID corresponding to the current credentials if available. + */ + project_id: string | null; + /** + * An alias for {@link AuthJSONOptions.project_id `project_id`}. + */ + projectId: AuthJSONOptions['project_id']; + /** + * The quota project ID. The quota project can be used by client libraries for the billing purpose. + * See {@link https://cloud.google.com/docs/quota Working with quotas} + */ + quota_project_id: string; + /** + * An alias for {@link AuthJSONOptions.quota_project_id `quota_project_id`}. + */ + quotaProjectId: AuthJSONOptions['quota_project_id']; + /** + * The default service domain for a given Cloud universe. + */ + universe_domain: string; + /** + * An alias for {@link AuthJSONOptions.universe_domain `universe_domain`}. + */ + universeDomain: AuthJSONOptions['universe_domain']; +} +/** + * Base `AuthClient` configuration. + * + * The camelCased options are aliases of the snake_cased options, supporting both + * JSON API and JS conventions. + */ +export interface AuthClientOptions extends Partial> { + credentials?: Credentials; + /** + * A `Gaxios` or `Transporter` instance to use for `AuthClient` requests. + */ + transporter?: Gaxios | Transporter; + /** + * Provides default options to the transporter, such as {@link GaxiosOptions.agent `agent`} or + * {@link GaxiosOptions.retryConfig `retryConfig`}. + */ + transporterOptions?: GaxiosOptions; + /** + * The expiration threshold in milliseconds before forcing token refresh of + * unexpired tokens. + */ + eagerRefreshThresholdMillis?: number; + /** + * Whether to attempt to refresh tokens on status 401/403 responses + * even if an attempt is made to refresh the token preemptively based + * on the expiry_date. 
+ */ + forceRefreshOnFailure?: boolean; +} +/** + * The default cloud universe + * + * @see {@link AuthJSONOptions.universe_domain} + */ +export declare const DEFAULT_UNIVERSE = "googleapis.com"; +/** + * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} + */ +export declare const DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS: number; +/** + * Defines the root interface for all clients that generate credentials + * for calling Google APIs. All clients should implement this interface. + */ +export interface CredentialsClient { + projectId?: AuthClientOptions['projectId']; + eagerRefreshThresholdMillis: NonNullable; + forceRefreshOnFailure: NonNullable; + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + getAccessToken(): Promise; + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + * @param url The URI being authorized. + */ + getRequestHeaders(url?: string): Promise; + /** + * Provides an alternative Gaxios request implementation with auth credentials + */ + request(opts: GaxiosOptions): GaxiosPromise; + /** + * Sets the auth credentials. + */ + setCredentials(credentials: Credentials): void; + /** + * Subscribes a listener to the tokens event triggered when a token is + * generated. + * + * @param event The tokens event to subscribe to. + * @param listener The listener that triggers on event trigger. + * @return The current client instance. + */ + on(event: 'tokens', listener: (tokens: Credentials) => void): this; +} +export declare interface AuthClient { + on(event: 'tokens', listener: (tokens: Credentials) => void): this; +} +export declare abstract class AuthClient extends EventEmitter implements CredentialsClient { + projectId?: string | null; + /** + * The quota project ID. The quota project can be used by client libraries for the billing purpose. + * See {@link https://cloud.google.com/docs/quota Working with quotas} + */ + quotaProjectId?: string; + transporter: Transporter; + credentials: Credentials; + eagerRefreshThresholdMillis: number; + forceRefreshOnFailure: boolean; + universeDomain: string; + constructor(opts?: AuthClientOptions); + /** + * Provides an alternative Gaxios request implementation with auth credentials + */ + abstract request(opts: GaxiosOptions): GaxiosPromise; + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + * @param url The URI being authorized. + */ + abstract getRequestHeaders(url?: string): Promise; + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + abstract getAccessToken(): Promise<{ + token?: string | null; + res?: GaxiosResponse | null; + }>; + /** + * Sets the auth credentials. + */ + setCredentials(credentials: Credentials): void; + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. 
This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. + */ + protected addSharedMetadataHeaders(headers: Headers): Headers; +} +export {}; diff --git a/node_modules/google-auth-library/build/src/auth/authclient.js b/node_modules/google-auth-library/build/src/auth/authclient.js new file mode 100644 index 0000000..5b6758d --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/authclient.js @@ -0,0 +1,79 @@ +"use strict"; +// Copyright 2012 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; +const events_1 = require("events"); +const transporters_1 = require("../transporters"); +const util_1 = require("../util"); +/** + * The default cloud universe + * + * @see {@link AuthJSONOptions.universe_domain} + */ +exports.DEFAULT_UNIVERSE = 'googleapis.com'; +/** + * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} + */ +exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; +class AuthClient extends events_1.EventEmitter { + constructor(opts = {}) { + var _a, _b, _c, _d, _e; + super(); + this.credentials = {}; + this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; + this.forceRefreshOnFailure = false; + this.universeDomain = exports.DEFAULT_UNIVERSE; + const options = (0, util_1.originalOrCamelOptions)(opts); + // Shared auth options + this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null; + this.quotaProjectId = options.get('quota_project_id'); + this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; + this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; + // Shared client options + this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter(); + if (opts.transporterOptions) { + this.transporter.defaults = opts.transporterOptions; + } + if (opts.eagerRefreshThresholdMillis) { + this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; + } + /** + * Sets the auth credentials. + */ + setCredentials(credentials) { + this.credentials = credentials; + } + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. 
+ */ + addSharedMetadataHeaders(headers) { + // quota_project_id, stored in application_default_credentials.json, is set in + // the x-goog-user-project header, to indicate an alternate account for + // billing and quota: + if (!headers['x-goog-user-project'] && // don't override a value the user sets. + this.quotaProjectId) { + headers['x-goog-user-project'] = this.quotaProjectId; + } + return headers; + } +} +exports.AuthClient = AuthClient; diff --git a/node_modules/google-auth-library/build/src/auth/awsclient.d.ts b/node_modules/google-auth-library/build/src/auth/awsclient.d.ts new file mode 100644 index 0000000..ae9fbfb --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/awsclient.d.ts @@ -0,0 +1,94 @@ +import { BaseExternalAccountClient, BaseExternalAccountClientOptions } from './baseexternalclient'; +import { AuthClientOptions } from './authclient'; +/** + * AWS credentials JSON interface. This is used for AWS workloads. + */ +export interface AwsClientOptions extends BaseExternalAccountClientOptions { + credential_source: { + environment_id: string; + region_url?: string; + url?: string; + regional_cred_verification_url: string; + imdsv2_session_token_url?: string; + }; +} +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +export declare class AwsClient extends BaseExternalAccountClient { + private readonly environmentId; + private readonly regionUrl?; + private readonly securityCredentialsUrl?; + private readonly regionalCredVerificationUrl; + private readonly imdsV2SessionTokenUrl?; + private awsRequestSigner; + private region; + static AWS_EC2_METADATA_IPV4_ADDRESS: string; + static AWS_EC2_METADATA_IPV6_ADDRESS: string; + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options: AwsClientOptions, additionalOptions?: AuthClientOptions); + private validateEnvironmentId; + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this uses a serialized AWS signed request to the STS GetCallerIdentity + * endpoint. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. 
Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + * @return A promise that resolves with the external subject token. + */ + retrieveSubjectToken(): Promise; + /** + * @return A promise that resolves with the IMDSv2 Session Token. + */ + private getImdsV2SessionToken; + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the current AWS region. + */ + private getAwsRegion; + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. + */ + private getAwsRoleName; + /** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ + private getAwsSecurityCredentials; + private get regionFromEnv(); + private get securityCredentialsFromEnv(); +} diff --git a/node_modules/google-auth-library/build/src/auth/awsclient.js b/node_modules/google-auth-library/build/src/auth/awsclient.js new file mode 100644 index 0000000..87c8260 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/awsclient.js @@ -0,0 +1,260 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AwsClient = void 0; +const awsrequestsigner_1 = require("./awsrequestsigner"); +const baseexternalclient_1 = require("./baseexternalclient"); +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. 
+ */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + this.environmentId = options.credential_source.environment_id; + // This is only required if the AWS region is not available in the + // AWS_REGION or AWS_DEFAULT_REGION environment variables. + this.regionUrl = options.credential_source.region_url; + // This is only required if AWS security credentials are not available in + // environment variables. + this.securityCredentialsUrl = options.credential_source.url; + this.regionalCredVerificationUrl = + options.credential_source.regional_cred_verification_url; + this.imdsV2SessionTokenUrl = + options.credential_source.imdsv2_session_token_url; + this.awsRequestSigner = null; + this.region = ''; + this.credentialSourceType = 'aws'; + // Data validators. + this.validateEnvironmentId(); + } + validateEnvironmentId() { + var _a; + const match = (_a = this.environmentId) === null || _a === void 0 ? void 0 : _a.match(/^(aws)(\d+)$/); + if (!match || !this.regionalCredVerificationUrl) { + throw new Error('No valid AWS "credential_source" provided'); + } + else if (parseInt(match[2], 10) !== 1) { + throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + } + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this uses a serialized AWS signed request to the STS GetCallerIdentity + * endpoint. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Initialize AWS request signer if not already initialized. + if (!this.awsRequestSigner) { + const metadataHeaders = {}; + // Only retrieve the IMDSv2 session token if both the security credentials and region are + // not retrievable through the environment. + // The credential config contains all the URLs by default but clients may be running this + // where the metadata server is not available and returning the credentials through the environment. + // Removing this check may break them. + if (!this.regionFromEnv && this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await this.getImdsV2SessionToken(); + } + this.region = await this.getAwsRegion(metadataHeaders); + this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { + // Check environment variables for permanent credentials first. 
+ // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html + if (this.securityCredentialsFromEnv) { + return this.securityCredentialsFromEnv; + } + if (this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await this.getImdsV2SessionToken(); + } + // Since the role on a VM can change, we don't need to cache it. + const roleName = await this.getAwsRoleName(metadataHeaders); + // Temporary credentials typically last for several hours. + // Expiration is returned in response. + // Consider future optimization of this logic to cache AWS tokens + // until their natural expiration. + const awsCreds = await this.getAwsSecurityCredentials(roleName, metadataHeaders); + return { + accessKeyId: awsCreds.AccessKeyId, + secretAccessKey: awsCreds.SecretAccessKey, + token: awsCreds.Token, + }; + }, this.region); + } + // Generate signed request to AWS STS GetCallerIdentity API. + // Use the required regional endpoint. Otherwise, the request will fail. + const options = await this.awsRequestSigner.getRequestOptions({ + url: this.regionalCredVerificationUrl.replace('{region}', this.region), + method: 'POST', + }); + // The GCP STS endpoint expects the headers to be formatted as: + // [ + // {key: 'x-amz-date', value: '...'}, + // {key: 'Authorization', value: '...'}, + // ... + // ] + // And then serialized as: + // encodeURIComponent(JSON.stringify({ + // url: '...', + // method: 'POST', + // headers: [{key: 'x-amz-date', value: '...'}, ...] + // })) + const reformattedHeader = []; + const extendedHeaders = Object.assign({ + // The full, canonical resource name of the workload identity pool + // provider, with or without the HTTPS prefix. + // Including this header as part of the signature is recommended to + // ensure data integrity. + 'x-goog-cloud-target-resource': this.audience, + }, options.headers); + // Reformat header to GCP STS expected format. + for (const key in extendedHeaders) { + reformattedHeader.push({ + key, + value: extendedHeaders[key], + }); + } + // Serialize the reformatted signed request. + return encodeURIComponent(JSON.stringify({ + url: options.url, + method: options.method, + headers: reformattedHeader, + })); + } + /** + * @return A promise that resolves with the IMDSv2 Session Token. + */ + async getImdsV2SessionToken() { + const opts = { + url: this.imdsV2SessionTokenUrl, + method: 'PUT', + responseType: 'text', + headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, + }; + const response = await this.transporter.request(opts); + return response.data; + } + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the current AWS region. + */ + async getAwsRegion(headers) { + // Priority order for region determination: + // AWS_REGION > AWS_DEFAULT_REGION > metadata server. + if (this.regionFromEnv) { + return this.regionFromEnv; + } + if (!this.regionUrl) { + throw new Error('Unable to determine AWS region due to missing ' + + '"options.credential_source.region_url"'); + } + const opts = { + url: this.regionUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await this.transporter.request(opts); + // Remove last character. For example, if us-east-2b is returned, + // the region would be us-east-2. + return response.data.substr(0, response.data.length - 1); + } + /** + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the assigned role to the current + * AWS VM. 
This is needed for calling the security-credentials endpoint. + */ + async getAwsRoleName(headers) { + if (!this.securityCredentialsUrl) { + throw new Error('Unable to determine AWS role name due to missing ' + + '"options.credential_source.url"'); + } + const opts = { + url: this.securityCredentialsUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await this.transporter.request(opts); + return response.data; + } + /** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ + async getAwsSecurityCredentials(roleName, headers) { + const response = await this.transporter.request({ + url: `${this.securityCredentialsUrl}/${roleName}`, + responseType: 'json', + headers: headers, + }); + return response.data; + } + get regionFromEnv() { + // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. + // Only one is required. + return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); + } + get securityCredentialsFromEnv() { + // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. + if (process.env['AWS_ACCESS_KEY_ID'] && + process.env['AWS_SECRET_ACCESS_KEY']) { + return { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + token: process.env['AWS_SESSION_TOKEN'], + }; + } + return null; + } +} +exports.AwsClient = AwsClient; +AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; diff --git a/node_modules/google-auth-library/build/src/auth/awsrequestsigner.d.ts b/node_modules/google-auth-library/build/src/auth/awsrequestsigner.d.ts new file mode 100644 index 0000000..3467e97 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/awsrequestsigner.d.ts @@ -0,0 +1,40 @@ +import { GaxiosOptions } from 'gaxios'; +/** + * Interface defining AWS security credentials. + * These are either determined from AWS security_credentials endpoint or + * AWS environment variables. + */ +export interface AwsSecurityCredentials { + accessKeyId: string; + secretAccessKey: string; + token?: string; +} +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +export declare class AwsRequestSigner { + private readonly getCredentials; + private readonly region; + private readonly crypto; + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. + */ + constructor(getCredentials: () => Promise, region: string); + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. 
+ * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + getRequestOptions(amzOptions: GaxiosOptions): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/awsrequestsigner.js b/node_modules/google-auth-library/build/src/auth/awsrequestsigner.js new file mode 100644 index 0000000..882a9a9 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/awsrequestsigner.js @@ -0,0 +1,209 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AwsRequestSigner = void 0; +const crypto_1 = require("../crypto/crypto"); +/** AWS Signature Version 4 signing algorithm identifier. */ +const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; +/** + * The termination string for the AWS credential scope value as defined in + * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + */ +const AWS_REQUEST_TYPE = 'aws4_request'; +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +class AwsRequestSigner { + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. + */ + constructor(getCredentials, region) { + this.getCredentials = getCredentials; + this.region = region; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + async getRequestOptions(amzOptions) { + if (!amzOptions.url) { + throw new Error('"url" is required in "amzOptions"'); + } + // Stringify JSON requests. This will be set in the request body of the + // generated signed request. + const requestPayloadData = typeof amzOptions.data === 'object' + ? 
JSON.stringify(amzOptions.data) + : amzOptions.data; + const url = amzOptions.url; + const method = amzOptions.method || 'GET'; + const requestPayload = amzOptions.body || requestPayloadData; + const additionalAmzHeaders = amzOptions.headers; + const awsSecurityCredentials = await this.getCredentials(); + const uri = new URL(url); + const headerMap = await generateAuthenticationHeaderMap({ + crypto: this.crypto, + host: uri.host, + canonicalUri: uri.pathname, + canonicalQuerystring: uri.search.substr(1), + method, + region: this.region, + securityCredentials: awsSecurityCredentials, + requestPayload, + additionalAmzHeaders, + }); + // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. + const headers = Object.assign( + // Add x-amz-date if available. + headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, { + Authorization: headerMap.authorizationHeader, + host: uri.host, + }, additionalAmzHeaders || {}); + if (awsSecurityCredentials.token) { + Object.assign(headers, { + 'x-amz-security-token': awsSecurityCredentials.token, + }); + } + const awsSignedReq = { + url, + method: method, + headers, + }; + if (typeof requestPayload !== 'undefined') { + awsSignedReq.body = requestPayload; + } + return awsSignedReq; + } +} +exports.AwsRequestSigner = AwsRequestSigner; +/** + * Creates the HMAC-SHA256 hash of the provided message using the + * provided key. + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The HMAC-SHA256 key to use. + * @param msg The message to hash. + * @return The computed hash bytes. + */ +async function sign(crypto, key, msg) { + return await crypto.signWithHmacSha256(key, msg); +} +/** + * Calculates the signing key used to calculate the signature for + * AWS Signature Version 4 based on: + * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The AWS secret access key. + * @param dateStamp The '%Y%m%d' date format. + * @param region The AWS region. + * @param serviceName The AWS service name, eg. sts. + * @return The signing key bytes. + */ +async function getSigningKey(crypto, key, dateStamp, region, serviceName) { + const kDate = await sign(crypto, `AWS4${key}`, dateStamp); + const kRegion = await sign(crypto, kDate, region); + const kService = await sign(crypto, kRegion, serviceName); + const kSigning = await sign(crypto, kService, 'aws4_request'); + return kSigning; +} +/** + * Generates the authentication header map needed for generating the AWS + * Signature Version 4 signed request. + * + * @param option The options needed to compute the authentication header map. + * @return The AWS authentication header map which constitutes of the following + * components: amz-date, authorization header and canonical query string. + */ +async function generateAuthenticationHeaderMap(options) { + const additionalAmzHeaders = options.additionalAmzHeaders || {}; + const requestPayload = options.requestPayload || ''; + // iam.amazonaws.com host => iam service. + // sts.us-east-2.amazonaws.com => sts service. + const serviceName = options.host.split('.')[0]; + const now = new Date(); + // Format: '%Y%m%dT%H%M%SZ'. + const amzDate = now + .toISOString() + .replace(/[-:]/g, '') + .replace(/\.[0-9]+/, ''); + // Format: '%Y%m%d'. + const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); + // Change all additional headers to be lower case. 
+ const reformattedAdditionalAmzHeaders = {}; + Object.keys(additionalAmzHeaders).forEach(key => { + reformattedAdditionalAmzHeaders[key.toLowerCase()] = + additionalAmzHeaders[key]; + }); + // Add AWS token if available. + if (options.securityCredentials.token) { + reformattedAdditionalAmzHeaders['x-amz-security-token'] = + options.securityCredentials.token; + } + // Header keys need to be sorted alphabetically. + const amzHeaders = Object.assign({ + host: options.host, + }, + // Previously the date was not fixed with x-amz- and could be provided manually. + // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req + reformattedAdditionalAmzHeaders.date ? {} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); + let canonicalHeaders = ''; + const signedHeadersList = Object.keys(amzHeaders).sort(); + signedHeadersList.forEach(key => { + canonicalHeaders += `${key}:${amzHeaders[key]}\n`; + }); + const signedHeaders = signedHeadersList.join(';'); + const payloadHash = await options.crypto.sha256DigestHex(requestPayload); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + const canonicalRequest = `${options.method}\n` + + `${options.canonicalUri}\n` + + `${options.canonicalQuerystring}\n` + + `${canonicalHeaders}\n` + + `${signedHeaders}\n` + + `${payloadHash}`; + const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + const stringToSign = `${AWS_ALGORITHM}\n` + + `${amzDate}\n` + + `${credentialScope}\n` + + (await options.crypto.sha256DigestHex(canonicalRequest)); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); + const signature = await sign(options.crypto, signingKey, stringToSign); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html + const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; + return { + // Do not return x-amz-date if date is available. + amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate, + authorizationHeader, + canonicalQuerystring: options.canonicalQuerystring, + }; +} diff --git a/node_modules/google-auth-library/build/src/auth/baseexternalclient.d.ts b/node_modules/google-auth-library/build/src/auth/baseexternalclient.d.ts new file mode 100644 index 0000000..0d34f58 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/baseexternalclient.d.ts @@ -0,0 +1,238 @@ +import { GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { Credentials } from './credentials'; +import { AuthClient, AuthClientOptions } from './authclient'; +import { BodyResponseCallback } from '../transporters'; +import { GetAccessTokenResponse, Headers } from './oauth2client'; +import { SnakeToCamelObject } from '../util'; +/** + * Offset to take into account network delays and server clock skews. + */ +export declare const EXPIRATION_TIME_OFFSET: number; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. 
service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +export declare const EXTERNAL_ACCOUNT_TYPE = "external_account"; +/** + * Cloud resource manager URL used to retrieve project information. + * + * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead + **/ +export declare const CLOUD_RESOURCE_MANAGER = "https://cloudresourcemanager.googleapis.com/v1/projects/"; +/** + * For backwards compatibility. + */ +export { DEFAULT_UNIVERSE } from './authclient'; +export interface SharedExternalAccountClientOptions extends AuthClientOptions { + audience: string; + token_url: string; +} +/** + * Base external account credentials json interface. + */ +export interface BaseExternalAccountClientOptions extends SharedExternalAccountClientOptions { + type: string; + subject_token_type: string; + service_account_impersonation_url?: string; + service_account_impersonation?: { + token_lifetime_seconds?: number; + }; + token_info_url?: string; + client_id?: string; + client_secret?: string; + workforce_pool_user_project?: string; + scopes?: string[]; + /** + * @example + * https://cloudresourcemanager.googleapis.com/v1/projects/ + **/ + cloud_resource_manager_url?: string | URL; +} +/** + * Interface defining the successful response for iamcredentials + * generateAccessToken API. + * https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateAccessToken + */ +export interface IamGenerateAccessTokenResponse { + accessToken: string; + expireTime: string; +} +/** + * Interface defining the project information response returned by the cloud + * resource manager. + * https://cloud.google.com/resource-manager/reference/rest/v1/projects#Project + */ +export interface ProjectInfo { + projectNumber: string; + projectId: string; + lifecycleState: string; + name: string; + createTime?: string; + parent: { + [key: string]: any; + }; +} +/** + * Internal interface for tracking the access token expiration time. + */ +interface CredentialsWithResponse extends Credentials { + res?: GaxiosResponse | null; +} +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +export declare abstract class BaseExternalAccountClient extends AuthClient { + /** + * OAuth scopes for the GCP access token to use. When not provided, + * the default https://www.googleapis.com/auth/cloud-platform is + * used. 
+ */ + scopes?: string | string[]; + private cachedAccessToken; + protected readonly audience: string; + protected readonly subjectTokenType: string; + private readonly serviceAccountImpersonationUrl?; + private readonly serviceAccountImpersonationLifetime?; + private readonly stsCredential; + private readonly clientAuth?; + private readonly workforcePoolUserProject?; + projectNumber: string | null; + private readonly configLifetimeRequested; + protected credentialSourceType?: string; + /** + * @example + * ```ts + * new URL('https://cloudresourcemanager.googleapis.com/v1/projects/'); + * ``` + */ + protected cloudResourceManagerURL: URL | string; + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options: BaseExternalAccountClientOptions | SnakeToCamelObject, additionalOptions?: AuthClientOptions); + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail(): string | null; + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials: Credentials): void; + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. + * This abstract method needs to be implemented by subclasses depending on + * the type of external credential used. + * @return A promise that resolves with the external subject token. + */ + abstract retrieveSubjectToken(): Promise; + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + getAccessToken(): Promise; + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + getRequestHeaders(): Promise; + /** + * Provides a request implementation with OAuth 2.0 flow. In cases of + * HTTP 401 and 403 responses, it automatically asks for a new access token + * and replays the unsuccessful request. + * @param opts Request options. + * @param callback callback. + * @return A promise that resolves with the HTTP response when no callback is + * provided. + */ + request(opts: GaxiosOptions): GaxiosPromise; + request(opts: GaxiosOptions, callback: BodyResponseCallback): void; + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. 
+ * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + getProjectId(): Promise; + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. + */ + protected requestAsync(opts: GaxiosOptions, retry?: boolean): Promise>; + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + protected refreshAccessTokenAsync(): Promise; + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + private getProjectNumber; + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + private getImpersonatedAccessToken; + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + private isExpired; + /** + * @return The list of scopes for the requested GCP access token. + */ + private getScopesArray; + private getMetricsHeaderValue; +} diff --git a/node_modules/google-auth-library/build/src/auth/baseexternalclient.js b/node_modules/google-auth-library/build/src/auth/baseexternalclient.js new file mode 100644 index 0000000..df8e8a1 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/baseexternalclient.js @@ -0,0 +1,430 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +const stream = require("stream"); +const authclient_1 = require("./authclient"); +const sts = require("./stscredentials"); +const util_1 = require("../util"); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The default OAuth scope to request when none is provided. */ +const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; +/** Default impersonated token lifespan in seconds.*/ +const DEFAULT_TOKEN_LIFESPAN = 3600; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; +/** + * Cloud resource manager URL used to retrieve project information. + * + * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead + **/ +exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; +/** The workforce audience pattern. */ +const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = require('../../../package.json'); +/** + * For backwards compatibility. + */ +var authclient_2 = require("./authclient"); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", { enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } }); +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +class BaseExternalAccountClient extends authclient_1.AuthClient { + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. 
+ */ + constructor(options, additionalOptions) { + super({ ...options, ...additionalOptions }); + const opts = (0, util_1.originalOrCamelOptions)(options); + if (opts.get('type') !== exports.EXTERNAL_ACCOUNT_TYPE) { + throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + + `received "${options.type}"`); + } + const clientId = opts.get('client_id'); + const clientSecret = opts.get('client_secret'); + const tokenUrl = opts.get('token_url'); + const subjectTokenType = opts.get('subject_token_type'); + const workforcePoolUserProject = opts.get('workforce_pool_user_project'); + const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); + const serviceAccountImpersonation = opts.get('service_account_impersonation'); + const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); + this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || + `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); + if (clientId) { + this.clientAuth = { + confidentialClientType: 'basic', + clientId, + clientSecret, + }; + } + this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); + this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; + this.cachedAccessToken = null; + this.audience = opts.get('audience'); + this.subjectTokenType = subjectTokenType; + this.workforcePoolUserProject = workforcePoolUserProject; + const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); + if (this.workforcePoolUserProject && + !this.audience.match(workforceAudiencePattern)) { + throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + + 'credentials.'); + } + this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; + this.serviceAccountImpersonationLifetime = + serviceAccountImpersonationLifetime; + if (this.serviceAccountImpersonationLifetime) { + this.configLifetimeRequested = true; + } + else { + this.configLifetimeRequested = false; + this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; + } + this.projectNumber = this.getProjectNumber(this.audience); + } + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail() { + var _a; + if (this.serviceAccountImpersonationUrl) { + if (this.serviceAccountImpersonationUrl.length > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} + **/ + throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); + } + // Parse email from URL. The formal looks as follows: + // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken + const re = /serviceAccounts\/(?[^:]+):generateAccessToken$/; + const result = re.exec(this.serviceAccountImpersonationUrl); + return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + } + return null; + } + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. 
+ */ + setCredentials(credentials) { + super.setCredentials(credentials); + this.cachedAccessToken = credentials; + } + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + async getProjectId() { + const projectNumber = this.projectNumber || this.workforcePoolUserProject; + if (this.projectId) { + // Return previously determined project ID. + return this.projectId; + } + else if (projectNumber) { + // Preferable not to use request() to avoid retrial policies. + const headers = await this.getRequestHeaders(); + const response = await this.transporter.request({ + headers, + url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, + responseType: 'json', + }); + this.projectId = response.data.projectId; + return this.projectId; + } + return null; + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. 
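// --- Editor's illustrative sketch (not part of the committed diff) ---
// Expands the usage pattern quoted in the getProjectId() docs above into a
// runnable snippet: obtain a client via GoogleAuth (application default
// credentials assumed to be configured), then let request() attach the
// Authorization header produced by getRequestHeaders().
const { GoogleAuth } = require('google-auth-library');

async function listDnsProject() {
  const auth = new GoogleAuth({
    scopes: 'https://www.googleapis.com/auth/cloud-platform',
  });
  const client = await auth.getClient();
  const projectId = await auth.getProjectId();
  // request() delegates to requestAsync(), which injects the Bearer token
  // and (when set) the x-goog-user-project header.
  const res = await client.request({
    url: `https://dns.googleapis.com/dns/v1/projects/${projectId}`,
  });
  console.log(res.data);
}

listDnsProject().catch(console.error);
// --- end editor's sketch ---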
+ */ + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + async refreshAccessTokenAsync() { + // Retrieve the external credential. + const subjectToken = await this.retrieveSubjectToken(); + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + audience: this.audience, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken, + subjectTokenType: this.subjectTokenType, + // generateAccessToken requires the provided access token to have + // scopes: + // https://www.googleapis.com/auth/iam or + // https://www.googleapis.com/auth/cloud-platform + // The new service account access token scopes will match the user + // provided ones. + scope: this.serviceAccountImpersonationUrl + ? [DEFAULT_OAUTH_SCOPE] + : this.getScopesArray(), + }; + // Exchange the external credentials for a GCP access token. + // Client auth is prioritized over passing the workforcePoolUserProject + // parameter for STS token exchange. + const additionalOptions = !this.clientAuth && this.workforcePoolUserProject + ? { userProject: this.workforcePoolUserProject } + : undefined; + const additionalHeaders = { + 'x-goog-api-client': this.getMetricsHeaderValue(), + }; + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); + if (this.serviceAccountImpersonationUrl) { + this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + } + else if (stsResponse.expires_in) { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, + res: stsResponse.res, + }; + } + else { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + res: stsResponse.res, + }; + } + // Save credentials. 
+ this.credentials = {}; + Object.assign(this.credentials, this.cachedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedAccessToken.expiry_date, + access_token: this.cachedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedAccessToken; + } + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + getProjectNumber(audience) { + // STS audience pattern: + // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... + const match = audience.match(/\/projects\/([^/]+)/); + if (!match) { + return null; + } + return match[1]; + } + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + async getImpersonatedAccessToken(token) { + const opts = { + url: this.serviceAccountImpersonationUrl, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + data: { + scope: this.getScopesArray(), + lifetime: this.serviceAccountImpersonationLifetime + 's', + }, + responseType: 'json', + }; + const response = await this.transporter.request(opts); + const successResponse = response.data; + return { + access_token: successResponse.accessToken, + // Convert from ISO format to timestamp. + expiry_date: new Date(successResponse.expireTime).getTime(), + res: response, + }; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(accessToken) { + const now = new Date().getTime(); + return accessToken.expiry_date + ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } + /** + * @return The list of scopes for the requested GCP access token. + */ + getScopesArray() { + // Since scopes can be provided as string or array, the type should + // be normalized. + if (typeof this.scopes === 'string') { + return [this.scopes]; + } + return this.scopes || [DEFAULT_OAUTH_SCOPE]; + } + getMetricsHeaderValue() { + const nodeVersion = process.version.replace(/^v/, ''); + const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; + const credentialSourceType = this.credentialSourceType + ? 
this.credentialSourceType + : 'unknown'; + return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; + } +} +exports.BaseExternalAccountClient = BaseExternalAccountClient; diff --git a/node_modules/google-auth-library/build/src/auth/computeclient.d.ts b/node_modules/google-auth-library/build/src/auth/computeclient.d.ts new file mode 100644 index 0000000..2d88366 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/computeclient.d.ts @@ -0,0 +1,37 @@ +import { GaxiosError } from 'gaxios'; +import { GetTokenResponse, OAuth2Client, OAuth2ClientOptions } from './oauth2client'; +export interface ComputeOptions extends OAuth2ClientOptions { + /** + * The service account email to use, or 'default'. A Compute Engine instance + * may have multiple service accounts. + */ + serviceAccountEmail?: string; + /** + * The scopes that will be requested when acquiring service account + * credentials. Only applicable to modern App Engine and Cloud Function + * runtimes as of March 2019. + */ + scopes?: string | string[]; +} +export declare class Compute extends OAuth2Client { + readonly serviceAccountEmail: string; + scopes: string[]; + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + */ + constructor(options?: ComputeOptions); + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + protected refreshTokenNoCache(refreshToken?: string | null): Promise; + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + fetchIdToken(targetAudience: string): Promise; + protected wrapError(e: GaxiosError): void; +} diff --git a/node_modules/google-auth-library/build/src/auth/computeclient.js b/node_modules/google-auth-library/build/src/auth/computeclient.js new file mode 100644 index 0000000..4554e03 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/computeclient.js @@ -0,0 +1,117 @@ +"use strict"; +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Compute = void 0; +const gaxios_1 = require("gaxios"); +const gcpMetadata = require("gcp-metadata"); +const oauth2client_1 = require("./oauth2client"); +class Compute extends oauth2client_1.OAuth2Client { + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + */ + constructor(options = {}) { + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. 
+ this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; + this.serviceAccountEmail = options.serviceAccountEmail || 'default'; + this.scopes = Array.isArray(options.scopes) + ? options.scopes + : options.scopes + ? [options.scopes] + : []; + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; + let data; + try { + const instanceOptions = { + property: tokenPath, + }; + if (this.scopes.length > 0) { + instanceOptions.params = { + scopes: this.scopes.join(','), + }; + } + data = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError) { + e.message = `Could not refresh access token: ${e.message}`; + this.wrapError(e); + } + throw e; + } + const tokens = data; + if (data && data.expires_in) { + tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res: null }; + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + + `?format=full&audience=${targetAudience}`; + let idToken; + try { + const instanceOptions = { + property: idTokenPath, + }; + idToken = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof Error) { + e.message = `Could not fetch ID token: ${e.message}`; + } + throw e; + } + return idToken; + } + wrapError(e) { + const res = e.response; + if (res && res.status) { + e.status = res.status; + if (res.status === 403) { + e.message = + 'A Forbidden error was returned while attempting to retrieve an access ' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have the correct permission scopes specified: ' + + e.message; + } + else if (res.status === 404) { + e.message = + 'A Not Found error was returned while attempting to retrieve an access' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have any permission scopes specified: ' + + e.message; + } + } + } +} +exports.Compute = Compute; diff --git a/node_modules/google-auth-library/build/src/auth/credentials.d.ts b/node_modules/google-auth-library/build/src/auth/credentials.d.ts new file mode 100644 index 0000000..896b014 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/credentials.d.ts @@ -0,0 +1,75 @@ +export interface Credentials { + /** + * This field is only present if the access_type parameter was set to offline in the authentication request. For details, see Refresh tokens. + */ + refresh_token?: string | null; + /** + * The time in ms at which this token is thought to expire. + */ + expiry_date?: number | null; + /** + * A token that can be sent to a Google API. + */ + access_token?: string | null; + /** + * Identifies the type of token returned. At this time, this field always has the value Bearer. + */ + token_type?: string | null; + /** + * A JWT that contains identity information about the user that is digitally signed by Google. + */ + id_token?: string | null; + /** + * The scopes of access granted by the access_token expressed as a list of space-delimited, case-sensitive strings. 
+ */ + scope?: string; +} +export interface CredentialRequest { + /** + * This field is only present if the access_type parameter was set to offline in the authentication request. For details, see Refresh tokens. + */ + refresh_token?: string; + /** + * A token that can be sent to a Google API. + */ + access_token?: string; + /** + * Identifies the type of token returned. At this time, this field always has the value Bearer. + */ + token_type?: string; + /** + * The remaining lifetime of the access token in seconds. + */ + expires_in?: number; + /** + * A JWT that contains identity information about the user that is digitally signed by Google. + */ + id_token?: string; + /** + * The scopes of access granted by the access_token expressed as a list of space-delimited, case-sensitive strings. + */ + scope?: string; +} +export interface JWTInput { + type?: string; + client_email?: string; + private_key?: string; + private_key_id?: string; + project_id?: string; + client_id?: string; + client_secret?: string; + refresh_token?: string; + quota_project_id?: string; + universe_domain?: string; +} +export interface ImpersonatedJWTInput { + type?: string; + source_credentials?: JWTInput; + service_account_impersonation_url?: string; + delegates?: string[]; +} +export interface CredentialBody { + client_email?: string; + private_key?: string; + universe_domain?: string; +} diff --git a/node_modules/google-auth-library/build/src/auth/credentials.js b/node_modules/google-auth-library/build/src/auth/credentials.js new file mode 100644 index 0000000..8db999f --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/credentials.js @@ -0,0 +1,15 @@ +"use strict"; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/google-auth-library/build/src/auth/downscopedclient.d.ts b/node_modules/google-auth-library/build/src/auth/downscopedclient.d.ts new file mode 100644 index 0000000..f1879f9 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/downscopedclient.d.ts @@ -0,0 +1,139 @@ +import { GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { BodyResponseCallback } from '../transporters'; +import { Credentials } from './credentials'; +import { AuthClient, AuthClientOptions } from './authclient'; +import { GetAccessTokenResponse, Headers } from './oauth2client'; +/** + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. + */ +export declare const MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; +/** + * Offset to take into account network delays and server clock skews. + */ +export declare const EXPIRATION_TIME_OFFSET: number; +/** + * Internal interface for tracking the access token expiration time. + */ +interface CredentialsWithResponse extends Credentials { + res?: GaxiosResponse | null; +} +/** + * Internal interface for tracking and returning the Downscoped access token + * expiration time in epoch time (seconds). 
+ */ +interface DownscopedAccessTokenResponse extends GetAccessTokenResponse { + expirationTime?: number | null; +} +/** + * Defines an upper bound of permissions available for a GCP credential. + */ +export interface CredentialAccessBoundary { + accessBoundary: { + accessBoundaryRules: AccessBoundaryRule[]; + }; +} +/** Defines an upper bound of permissions on a particular resource. */ +interface AccessBoundaryRule { + availablePermissions: string[]; + availableResource: string; + availabilityCondition?: AvailabilityCondition; +} +/** + * An optional condition that can be used as part of a + * CredentialAccessBoundary to further restrict permissions. + */ +interface AvailabilityCondition { + expression: string; + title?: string; + description?: string; +} +/** + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. + */ +export declare class DownscopedClient extends AuthClient { + private readonly authClient; + private readonly credentialAccessBoundary; + private cachedDownscopedAccessToken; + private readonly stsCredential; + /** + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. + * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** + * Optional additional behavior customization options. + * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** + * Optional quota project id for setting up in the x-goog-user-project header. + */ + constructor(authClient: AuthClient, credentialAccessBoundary: CredentialAccessBoundary, additionalOptions?: AuthClientOptions, quotaProjectId?: string); + /** + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials: Credentials): void; + getAccessToken(): Promise; + /** + * The main authentication interface. 
It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + getRequestHeaders(): Promise; + /** + * Provides a request implementation with OAuth 2.0 flow. In cases of + * HTTP 401 and 403 responses, it automatically asks for a new access token + * and replays the unsuccessful request. + * @param opts Request options. + * @param callback callback. + * @return A promise that resolves with the HTTP response when no callback + * is provided. + */ + request(opts: GaxiosOptions): GaxiosPromise; + request(opts: GaxiosOptions, callback: BodyResponseCallback): void; + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. + */ + protected requestAsync(opts: GaxiosOptions, retry?: boolean): Promise>; + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. + */ + protected refreshAccessTokenAsync(): Promise; + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + private isExpired; +} +export {}; diff --git a/node_modules/google-auth-library/build/src/auth/downscopedclient.js b/node_modules/google-auth-library/build/src/auth/downscopedclient.js new file mode 100644 index 0000000..9c3b626 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/downscopedclient.js @@ -0,0 +1,262 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; +const stream = require("stream"); +const authclient_1 = require("./authclient"); +const sts = require("./stscredentials"); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The requested token exchange subject_token_type: rfc8693#section-2.1 + */ +const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. + */ +exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. + */ +class DownscopedClient extends authclient_1.AuthClient { + /** + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. + * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** + * Optional additional behavior customization options. + * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** + * Optional quota project id for setting up in the x-goog-user-project header. + */ + constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { + super({ ...additionalOptions, quotaProjectId }); + this.authClient = authClient; + this.credentialAccessBoundary = credentialAccessBoundary; + // Check 1-10 Access Boundary Rules are defined within Credential Access + // Boundary. 
+ if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { + throw new Error('At least one access boundary rule needs to be defined.'); + } + else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > + exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { + throw new Error('The provided access boundary has more than ' + + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); + } + // Check at least one permission should be defined in each Access Boundary + // Rule. + for (const rule of credentialAccessBoundary.accessBoundary + .accessBoundaryRules) { + if (rule.availablePermissions.length === 0) { + throw new Error('At least one permission should be defined in access boundary rules.'); + } + } + this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); + this.cachedDownscopedAccessToken = null; + } + /** + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + if (!credentials.expiry_date) { + throw new Error('The access token expiry_date field is missing in the provided ' + + 'credentials.'); + } + super.setCredentials(credentials); + this.cachedDownscopedAccessToken = credentials; + } + async getAccessToken() { + // If the cached access token is unavailable or expired, force refresh. + // The Downscoped access token will be returned in + // DownscopedAccessTokenResponse format. + if (!this.cachedDownscopedAccessToken || + this.isExpired(this.cachedDownscopedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return Downscoped access token in DownscopedAccessTokenResponse format. + return { + token: this.cachedDownscopedAccessToken.access_token, + expirationTime: this.cachedDownscopedAccessToken.expiry_date, + res: this.cachedDownscopedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. 
+ */ + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. + */ + async refreshAccessTokenAsync() { + var _a; + // Retrieve GCP access token from source credential. + const subjectToken = (await this.authClient.getAccessToken()).token; + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken: subjectToken, + subjectTokenType: STS_SUBJECT_TOKEN_TYPE, + }; + // Exchange the source AuthClient access token for a Downscoped access + // token. + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); + /** + * The STS endpoint will only return the expiration time for the downscoped + * access token if the original access token represents a service account. + * The downscoped token's expiration time will always match the source + * credential expiration. When no expires_in is returned, we can copy the + * source credential's expiration time. + */ + const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; + const expiryDate = stsResponse.expires_in + ? new Date().getTime() + stsResponse.expires_in * 1000 + : sourceCredExpireDate; + // Save response in cached access token. + this.cachedDownscopedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: expiryDate, + res: stsResponse.res, + }; + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedDownscopedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedDownscopedAccessToken.expiry_date, + access_token: this.cachedDownscopedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedDownscopedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. 
+ * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(downscopedAccessToken) { + const now = new Date().getTime(); + return downscopedAccessToken.expiry_date + ? now >= + downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.DownscopedClient = DownscopedClient; diff --git a/node_modules/google-auth-library/build/src/auth/envDetect.d.ts b/node_modules/google-auth-library/build/src/auth/envDetect.d.ts new file mode 100644 index 0000000..2fc01c6 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/envDetect.d.ts @@ -0,0 +1,10 @@ +export declare enum GCPEnv { + APP_ENGINE = "APP_ENGINE", + KUBERNETES_ENGINE = "KUBERNETES_ENGINE", + CLOUD_FUNCTIONS = "CLOUD_FUNCTIONS", + COMPUTE_ENGINE = "COMPUTE_ENGINE", + CLOUD_RUN = "CLOUD_RUN", + NONE = "NONE" +} +export declare function clear(): void; +export declare function getEnv(): Promise; diff --git a/node_modules/google-auth-library/build/src/auth/envDetect.js b/node_modules/google-auth-library/build/src/auth/envDetect.js new file mode 100644 index 0000000..0d9c973 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/envDetect.js @@ -0,0 +1,89 @@ +"use strict"; +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEnv = exports.clear = exports.GCPEnv = void 0; +const gcpMetadata = require("gcp-metadata"); +var GCPEnv; +(function (GCPEnv) { + GCPEnv["APP_ENGINE"] = "APP_ENGINE"; + GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; + GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; + GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; + GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; + GCPEnv["NONE"] = "NONE"; +})(GCPEnv || (exports.GCPEnv = GCPEnv = {})); +let envPromise; +function clear() { + envPromise = undefined; +} +exports.clear = clear; +async function getEnv() { + if (envPromise) { + return envPromise; + } + envPromise = getEnvMemoized(); + return envPromise; +} +exports.getEnv = getEnv; +async function getEnvMemoized() { + let env = GCPEnv.NONE; + if (isAppEngine()) { + env = GCPEnv.APP_ENGINE; + } + else if (isCloudFunction()) { + env = GCPEnv.CLOUD_FUNCTIONS; + } + else if (await isComputeEngine()) { + if (await isKubernetesEngine()) { + env = GCPEnv.KUBERNETES_ENGINE; + } + else if (isCloudRun()) { + env = GCPEnv.CLOUD_RUN; + } + else { + env = GCPEnv.COMPUTE_ENGINE; + } + } + else { + env = GCPEnv.NONE; + } + return env; +} +function isAppEngine() { + return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); +} +function isCloudFunction() { + return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); +} +/** + * This check only verifies that the environment is running knative. + * This must be run *after* checking for Kubernetes, otherwise it will + * return a false positive. 
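// --- Editor's illustrative sketch (not part of the committed diff) ---
// The environment probes in envDetect.js above are surfaced through
// GoogleAuth#getEnv(); a minimal check an app could use to branch on where it
// is running (this assumes the exported GoogleAuth wrapper, not the internal
// module directly).
const { GoogleAuth } = require('google-auth-library');

async function logRuntimeEnv() {
  const auth = new GoogleAuth();
  const env = await auth.getEnv(); // one of GCPEnv: APP_ENGINE, CLOUD_RUN, COMPUTE_ENGINE, ...
  console.log(`Detected GCP environment: ${env}`);
}

logRuntimeEnv().catch(console.error);
// --- end editor's sketch ---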
+ */ +function isCloudRun() { + return !!process.env.K_CONFIGURATION; +} +async function isKubernetesEngine() { + try { + await gcpMetadata.instance('attributes/cluster-name'); + return true; + } + catch (e) { + return false; + } +} +async function isComputeEngine() { + return gcpMetadata.isAvailable(); +} diff --git a/node_modules/google-auth-library/build/src/auth/executable-response.d.ts b/node_modules/google-auth-library/build/src/auth/executable-response.d.ts new file mode 100644 index 0000000..259d276 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/executable-response.d.ts @@ -0,0 +1,137 @@ +/** + * Interface defining the JSON formatted response of a 3rd party executable + * used by the pluggable auth client. + */ +export interface ExecutableResponseJson { + /** + * The version of the JSON response. Only version 1 is currently supported. + * Always required. + */ + version: number; + /** + * Whether the executable ran successfully. Always required. + */ + success: boolean; + /** + * The epoch time for expiration of the token in seconds, required for + * successful responses. + */ + expiration_time?: number; + /** + * The type of subject token in the response, currently supported values are: + * urn:ietf:params:oauth:token-type:saml2 + * urn:ietf:params:oauth:token-type:id_token + * urn:ietf:params:oauth:token-type:jwt + */ + token_type?: string; + /** + * The error code from the executable, required when unsuccessful. + */ + code?: string; + /** + * The error message from the executable, required when unsuccessful. + */ + message?: string; + /** + * The ID token to be used as a subject token when token_type is id_token or jwt. + */ + id_token?: string; + /** + * The response to be used as a subject token when token_type is saml2. + */ + saml_response?: string; +} +/** + * Defines the response of a 3rd party executable run by the pluggable auth client. + */ +export declare class ExecutableResponse { + /** + * The version of the Executable response. Only version 1 is currently supported. + */ + readonly version: number; + /** + * Whether the executable ran successfully. + */ + readonly success: boolean; + /** + * The epoch time for expiration of the token in seconds. + */ + readonly expirationTime?: number; + /** + * The type of subject token in the response, currently supported values are: + * urn:ietf:params:oauth:token-type:saml2 + * urn:ietf:params:oauth:token-type:id_token + * urn:ietf:params:oauth:token-type:jwt + */ + readonly tokenType?: string; + /** + * The error code from the executable. + */ + readonly errorCode?: string; + /** + * The error message from the executable. + */ + readonly errorMessage?: string; + /** + * The subject token from the executable, format depends on tokenType. + */ + readonly subjectToken?: string; + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson: ExecutableResponseJson); + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid(): boolean; + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired(): boolean; +} +/** + * An error thrown by the ExecutableResponse class. 
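// --- Editor's illustrative sketch (not part of the committed diff) ---
// The JSON contract parsed by the ExecutableResponse class declared above:
// what a pluggable-auth executable is expected to print. Field values are
// hypothetical placeholders that mirror the ExecutableResponseJson interface.
const successfulExecutableOutput = {
  version: 1,
  success: true,
  token_type: 'urn:ietf:params:oauth:token-type:id_token',
  id_token: 'eyJhbGciOi...', // truncated placeholder subject token
  expiration_time: Math.round(Date.now() / 1000) + 3600, // epoch seconds
};

// On failure, `code` and `message` are required instead of a subject token.
const failedExecutableOutput = {
  version: 1,
  success: false,
  code: '401',
  message: 'Caller not authorized.',
};

console.log(JSON.stringify(successfulExecutableOutput), JSON.stringify(failedExecutableOutput));
// --- end editor's sketch ---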
+ */ +export declare class ExecutableResponseError extends Error { + constructor(message: string); +} +/** + * An error thrown when the 'version' field in an executable response is missing or invalid. + */ +export declare class InvalidVersionFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the 'success' field in an executable response is missing or invalid. + */ +export declare class InvalidSuccessFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. + */ +export declare class InvalidExpirationTimeFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the 'token_type' field in an executable response is missing or invalid. + */ +export declare class InvalidTokenTypeFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the 'code' field in an executable response is missing or invalid. + */ +export declare class InvalidCodeFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the 'message' field in an executable response is missing or invalid. + */ +export declare class InvalidMessageFieldError extends ExecutableResponseError { +} +/** + * An error thrown when the subject token in an executable response is missing or invalid. + */ +export declare class InvalidSubjectTokenError extends ExecutableResponseError { +} diff --git a/node_modules/google-auth-library/build/src/auth/executable-response.js b/node_modules/google-auth-library/build/src/auth/executable-response.js new file mode 100644 index 0000000..be4e818 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/executable-response.js @@ -0,0 +1,146 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; +const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; +const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; +const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; +/** + * Defines the response of a 3rd party executable run by the pluggable auth client. + */ +class ExecutableResponse { + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson) { + // Check that the required fields exist in the json response. 
+ if (!responseJson.version) { + throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + } + if (responseJson.success === undefined) { + throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + } + this.version = responseJson.version; + this.success = responseJson.success; + // Validate required fields for a successful response. + if (this.success) { + this.expirationTime = responseJson.expiration_time; + this.tokenType = responseJson.token_type; + // Validate token type field. + if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { + throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + } + // Validate subject token. + if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { + if (!responseJson.saml_response) { + throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); + } + this.subjectToken = responseJson.saml_response; + } + else { + if (!responseJson.id_token) { + throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); + } + this.subjectToken = responseJson.id_token; + } + } + else { + // Both code and message must be provided for unsuccessful responses. + if (!responseJson.code) { + throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + } + if (!responseJson.message) { + throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + } + this.errorCode = responseJson.code; + this.errorMessage = responseJson.message; + } + } + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid() { + return !this.isExpired() && this.success; + } + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired() { + return (this.expirationTime !== undefined && + this.expirationTime < Math.round(Date.now() / 1000)); + } +} +exports.ExecutableResponse = ExecutableResponse; +/** + * An error thrown by the ExecutableResponse class. + */ +class ExecutableResponseError extends Error { + constructor(message) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableResponseError = ExecutableResponseError; +/** + * An error thrown when the 'version' field in an executable response is missing or invalid. + */ +class InvalidVersionFieldError extends ExecutableResponseError { +} +exports.InvalidVersionFieldError = InvalidVersionFieldError; +/** + * An error thrown when the 'success' field in an executable response is missing or invalid. + */ +class InvalidSuccessFieldError extends ExecutableResponseError { +} +exports.InvalidSuccessFieldError = InvalidSuccessFieldError; +/** + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. 
+ */ +class InvalidExpirationTimeFieldError extends ExecutableResponseError { +} +exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; +/** + * An error thrown when the 'token_type' field in an executable response is missing or invalid. + */ +class InvalidTokenTypeFieldError extends ExecutableResponseError { +} +exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; +/** + * An error thrown when the 'code' field in an executable response is missing or invalid. + */ +class InvalidCodeFieldError extends ExecutableResponseError { +} +exports.InvalidCodeFieldError = InvalidCodeFieldError; +/** + * An error thrown when the 'message' field in an executable response is missing or invalid. + */ +class InvalidMessageFieldError extends ExecutableResponseError { +} +exports.InvalidMessageFieldError = InvalidMessageFieldError; +/** + * An error thrown when the subject token in an executable response is missing or invalid. + */ +class InvalidSubjectTokenError extends ExecutableResponseError { +} +exports.InvalidSubjectTokenError = InvalidSubjectTokenError; diff --git a/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.d.ts b/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.d.ts new file mode 100644 index 0000000..bfdcd89 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.d.ts @@ -0,0 +1,78 @@ +import { AuthClient, AuthClientOptions } from './authclient'; +import { Headers } from './oauth2client'; +import { BodyResponseCallback } from '../transporters'; +import { GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import { Credentials } from './credentials'; +import { SharedExternalAccountClientOptions } from './baseexternalclient'; +/** + * The credentials JSON file type for external account authorized user clients. + */ +export declare const EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = "external_account_authorized_user"; +/** + * External Account Authorized User Credentials JSON interface. + */ +export interface ExternalAccountAuthorizedUserClientOptions extends SharedExternalAccountClientOptions { + type: typeof EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE; + client_id: string; + client_secret: string; + refresh_token: string; + token_info_url: string; + revoke_url?: string; +} +/** + * Internal interface for tracking the access token expiration time. + */ +interface CredentialsWithResponse extends Credentials { + res?: GaxiosResponse | null; +} +/** + * External Account Authorized User Client. This is used for OAuth2 credentials + * sourced using external identities through Workforce Identity Federation. + * Obtaining the initial access and refresh token can be done through the + * Google Cloud CLI. + */ +export declare class ExternalAccountAuthorizedUserClient extends AuthClient { + private cachedAccessToken; + private readonly externalAccountAuthorizedUserHandler; + private refreshToken; + /** + * Instantiates an ExternalAccountAuthorizedUserClient instances using the + * provided JSON object loaded from a credentials files. + * An error is throws if the credential is not valid. + * @param options The external account authorized user option object typically + * from the external accoutn authorized user JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. 
+ * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options: ExternalAccountAuthorizedUserClientOptions, additionalOptions?: AuthClientOptions); + getAccessToken(): Promise<{ + token?: string | null; + res?: GaxiosResponse | null; + }>; + getRequestHeaders(): Promise; + request(opts: GaxiosOptions): GaxiosPromise; + request(opts: GaxiosOptions, callback: BodyResponseCallback): void; + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. + */ + protected requestAsync(opts: GaxiosOptions, retry?: boolean): Promise>; + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. + */ + protected refreshAccessTokenAsync(): Promise; + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + private isExpired; +} +export {}; diff --git a/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.js b/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.js new file mode 100644 index 0000000..9158351 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/externalAccountAuthorizedUserClient.js @@ -0,0 +1,236 @@ +"use strict"; +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; +const authclient_1 = require("./authclient"); +const oauth2common_1 = require("./oauth2common"); +const gaxios_1 = require("gaxios"); +const stream = require("stream"); +const baseexternalclient_1 = require("./baseexternalclient"); +/** + * The credentials JSON file type for external account authorized user clients. + */ +exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; +/** + * Handler for token refresh requests sent to the token_url endpoint for external + * authorized user credentials. + */ +class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an ExternalAccountAuthorizedUserHandler instance. + * @param url The URL of the token refresh endpoint. + * @param transporter The transporter to use for the refresh request. + * @param clientAuthentication The client authentication credentials to use + * for the refresh request. 
+ */ + constructor(url, transporter, clientAuthentication) { + super(clientAuthentication); + this.url = url; + this.transporter = transporter; + } + /** + * Requests a new access token from the token_url endpoint using the provided + * refresh token. + * @param refreshToken The refresh token to use to generate a new access token. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @return A promise that resolves with the token refresh response containing + * the requested access token and its expiration time. + */ + async refreshToken(refreshToken, additionalHeaders) { + const values = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + ...additionalHeaders, + }; + const opts = { + url: this.url, + method: 'POST', + headers, + data: values.toString(), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const tokenRefreshResponse = response.data; + tokenRefreshResponse.res = response; + return tokenRefreshResponse; + } + catch (error) { + // Translate error to OAuthError. + if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +/** + * External Account Authorized User Client. This is used for OAuth2 credentials + * sourced using external identities through Workforce Identity Federation. + * Obtaining the initial access and refresh token can be done through the + * Google Cloud CLI. + */ +class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { + /** + * Instantiates an ExternalAccountAuthorizedUserClient instances using the + * provided JSON object loaded from a credentials files. + * An error is throws if the credential is not valid. + * @param options The external account authorized user option object typically + * from the external accoutn authorized user JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super({ ...options, ...additionalOptions }); + this.refreshToken = options.refresh_token; + const clientAuth = { + confidentialClientType: 'basic', + clientId: options.client_id, + clientSecret: options.client_secret, + }; + this.externalAccountAuthorizedUserHandler = + new ExternalAccountAuthorizedUserHandler(options.token_url, this.transporter, clientAuth); + this.cachedAccessToken = null; + this.quotaProjectId = options.quota_project_id; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? 
void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; + } + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); + if (options.universe_domain) { + this.universeDomain = options.universe_domain; + } + } + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param retry Whether the current attempt is a retry after a failed attempt. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, retry = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. + */ + async refreshAccessTokenAsync() { + // Refresh the access token using the refresh token. + const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); + this.cachedAccessToken = { + access_token: refreshResponse.access_token, + expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, + res: refreshResponse.res, + }; + if (refreshResponse.refresh_token !== undefined) { + this.refreshToken = refreshResponse.refresh_token; + } + return this.cachedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. 
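getAccessToken() above refreshes only when no token is cached or the cached one is inside the eager-refresh window; the expiry test it delegates to is reproduced stand-alone below. The five-minute default mirrors the library's EXPIRATION_TIME_OFFSET and is an assumption of this sketch:

// Stand-alone version of the expiry check used by the client (illustrative only).
function isExpired(credentials, eagerRefreshThresholdMillis = 5 * 60 * 1000) {
  const now = Date.now();
  return credentials.expiry_date
    ? now >= credentials.expiry_date - eagerRefreshThresholdMillis
    : false;
}

console.log(isExpired({ expiry_date: Date.now() + 1000 })); // true: inside the refresh window
console.log(isExpired({}));                                 // false: no expiry recorded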
+ * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(credentials) { + const now = new Date().getTime(); + return credentials.expiry_date + ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; diff --git a/node_modules/google-auth-library/build/src/auth/externalclient.d.ts b/node_modules/google-auth-library/build/src/auth/externalclient.d.ts new file mode 100644 index 0000000..90caac5 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/externalclient.d.ts @@ -0,0 +1,26 @@ +import { BaseExternalAccountClient } from './baseexternalclient'; +import { IdentityPoolClientOptions } from './identitypoolclient'; +import { AwsClientOptions } from './awsclient'; +import { PluggableAuthClientOptions } from './pluggable-auth-client'; +import { AuthClientOptions } from './authclient'; +export type ExternalAccountClientOptions = IdentityPoolClientOptions | AwsClientOptions | PluggableAuthClientOptions; +/** + * Dummy class with no constructor. Developers are expected to use fromJSON. + */ +export declare class ExternalAccountClient { + constructor(); + /** + * This static method will instantiate the + * corresponding type of external account credential depending on the + * underlying credential source. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + * @return A BaseExternalAccountClient instance or null if the options + * provided do not correspond to an external account credential. + */ + static fromJSON(options: ExternalAccountClientOptions, additionalOptions?: AuthClientOptions): BaseExternalAccountClient | null; +} diff --git a/node_modules/google-auth-library/build/src/auth/externalclient.js b/node_modules/google-auth-library/build/src/auth/externalclient.js new file mode 100644 index 0000000..54860ea --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/externalclient.js @@ -0,0 +1,64 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ExternalAccountClient = void 0; +const baseexternalclient_1 = require("./baseexternalclient"); +const identitypoolclient_1 = require("./identitypoolclient"); +const awsclient_1 = require("./awsclient"); +const pluggable_auth_client_1 = require("./pluggable-auth-client"); +/** + * Dummy class with no constructor. Developers are expected to use fromJSON. 
+ */
+class ExternalAccountClient {
+    constructor() {
+        throw new Error('ExternalAccountClients should be initialized via: ' +
+            'ExternalAccountClient.fromJSON(), ' +
+            'directly via explicit constructors, eg. ' +
+            'new AwsClient(options), new IdentityPoolClient(options), new' +
+            'PluggableAuthClientOptions, or via ' +
+            'new GoogleAuth(options).getClient()');
+    }
+    /**
+     * This static method will instantiate the
+     * corresponding type of external account credential depending on the
+     * underlying credential source.
+     * @param options The external account options object typically loaded
+     * from the external account JSON credential file.
+     * @param additionalOptions **DEPRECATED, all options are available in the
+     * `options` parameter.** Optional additional behavior customization options.
+     * These currently customize expiration threshold time and whether to retry
+     * on 401/403 API request errors.
+     * @return A BaseExternalAccountClient instance or null if the options
+     * provided do not correspond to an external account credential.
+     */
+    static fromJSON(options, additionalOptions) {
+        var _a, _b;
+        if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {
+            if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) {
+                return new awsclient_1.AwsClient(options, additionalOptions);
+            }
+            else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) {
+                return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions);
+            }
+            else {
+                return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions);
+            }
+        }
+        else {
+            return null;
+        }
+    }
+}
+exports.ExternalAccountClient = ExternalAccountClient;
diff --git a/node_modules/google-auth-library/build/src/auth/googleauth.d.ts b/node_modules/google-auth-library/build/src/auth/googleauth.d.ts
new file mode 100644
index 0000000..19875c7
--- /dev/null
+++ b/node_modules/google-auth-library/build/src/auth/googleauth.d.ts
@@ -0,0 +1,342 @@
+/// <reference types="node" />
+import { GaxiosOptions, GaxiosResponse } from 'gaxios';
+import * as stream from 'stream';
+import { DefaultTransporter, Transporter } from '../transporters';
+import { Compute } from './computeclient';
+import { CredentialBody, ImpersonatedJWTInput, JWTInput } from './credentials';
+import { IdTokenClient } from './idtokenclient';
+import { GCPEnv } from './envDetect';
+import { JWT, JWTOptions } from './jwtclient';
+import { Headers, OAuth2ClientOptions } from './oauth2client';
+import { UserRefreshClient, UserRefreshClientOptions } from './refreshclient';
+import { Impersonated, ImpersonatedOptions } from './impersonated';
+import { ExternalAccountClientOptions } from './externalclient';
+import { BaseExternalAccountClient } from './baseexternalclient';
+import { AuthClient, AuthClientOptions } from './authclient';
+import { ExternalAccountAuthorizedUserClient } from './externalAccountAuthorizedUserClient';
+/**
+ * Defines all types of explicit clients that are determined via ADC JSON
+ * config file.
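ExternalAccountClient.fromJSON() above picks the concrete client from credential_source: an environment_id selects AwsClient, an executable selects PluggableAuthClient, and anything else falls through to IdentityPoolClient. A hedged sketch, with placeholder identifiers throughout:

const { ExternalAccountClient } = require('google-auth-library');

// File-sourced workload identity credential; credential_source has neither environment_id
// nor executable, so an IdentityPoolClient is returned. All identifiers are placeholders.
const client = ExternalAccountClient.fromJSON({
  type: 'external_account',
  audience: '//iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/POOL_ID/providers/PROVIDER_ID',
  subject_token_type: 'urn:ietf:params:oauth:token-type:jwt',
  token_url: 'https://sts.googleapis.com/v1/token',
  credential_source: { file: '/var/run/secrets/subject-token.jwt' },
});
console.log(client === null); // false for a well-formed external_account credential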
+ */ +export type JSONClient = JWT | UserRefreshClient | BaseExternalAccountClient | ExternalAccountAuthorizedUserClient | Impersonated; +export interface ProjectIdCallback { + (err?: Error | null, projectId?: string | null): void; +} +export interface CredentialCallback { + (err: Error | null, result?: JSONClient): void; +} +export interface ADCCallback { + (err: Error | null, credential?: AuthClient, projectId?: string | null): void; +} +export interface ADCResponse { + credential: AuthClient; + projectId: string | null; +} +export interface GoogleAuthOptions { + /** + * An `AuthClient` to use + */ + authClient?: T; + /** + * Path to a .json, .pem, or .p12 key file + */ + keyFilename?: string; + /** + * Path to a .json, .pem, or .p12 key file + */ + keyFile?: string; + /** + * Object containing client_email and private_key properties, or the + * external account client options. + */ + credentials?: JWTInput | ExternalAccountClientOptions; + /** + * Options object passed to the constructor of the client + */ + clientOptions?: JWTOptions | OAuth2ClientOptions | UserRefreshClientOptions | ImpersonatedOptions; + /** + * Required scopes for the desired API request + */ + scopes?: string | string[]; + /** + * Your project ID. + */ + projectId?: string; + /** + * The default service domain for a given Cloud universe. + * + * This is an ergonomic equivalent to {@link clientOptions}'s `universeDomain` + * property and will be set for all generated {@link AuthClient}s. + */ + universeDomain?: string; +} +export declare const CLOUD_SDK_CLIENT_ID = "764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com"; +export declare class GoogleAuth { + transporter?: Transporter; + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. + * @private + */ + private checkIsGCE?; + useJWTAccessWithScope?: boolean; + defaultServicePath?: string; + get isGCE(): boolean | undefined; + private _findProjectIdPromise?; + private _cachedProjectId?; + jsonContent: JWTInput | ExternalAccountClientOptions | null; + cachedCredential: JSONClient | Impersonated | Compute | T | null; + /** + * Scopes populated by the client library by default. We differentiate between + * these and user defined scopes when deciding whether to use a self-signed JWT. + */ + defaultScopes?: string | string[]; + private keyFilename?; + private scopes?; + private clientOptions; + /** + * Export DefaultTransporter as a static property of the class. + */ + static DefaultTransporter: typeof DefaultTransporter; + /** + * Configuration is resolved in the following order of precedence: + * - {@link GoogleAuthOptions.credentials `credentials`} + * - {@link GoogleAuthOptions.keyFilename `keyFilename`} + * - {@link GoogleAuthOptions.keyFile `keyFile`} + * + * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the + * {@link AuthClient `AuthClient`s}. + * + * @param opts + */ + constructor(opts?: GoogleAuthOptions); + setGapicJWTValues(client: JWT): void; + /** + * Obtains the default project ID for the application. 
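GoogleAuthOptions above resolves credentials in the order credentials, then keyFilename, then keyFile, with clientOptions forwarded to whichever AuthClient is built. A typical construction, using a placeholder key path:

const { GoogleAuth } = require('google-auth-library');

const auth = new GoogleAuth({
  keyFilename: '/path/to/service-account.json', // placeholder; omit to fall back to ADC
  scopes: ['https://www.googleapis.com/auth/cloud-platform'],
});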
+ * + * Retrieves in the following order of precedence: + * - The `projectId` provided in this object's construction + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + */ + getProjectId(): Promise; + getProjectId(callback: ProjectIdCallback): void; + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + private getProjectIdOptional; + private findAndCacheProjectId; + private getProjectIdAsync; + /** + * Retrieves a universe domain from the metadata server via + * {@link gcpMetadata.universe}. + * + * @returns a universe domain + */ + getUniverseDomainFromMetadataServer(): Promise; + /** + * Retrieves, caches, and returns the universe domain in the following order + * of precedence: + * - The universe domain in {@link GoogleAuth.clientOptions} + * - An existing or ADC {@link AuthClient}'s universe domain + * - {@link gcpMetadata.universe}, if {@link Compute} client + * + * @returns The universe domain + */ + getUniverseDomain(): Promise; + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + private getAnyScopes; + /** + * Obtains the default service-level credentials for the application. + * @param callback Optional callback. + * @returns Promise that resolves with the ADCResponse (if no callback was + * passed). + */ + getApplicationDefault(): Promise; + getApplicationDefault(callback: ADCCallback): void; + getApplicationDefault(options: AuthClientOptions): Promise; + getApplicationDefault(options: AuthClientOptions, callback: ADCCallback): void; + private getApplicationDefaultAsync; + private prepareAndCacheADC; + /** + * Determines whether the auth layer is running on Google Compute Engine. + * Checks for GCP Residency, then fallback to checking if metadata server + * is available. + * + * @returns A promise that resolves with the boolean. + * @api private + */ + _checkIsGCE(): Promise; + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. + * @api private + */ + _tryGetApplicationCredentialsFromEnvironmentVariable(options?: AuthClientOptions): Promise; + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + _tryGetApplicationCredentialsFromWellKnownFile(options?: AuthClientOptions): Promise; + /** + * Attempts to load default credentials from a file at the given path.. + * @param filePath The path to the file to read. + * @returns Promise that resolves with the OAuth2Client + * @api private + */ + _getApplicationCredentialsFromFilePath(filePath: string, options?: AuthClientOptions): Promise; + /** + * Create a credentials instance using a given impersonated input options. + * @param json The impersonated input object. + * @returns JWT or UserRefresh Client with data + */ + fromImpersonatedJSON(json: ImpersonatedJWTInput): Impersonated; + /** + * Create a credentials instance using the given input options. + * @param json The input object. 
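getProjectId() above checks the value passed to the constructor, then the GCLOUD_PROJECT/GOOGLE_CLOUD_PROJECT environment variables, then the GOOGLE_APPLICATION_CREDENTIALS file, gcloud config, and finally the GCE metadata server. For example:

const { GoogleAuth } = require('google-auth-library');

process.env.GOOGLE_CLOUD_PROJECT = 'my-project'; // placeholder; wins before the ADC file and gcloud
new GoogleAuth().getProjectId().then(id => console.log(id)); // 'my-project'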
+ * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json: JWTInput | ImpersonatedJWTInput, options?: AuthClientOptions): JSONClient; + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + private _cacheClientFromJSON; + /** + * Create a credentials instance using the given input stream. + * @param inputStream The input stream. + * @param callback Optional callback. + */ + fromStream(inputStream: stream.Readable): Promise; + fromStream(inputStream: stream.Readable, callback: CredentialCallback): void; + fromStream(inputStream: stream.Readable, options: AuthClientOptions): Promise; + fromStream(inputStream: stream.Readable, options: AuthClientOptions, callback: CredentialCallback): void; + private fromStreamAsync; + /** + * Create a credentials instance using the given API key string. + * @param apiKey The API key string + * @param options An optional options object. + * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey: string, options?: AuthClientOptions): JWT; + /** + * Determines whether the current operating system is Windows. + * @api private + */ + private _isWindows; + /** + * Run the Google Cloud SDK command that prints the default project ID + */ + private getDefaultServiceProjectId; + /** + * Loads the project id from environment variables. + * @api private + */ + private getProductionProjectId; + /** + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private + */ + private getFileProjectId; + /** + * Gets the project ID from external account client if available. + */ + private getExternalAccountClientProjectId; + /** + * Gets the Compute Engine project ID if it can be inferred. + */ + private getGCEProjectId; + /** + * The callback function handles a credential object that contains the + * client_email and private_key (if exists). + * getCredentials first checks if the client is using an external account and + * uses the service account email in place of client_email. + * If that doesn't exist, it checks for these values from the user JSON. + * If the user JSON doesn't exist, and the environment is on GCE, it gets the + * client_email from the cloud metadata server. + * @param callback Callback that handles the credential object that contains + * a client_email and optional private key, or the error. + * returned + */ + getCredentials(): Promise; + getCredentials(callback: (err: Error | null, credentials?: CredentialBody) => void): void; + private getCredentialsAsync; + /** + * Automatically obtain an {@link AuthClient `AuthClient`} based on the + * provided configuration. If no options were passed, use Application + * Default Credentials. + */ + getClient(): Promise; + /** + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. + */ + getIdTokenClient(targetAudience: string): Promise; + /** + * Automatically obtain application default credentials, and return + * an access token for making requests. + */ + getAccessToken(): Promise; + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. 
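The client-facing methods declared above (getClient, getAccessToken, request, and friends) are usually driven through a single GoogleAuth instance. A common pattern, with the Cloud Storage JSON API URL used purely as an example endpoint:

const { GoogleAuth } = require('google-auth-library');

const auth = new GoogleAuth({ scopes: 'https://www.googleapis.com/auth/cloud-platform' });

async function listBuckets(projectId) {
  // request() resolves ADC, attaches the Authorization header, and issues the call via gaxios.
  const res = await auth.request({
    url: `https://storage.googleapis.com/storage/v1/b?project=${projectId}`,
  });
  return res.data;
}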
+ */ + getRequestHeaders(url?: string): Promise; + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + authorizeRequest(opts: { + url?: string; + uri?: string; + headers?: Headers; + }): Promise<{ + url?: string | undefined; + uri?: string | undefined; + headers?: Headers | undefined; + }>; + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + request(opts: GaxiosOptions): Promise>; + /** + * Determine the compute environment in which the code is running. + */ + getEnv(): Promise; + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + * @param endpoint A custom endpoint to use. + * + * @example + * ``` + * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); + * ``` + */ + sign(data: string, endpoint?: string): Promise; + private signBlob; +} +export interface SignBlobResponse { + keyId: string; + signedBlob: string; +} diff --git a/node_modules/google-auth-library/build/src/auth/googleauth.js b/node_modules/google-auth-library/build/src/auth/googleauth.js new file mode 100644 index 0000000..29df9f3 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/googleauth.js @@ -0,0 +1,805 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0; +const child_process_1 = require("child_process"); +const fs = require("fs"); +const gcpMetadata = require("gcp-metadata"); +const os = require("os"); +const path = require("path"); +const crypto_1 = require("../crypto/crypto"); +const transporters_1 = require("../transporters"); +const computeclient_1 = require("./computeclient"); +const idtokenclient_1 = require("./idtokenclient"); +const envDetect_1 = require("./envDetect"); +const jwtclient_1 = require("./jwtclient"); +const refreshclient_1 = require("./refreshclient"); +const impersonated_1 = require("./impersonated"); +const externalclient_1 = require("./externalclient"); +const baseexternalclient_1 = require("./baseexternalclient"); +const authclient_1 = require("./authclient"); +const externalAccountAuthorizedUserClient_1 = require("./externalAccountAuthorizedUserClient"); +const util_1 = require("../util"); +exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; +const GoogleAuthExceptionMessages = { + NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. 
\n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' + + 'To learn more about Universe Domain retrieval, visit: \n' + + 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys', +}; +class GoogleAuth { + // Note: this properly is only public to satisify unit tests. + // https://github.com/Microsoft/TypeScript/issues/5228 + get isGCE() { + return this.checkIsGCE; + } + /** + * Configuration is resolved in the following order of precedence: + * - {@link GoogleAuthOptions.credentials `credentials`} + * - {@link GoogleAuthOptions.keyFilename `keyFilename`} + * - {@link GoogleAuthOptions.keyFile `keyFile`} + * + * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the + * {@link AuthClient `AuthClient`s}. + * + * @param opts + */ + constructor(opts) { + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. + * @private + */ + this.checkIsGCE = undefined; + // To save the contents of the JSON credential file + this.jsonContent = null; + this.cachedCredential = null; + this.clientOptions = {}; + opts = opts || {}; + this._cachedProjectId = opts.projectId || null; + this.cachedCredential = opts.authClient || null; + this.keyFilename = opts.keyFilename || opts.keyFile; + this.scopes = opts.scopes; + this.jsonContent = opts.credentials || null; + this.clientOptions = opts.clientOptions || {}; + if (opts.universeDomain) { + this.clientOptions.universeDomain = opts.universeDomain; + } + } + // GAPIC client libraries should always use self-signed JWTs. The following + // variables are set on the JWT client in order to indicate the type of library, + // and sign the JWT with the correct audience and scopes (if not supplied). + setGapicJWTValues(client) { + client.defaultServicePath = this.defaultServicePath; + client.useJWTAccessWithScope = this.useJWTAccessWithScope; + client.defaultScopes = this.defaultScopes; + } + getProjectId(callback) { + if (callback) { + this.getProjectIdAsync().then(r => callback(null, r), callback); + } + else { + return this.getProjectIdAsync(); + } + } + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + async getProjectIdOptional() { + try { + return await this.getProjectId(); + } + catch (e) { + if (e instanceof Error && + e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { + return null; + } + else { + throw e; + } + } + } + /* + * A private method for finding and caching a projectId. 
+ * + * Supports environments in order of precedence: + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + * + * @returns projectId + */ + async findAndCacheProjectId() { + let projectId = null; + projectId || (projectId = await this.getProductionProjectId()); + projectId || (projectId = await this.getFileProjectId()); + projectId || (projectId = await this.getDefaultServiceProjectId()); + projectId || (projectId = await this.getGCEProjectId()); + projectId || (projectId = await this.getExternalAccountClientProjectId()); + if (projectId) { + this._cachedProjectId = projectId; + return projectId; + } + else { + throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); + } + } + async getProjectIdAsync() { + if (this._cachedProjectId) { + return this._cachedProjectId; + } + if (!this._findProjectIdPromise) { + this._findProjectIdPromise = this.findAndCacheProjectId(); + } + return this._findProjectIdPromise; + } + /** + * Retrieves a universe domain from the metadata server via + * {@link gcpMetadata.universe}. + * + * @returns a universe domain + */ + async getUniverseDomainFromMetadataServer() { + var _a; + let universeDomain; + try { + universeDomain = await gcpMetadata.universe('universe_domain'); + universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + catch (e) { + if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) { + universeDomain = authclient_1.DEFAULT_UNIVERSE; + } + else { + throw e; + } + } + return universeDomain; + } + /** + * Retrieves, caches, and returns the universe domain in the following order + * of precedence: + * - The universe domain in {@link GoogleAuth.clientOptions} + * - An existing or ADC {@link AuthClient}'s universe domain + * - {@link gcpMetadata.universe}, if {@link Compute} client + * + * @returns The universe domain + */ + async getUniverseDomain() { + let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); + try { + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); + } + catch (_a) { + // client or ADC is not available + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + return universeDomain; + } + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + getAnyScopes() { + return this.scopes || this.defaultScopes; + } + getApplicationDefault(optionsOrCallback = {}, callback) { + let options; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); + } + else { + return this.getApplicationDefaultAsync(options); + } + } + async getApplicationDefaultAsync(options = {}) { + // If we've already got a cached credential, return it. + // This will also preserve one's configured quota project, in case they + // set one directly on the credential previously. 
+ if (this.cachedCredential) { + return await this.prepareAndCacheADC(this.cachedCredential); + } + // Since this is a 'new' ADC to cache we will use the environment variable + // if it's available. We prefer this value over the value from ADC. + const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT']; + let credential; + // Check for the existence of a local environment variable pointing to the + // location of the credential file. This is typically used in local + // developer scenarios. + credential = + await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); + } + // Look in the well-known credential file location. + credential = + await this._tryGetApplicationCredentialsFromWellKnownFile(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); + } + // Determine if we're running on GCE. + if (await this._checkIsGCE()) { + // set universe domain for Compute client + if (!(0, util_1.originalOrCamelOptions)(options).get('universe_domain')) { + options.universeDomain = + await this.getUniverseDomainFromMetadataServer(); + } + options.scopes = this.getAnyScopes(); + return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride); + } + throw new Error('Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.'); + } + async prepareAndCacheADC(credential, quotaProjectIdOverride) { + const projectId = await this.getProjectIdOptional(); + if (quotaProjectIdOverride) { + credential.quotaProjectId = quotaProjectIdOverride; + } + this.cachedCredential = credential; + return { credential, projectId }; + } + /** + * Determines whether the auth layer is running on Google Compute Engine. + * Checks for GCP Residency, then fallback to checking if metadata server + * is available. + * + * @returns A promise that resolves with the boolean. + * @api private + */ + async _checkIsGCE() { + if (this.checkIsGCE === undefined) { + this.checkIsGCE = + gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); + } + return this.checkIsGCE; + } + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. 
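The environment-variable branch above runs before the well-known gcloud file and the metadata server, so pointing GOOGLE_APPLICATION_CREDENTIALS at a key file is enough for local development. A sketch with a placeholder path:

const { GoogleAuth } = require('google-auth-library');

process.env.GOOGLE_APPLICATION_CREDENTIALS = '/path/to/key.json'; // placeholder
new GoogleAuth()
  .getApplicationDefault()
  .then(({ credential, projectId }) => console.log(credential.constructor.name, projectId));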
+ * @api private + */ + async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { + const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || + process.env['google_application_credentials']; + if (!credentialsPath || credentialsPath.length === 0) { + return null; + } + try { + return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + } + catch (e) { + if (e instanceof Error) { + e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; + } + throw e; + } + } + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromWellKnownFile(options) { + // First, figure out the location of the file, depending upon the OS type. + let location = null; + if (this._isWindows()) { + // Windows + location = process.env['APPDATA']; + } + else { + // Linux or Mac + const home = process.env['HOME']; + if (home) { + location = path.join(home, '.config'); + } + } + // If we found the root path, expand it. + if (location) { + location = path.join(location, 'gcloud', 'application_default_credentials.json'); + if (!fs.existsSync(location)) { + location = null; + } + } + // The file does not exist. + if (!location) { + return null; + } + // The file seems to exist. Try to use it. + const client = await this._getApplicationCredentialsFromFilePath(location, options); + return client; + } + /** + * Attempts to load default credentials from a file at the given path.. + * @param filePath The path to the file to read. + * @returns Promise that resolves with the OAuth2Client + * @api private + */ + async _getApplicationCredentialsFromFilePath(filePath, options = {}) { + // Make sure the path looks like a string. + if (!filePath || filePath.length === 0) { + throw new Error('The file path is invalid.'); + } + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = fs.realpathSync(filePath); + if (!fs.lstatSync(filePath).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + // Now open a read stream on the file, and parse it. + const readStream = fs.createReadStream(filePath); + return this.fromStream(readStream, options); + } + /** + * Create a credentials instance using a given impersonated input options. + * @param json The impersonated input object. 
+ * @returns JWT or UserRefresh Client with data + */ + fromImpersonatedJSON(json) { + var _a, _b, _c, _d, _e; + if (!json) { + throw new Error('Must pass in a JSON object containing an impersonated refresh token'); + } + if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); + } + if (!json.source_credentials) { + throw new Error('The incoming JSON object does not contain a source_credentials field'); + } + if (!json.service_account_impersonation_url) { + throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); + } + // Create source client for impersonation + const sourceClient = new refreshclient_1.UserRefreshClient(); + sourceClient.fromJSON(json.source_credentials); + if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85} + **/ + throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`); + } + // Extreact service account from service_account_impersonation_url + const targetPrincipal = (_c = (_b = /(?[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target; + if (!targetPrincipal) { + throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); + } + const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : []; + const client = new impersonated_1.Impersonated({ + ...json, + delegates: (_e = json.delegates) !== null && _e !== void 0 ? _e : [], + sourceClient: sourceClient, + targetPrincipal: targetPrincipal, + targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], + }); + return client; + } + /** + * Create a credentials instance using the given input options. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json, options = {}) { + let client; + // user's preferred universe domain + const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { + client = new refreshclient_1.UserRefreshClient(options); + client.fromJSON(json); + } + else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + client = this.fromImpersonatedJSON(json); + } + else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + client = externalclient_1.ExternalAccountClient.fromJSON(json, options); + client.scopes = this.getAnyScopes(); + } + else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { + client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); + } + else { + options.scopes = this.scopes; + client = new jwtclient_1.JWT(options); + this.setGapicJWTValues(client); + client.fromJSON(json); + } + if (preferredUniverseDomain) { + client.universeDomain = preferredUniverseDomain; + } + return client; + } + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. 
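fromImpersonatedJSON() above pulls the target principal out of service_account_impersonation_url with a named capture group (the code reads .groups.target). Written out on its own, the extraction looks like this, with a placeholder service account:

const url =
  'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/sa@my-project.iam.gserviceaccount.com:generateAccessToken';
const target = /(?<target>[^/]+):generateAccessToken$/.exec(url)?.groups?.target;
console.log(target); // 'sa@my-project.iam.gserviceaccount.com'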
+ * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + _cacheClientFromJSON(json, options) { + const client = this.fromJSON(json, options); + // cache both raw data used to instantiate client and client itself. + this.jsonContent = json; + this.cachedCredential = client; + return client; + } + fromStream(inputStream, optionsOrCallback = {}, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); + } + else { + return this.fromStreamAsync(inputStream, options); + } + } + fromStreamAsync(inputStream, options) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the Google auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + try { + const data = JSON.parse(s); + const r = this._cacheClientFromJSON(data, options); + return resolve(r); + } + catch (err) { + // If we failed parsing this.keyFileName, assume that it + // is a PEM or p12 certificate: + if (!this.keyFilename) + throw err; + const client = new jwtclient_1.JWT({ + ...this.clientOptions, + keyFile: this.keyFilename, + }); + this.cachedCredential = client; + this.setGapicJWTValues(client); + return resolve(client); + } + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a credentials instance using the given API key string. + * @param apiKey The API key string + * @param options An optional options object. + * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey, options) { + options = options || {}; + const client = new jwtclient_1.JWT(options); + client.fromAPIKey(apiKey); + return client; + } + /** + * Determines whether the current operating system is Windows. + * @api private + */ + _isWindows() { + const sys = os.platform(); + if (sys && sys.length >= 3) { + if (sys.substring(0, 3).toLowerCase() === 'win') { + return true; + } + } + return false; + } + /** + * Run the Google Cloud SDK command that prints the default project ID + */ + async getDefaultServiceProjectId() { + return new Promise(resolve => { + (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { + if (!err && stdout) { + try { + const projectId = JSON.parse(stdout).configuration.properties.core.project; + resolve(projectId); + return; + } + catch (e) { + // ignore errors + } + } + resolve(null); + }); + }); + } + /** + * Loads the project id from environment variables. + * @api private + */ + getProductionProjectId() { + return (process.env['GCLOUD_PROJECT'] || + process.env['GOOGLE_CLOUD_PROJECT'] || + process.env['gcloud_project'] || + process.env['google_cloud_project']); + } + /** + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private + */ + async getFileProjectId() { + if (this.cachedCredential) { + // Try to read the project ID from the cached credentials file + return this.cachedCredential.projectId; + } + // Ensure the projectId is loaded from the keyFile if available. 
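fromStream()/fromStreamAsync() above parse a JSON key read from any readable stream and cache the resulting client, falling back to treating keyFilename as a PEM or p12 key when JSON parsing fails. For instance:

const fs = require('fs');
const { GoogleAuth } = require('google-auth-library');

const auth = new GoogleAuth();
auth
  .fromStream(fs.createReadStream('/path/to/key.json')) // placeholder path
  .then(client => console.log(client.constructor.name)); // e.g. 'JWT' for a service-account key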
+ if (this.keyFilename) { + const creds = await this.getClient(); + if (creds && creds.projectId) { + return creds.projectId; + } + } + // Try to load a credentials file and read its project ID + const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); + if (r) { + return r.projectId; + } + else { + return null; + } + } + /** + * Gets the project ID from external account client if available. + */ + async getExternalAccountClientProjectId() { + if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + return null; + } + const creds = await this.getClient(); + // Do not suppress the underlying error, as the error could contain helpful + // information for debugging and fixing. This is especially true for + // external account creds as in order to get the project ID, the following + // operations have to succeed: + // 1. Valid credentials file should be supplied. + // 2. Ability to retrieve access tokens from STS token exchange API. + // 3. Ability to exchange for service account impersonated credentials (if + // enabled). + // 4. Ability to get project info using the access token from step 2 or 3. + // Without surfacing the error, it is harder for developers to determine + // which step went wrong. + return await creds.getProjectId(); + } + /** + * Gets the Compute Engine project ID if it can be inferred. + */ + async getGCEProjectId() { + try { + const r = await gcpMetadata.project('project-id'); + return r; + } + catch (e) { + // Ignore any errors + return null; + } + } + getCredentials(callback) { + if (callback) { + this.getCredentialsAsync().then(r => callback(null, r), callback); + } + else { + return this.getCredentialsAsync(); + } + } + async getCredentialsAsync() { + const client = await this.getClient(); + if (client instanceof impersonated_1.Impersonated) { + return { client_email: client.getTargetPrincipal() }; + } + if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { + const serviceAccountEmail = client.getServiceAccountEmail(); + if (serviceAccountEmail) { + return { + client_email: serviceAccountEmail, + universe_domain: client.universeDomain, + }; + } + } + if (this.jsonContent) { + return { + client_email: this.jsonContent.client_email, + private_key: this.jsonContent.private_key, + universe_domain: this.jsonContent.universe_domain, + }; + } + if (await this._checkIsGCE()) { + const [client_email, universe_domain] = await Promise.all([ + gcpMetadata.instance('service-accounts/default/email'), + this.getUniverseDomain(), + ]); + return { client_email, universe_domain }; + } + throw new Error(GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); + } + /** + * Automatically obtain an {@link AuthClient `AuthClient`} based on the + * provided configuration. If no options were passed, use Application + * Default Credentials. + */ + async getClient() { + if (!this.cachedCredential) { + if (this.jsonContent) { + this._cacheClientFromJSON(this.jsonContent, this.clientOptions); + } + else if (this.keyFilename) { + const filePath = path.resolve(this.keyFilename); + const stream = fs.createReadStream(filePath); + await this.fromStreamAsync(stream, this.clientOptions); + } + else { + await this.getApplicationDefaultAsync(this.clientOptions); + } + } + return this.cachedCredential; + } + /** + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. 
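getIdTokenClient(), described above, only works when the underlying credential can mint ID tokens (a service-account key or the metadata server). A common use is calling an IAP- or Cloud Run-protected endpoint, shown here with a placeholder service URL:

const { GoogleAuth } = require('google-auth-library');

async function callProtectedService(url) {
  const auth = new GoogleAuth();
  const client = await auth.getIdTokenClient(url); // the audience is the service URL
  const res = await client.request({ url });
  return res.data;
}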
+ */ + async getIdTokenClient(targetAudience) { + const client = await this.getClient(); + if (!('fetchIdToken' in client)) { + throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); + } + return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); + } + /** + * Automatically obtain application default credentials, and return + * an access token for making requests. + */ + async getAccessToken() { + const client = await this.getClient(); + return (await client.getAccessToken()).token; + } + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. + */ + async getRequestHeaders(url) { + const client = await this.getClient(); + return client.getRequestHeaders(url); + } + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + async authorizeRequest(opts) { + opts = opts || {}; + const url = opts.url || opts.uri; + const client = await this.getClient(); + const headers = await client.getRequestHeaders(url); + opts.headers = Object.assign(opts.headers || {}, headers); + return opts; + } + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async request(opts) { + const client = await this.getClient(); + return client.request(opts); + } + /** + * Determine the compute environment in which the code is running. + */ + getEnv() { + return (0, envDetect_1.getEnv)(); + } + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + * @param endpoint A custom endpoint to use. + * + * @example + * ``` + * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); + * ``` + */ + async sign(data, endpoint) { + const client = await this.getClient(); + const universe = await this.getUniverseDomain(); + endpoint = + endpoint || + `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; + if (client instanceof impersonated_1.Impersonated) { + const signed = await client.sign(data); + return signed.signedBlob; + } + const crypto = (0, crypto_1.createCrypto)(); + if (client instanceof jwtclient_1.JWT && client.key) { + const sign = await crypto.sign(client.key, data); + return sign; + } + const creds = await this.getCredentials(); + if (!creds.client_email) { + throw new Error('Cannot sign data without `client_email`.'); + } + return this.signBlob(crypto, creds.client_email, data, endpoint); + } + async signBlob(crypto, emailOrUniqueId, data, endpoint) { + const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); + const res = await this.request({ + method: 'POST', + url: url.href, + data: { + payload: crypto.encodeBase64StringUtf8(data), + }, + }); + return res.data.signedBlob; + } +} +exports.GoogleAuth = GoogleAuth; +/** + * Export DefaultTransporter as a static property of the class. 
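sign() above uses the locally loaded private key when one is available and otherwise falls back to the IAM Credentials signBlob endpoint built from the universe domain. Roughly, assuming ADC with permission to call that API:

const { GoogleAuth } = require('google-auth-library');

const auth = new GoogleAuth({ scopes: 'https://www.googleapis.com/auth/cloud-platform' });
auth.sign('data-to-sign').then(signature => console.log(signature.length > 0)); // base64 signature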
+ */ +GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; diff --git a/node_modules/google-auth-library/build/src/auth/iam.d.ts b/node_modules/google-auth-library/build/src/auth/iam.d.ts new file mode 100644 index 0000000..93470a4 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/iam.d.ts @@ -0,0 +1,23 @@ +export interface RequestMetadata { + 'x-goog-iam-authority-selector': string; + 'x-goog-iam-authorization-token': string; +} +export declare class IAMAuth { + selector: string; + token: string; + /** + * IAM credentials. + * + * @param selector the iam authority selector + * @param token the token + * @constructor + */ + constructor(selector: string, token: string); + /** + * Acquire the HTTP headers required to make an authenticated request. + */ + getRequestHeaders(): { + 'x-goog-iam-authority-selector': string; + 'x-goog-iam-authorization-token': string; + }; +} diff --git a/node_modules/google-auth-library/build/src/auth/iam.js b/node_modules/google-auth-library/build/src/auth/iam.js new file mode 100644 index 0000000..e837114 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/iam.js @@ -0,0 +1,41 @@ +"use strict"; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IAMAuth = void 0; +class IAMAuth { + /** + * IAM credentials. + * + * @param selector the iam authority selector + * @param token the token + * @constructor + */ + constructor(selector, token) { + this.selector = selector; + this.token = token; + this.selector = selector; + this.token = token; + } + /** + * Acquire the HTTP headers required to make an authenticated request. + */ + getRequestHeaders() { + return { + 'x-goog-iam-authority-selector': this.selector, + 'x-goog-iam-authorization-token': this.token, + }; + } +} +exports.IAMAuth = IAMAuth; diff --git a/node_modules/google-auth-library/build/src/auth/identitypoolclient.d.ts b/node_modules/google-auth-library/build/src/auth/identitypoolclient.d.ts new file mode 100644 index 0000000..7bf7443 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/identitypoolclient.d.ts @@ -0,0 +1,84 @@ +import { BaseExternalAccountClient, BaseExternalAccountClientOptions } from './baseexternalclient'; +import { AuthClientOptions } from './authclient'; +import { SnakeToCamelObject } from '../util'; +type SubjectTokenFormatType = 'json' | 'text'; +/** + * Url-sourced/file-sourced credentials json interface. + * This is used for K8s and Azure workloads. + */ +export interface IdentityPoolClientOptions extends BaseExternalAccountClientOptions { + credential_source: { + file?: string; + url?: string; + headers?: { + [key: string]: string; + }; + format?: { + type: SubjectTokenFormatType; + subject_token_field_name?: string; + }; + }; +} +/** + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. 
+ */ +export declare class IdentityPoolClient extends BaseExternalAccountClient { + private readonly file?; + private readonly url?; + private readonly headers?; + private readonly formatType; + private readonly formatSubjectTokenFieldName?; + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options: IdentityPoolClientOptions | SnakeToCamelObject, additionalOptions?: AuthClientOptions); + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this either retrieves the local credential from a file location (k8s + * workload) or by sending a GET request to a local metadata server (Azure + * workloads). + * @return A promise that resolves with the external subject token. + */ + retrieveSubjectToken(): Promise; + /** + * Looks up the external subject token in the file path provided and + * resolves with that token. + * @param file The file path where the external credential is located. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @return A promise that resolves with the external subject token. + */ + private getTokenFromFile; + /** + * Sends a GET request to the URL provided and resolves with the returned + * external subject token. + * @param url The URL to call to retrieve the subject token. This is typically + * a local metadata server. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @param headers The optional additional headers to send with the request to + * the metadata server url. + * @return A promise that resolves with the external subject token. + */ + private getTokenFromUrl; +} +export {}; diff --git a/node_modules/google-auth-library/build/src/auth/identitypoolclient.js b/node_modules/google-auth-library/build/src/auth/identitypoolclient.js new file mode 100644 index 0000000..6b6edd7 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/identitypoolclient.js @@ -0,0 +1,171 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var _a, _b, _c; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IdentityPoolClient = void 0; +const fs = require("fs"); +const util_1 = require("util"); +const baseexternalclient_1 = require("./baseexternalclient"); +const util_2 = require("../util"); +// fs.readfile is undefined in browser karma tests causing +// `npm run browser-test` to fail as test.oauth2.ts imports this file via +// src/index.ts. +// Fallback to void function to avoid promisify throwing a TypeError. +const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); +const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { })); +const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); +/** + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. + */ +class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_2.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const credentialSourceOpts = (0, util_2.originalOrCamelOptions)(credentialSource); + this.file = credentialSourceOpts.get('file'); + this.url = credentialSourceOpts.get('url'); + this.headers = credentialSourceOpts.get('headers'); + if (this.file && this.url) { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + else if (this.file && !this.url) { + this.credentialSourceType = 'file'; + } + else if (!this.file && this.url) { + this.credentialSourceType = 'url'; + } + else { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + const formatOpts = (0, util_2.originalOrCamelOptions)(credentialSourceOpts.get('format')); + // Text is the default format type. 
+ this.formatType = formatOpts.get('type') || 'text'; + this.formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); + if (this.formatType !== 'json' && this.formatType !== 'text') { + throw new Error(`Invalid credential_source format "${this.formatType}"`); + } + if (this.formatType === 'json' && !this.formatSubjectTokenFieldName) { + throw new Error('Missing subject_token_field_name for JSON credential_source format'); + } + } + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this either retrieves the local credential from a file location (k8s + * workload) or by sending a GET request to a local metadata server (Azure + * workloads). + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + if (this.file) { + return await this.getTokenFromFile(this.file, this.formatType, this.formatSubjectTokenFieldName); + } + return await this.getTokenFromUrl(this.url, this.formatType, this.formatSubjectTokenFieldName, this.headers); + } + /** + * Looks up the external subject token in the file path provided and + * resolves with that token. + * @param file The file path where the external credential is located. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @return A promise that resolves with the external subject token. + */ + async getTokenFromFile(filePath, formatType, formatSubjectTokenFieldName) { + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = await realpath(filePath); + if (!(await lstat(filePath)).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + let subjectToken; + const rawText = await readFile(filePath, { encoding: 'utf8' }); + if (formatType === 'text') { + subjectToken = rawText; + } + else if (formatType === 'json' && formatSubjectTokenFieldName) { + const json = JSON.parse(rawText); + subjectToken = json[formatSubjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source file'); + } + return subjectToken; + } + /** + * Sends a GET request to the URL provided and resolves with the returned + * external subject token. + * @param url The URL to call to retrieve the subject token. This is typically + * a local metadata server. + * @param formatType The token file or URL response type (JSON or text). + * @param formatSubjectTokenFieldName For JSON response types, this is the + * subject_token field name. For Azure, this is access_token. For text + * response types, this is ignored. + * @param headers The optional additional headers to send with the request to + * the metadata server url. + * @return A promise that resolves with the external subject token. 
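To make the file-sourced flow described above concrete, here is a minimal sketch of constructing an IdentityPoolClient directly from an external-account style options object. The audience, token file path, and other values are hypothetical; in practice such clients are usually built by GoogleAuth from an external_account JSON credential file.

    const {IdentityPoolClient} = require('google-auth-library');

    // Hypothetical workload identity federation options.
    const client = new IdentityPoolClient({
        type: 'external_account',
        audience: '//iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/my-pool/providers/my-provider',
        subject_token_type: 'urn:ietf:params:oauth:token-type:jwt',
        token_url: 'https://sts.googleapis.com/v1/token',
        credential_source: {
            file: '/var/run/service-account/token', // K8s-style token file (assumed path)
            format: {type: 'text'},                 // 'text' is the default, shown for clarity
        },
    });

    async function getSubjectToken() {
        // Reads the token file above; getAccessToken() (inherited from
        // BaseExternalAccountClient) would then exchange it at the STS token_url.
        return await client.retrieveSubjectToken();
    }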
+ */ + async getTokenFromUrl(url, formatType, formatSubjectTokenFieldName, headers) { + const opts = { + url, + method: 'GET', + headers, + responseType: formatType, + }; + let subjectToken; + if (formatType === 'text') { + const response = await this.transporter.request(opts); + subjectToken = response.data; + } + else if (formatType === 'json' && formatSubjectTokenFieldName) { + const response = await this.transporter.request(opts); + subjectToken = response.data[formatSubjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source URL'); + } + return subjectToken; + } +} +exports.IdentityPoolClient = IdentityPoolClient; diff --git a/node_modules/google-auth-library/build/src/auth/idtokenclient.d.ts b/node_modules/google-auth-library/build/src/auth/idtokenclient.d.ts new file mode 100644 index 0000000..311d29d --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/idtokenclient.d.ts @@ -0,0 +1,27 @@ +import { OAuth2Client, OAuth2ClientOptions, RequestMetadataResponse } from './oauth2client'; +export interface IdTokenOptions extends OAuth2ClientOptions { + /** + * The client to make the request to fetch an ID token. + */ + idTokenProvider: IdTokenProvider; + /** + * The audience to use when requesting an ID token. + */ + targetAudience: string; +} +export interface IdTokenProvider { + fetchIdToken: (targetAudience: string) => Promise; +} +export declare class IdTokenClient extends OAuth2Client { + targetAudience: string; + idTokenProvider: IdTokenProvider; + /** + * Google ID Token client + * + * Retrieve ID token from the metadata server. + * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server + */ + constructor(options: IdTokenOptions); + protected getRequestMetadataAsync(url?: string | null): Promise; + private getIdTokenExpiryDate; +} diff --git a/node_modules/google-auth-library/build/src/auth/idtokenclient.js b/node_modules/google-auth-library/build/src/auth/idtokenclient.js new file mode 100644 index 0000000..77e6fa3 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/idtokenclient.js @@ -0,0 +1,55 @@ +"use strict"; +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IdTokenClient = void 0; +const oauth2client_1 = require("./oauth2client"); +class IdTokenClient extends oauth2client_1.OAuth2Client { + /** + * Google ID Token client + * + * Retrieve ID token from the metadata server. 
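The usual way to obtain an IdTokenClient is through GoogleAuth, which wires up the metadata-server (or service-account) ID-token provider for you. A minimal sketch, with a placeholder Cloud Run/IAP-style audience URL:

    const {GoogleAuth} = require('google-auth-library');

    async function callProtectedService() {
        const targetAudience = 'https://my-protected-service.example.com'; // placeholder audience
        const auth = new GoogleAuth();
        const client = await auth.getIdTokenClient(targetAudience);
        // The client refreshes credentials.id_token as needed (see
        // getRequestMetadataAsync below) and attaches it as an Authorization header.
        const res = await client.request({url: targetAudience});
        return res.data;
    }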
+ * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server + */ + constructor(options) { + super(options); + this.targetAudience = options.targetAudience; + this.idTokenProvider = options.idTokenProvider; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + if (!this.credentials.id_token || + !this.credentials.expiry_date || + this.isTokenExpiring()) { + const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); + this.credentials = { + id_token: idToken, + expiry_date: this.getIdTokenExpiryDate(idToken), + }; + } + const headers = { + Authorization: 'Bearer ' + this.credentials.id_token, + }; + return { headers }; + } + getIdTokenExpiryDate(idToken) { + const payloadB64 = idToken.split('.')[1]; + if (payloadB64) { + const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); + return payload.exp * 1000; + } + } +} +exports.IdTokenClient = IdTokenClient; diff --git a/node_modules/google-auth-library/build/src/auth/impersonated.d.ts b/node_modules/google-auth-library/build/src/auth/impersonated.d.ts new file mode 100644 index 0000000..33b6327 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/impersonated.d.ts @@ -0,0 +1,127 @@ +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { GetTokenResponse, OAuth2Client, OAuth2ClientOptions } from './oauth2client'; +import { AuthClient } from './authclient'; +import { IdTokenProvider } from './idtokenclient'; +import { SignBlobResponse } from './googleauth'; +export interface ImpersonatedOptions extends OAuth2ClientOptions { + /** + * Client used to perform exchange for impersonated client. + */ + sourceClient?: AuthClient; + /** + * The service account to impersonate. + */ + targetPrincipal?: string; + /** + * Scopes to request during the authorization grant. + */ + targetScopes?: string[]; + /** + * The chained list of delegates required to grant the final access_token. + */ + delegates?: string[]; + /** + * Number of seconds the delegated credential should be valid. + */ + lifetime?: number | 3600; + /** + * API endpoint to fetch token from. + */ + endpoint?: string; +} +export declare const IMPERSONATED_ACCOUNT_TYPE = "impersonated_service_account"; +export interface TokenResponse { + accessToken: string; + expireTime: string; +} +export interface FetchIdTokenOptions { + /** + * Include the service account email in the token. + * If set to `true`, the token will contain `email` and `email_verified` claims. + */ + includeEmail: boolean; +} +export interface FetchIdTokenResponse { + /** The OpenId Connect ID token. */ + token: string; +} +export declare class Impersonated extends OAuth2Client implements IdTokenProvider { + private sourceClient; + private targetPrincipal; + private targetScopes; + private delegates; + private lifetime; + private endpoint; + /** + * Impersonated service account credentials. 
+ * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. + * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. + */ + constructor(options?: ImpersonatedOptions); + /** + * Signs some bytes. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} + * @param blobToSign String to sign. + * @return denoting the keyyID and signedBlob in base64 string + */ + sign(blobToSign: string): Promise; + /** The service account email to be impersonated. */ + getTargetPrincipal(): string; + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + protected refreshToken(refreshToken?: string | null): Promise; + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + fetchIdToken(targetAudience: string, options?: FetchIdTokenOptions): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/impersonated.js b/node_modules/google-auth-library/build/src/auth/impersonated.js new file mode 100644 index 0000000..4eee113 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/impersonated.js @@ -0,0 +1,171 @@ +"use strict"; +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; +const oauth2client_1 = require("./oauth2client"); +const gaxios_1 = require("gaxios"); +exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; +class Impersonated extends oauth2client_1.OAuth2Client { + /** + * Impersonated service account credentials. + * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. + * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f; + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { + expiry_date: 1, + refresh_token: 'impersonated-placeholder', + }; + this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); + this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; + this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; + this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; + this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; + this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : 'https://iamcredentials.googleapis.com'; + } + /** + * Signs some bytes. 
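A minimal sketch of the impersonation flow the constructor above sets up. The target service account, project, and scopes are placeholders; the source client here comes from Application Default Credentials.

    const {GoogleAuth, Impersonated} = require('google-auth-library');

    async function getImpersonatedToken() {
        const auth = new GoogleAuth({
            scopes: ['https://www.googleapis.com/auth/cloud-platform'],
        });
        const sourceClient = await auth.getClient();

        const impersonated = new Impersonated({
            sourceClient,
            targetPrincipal: 'target-sa@my-project.iam.gserviceaccount.com', // placeholder
            targetScopes: ['https://www.googleapis.com/auth/cloud-platform'],
            lifetime: 3600, // seconds; the default shown above
            delegates: [],
        });

        // refreshToken() below calls the IAM Credentials generateAccessToken endpoint.
        const {token} = await impersonated.getAccessToken();
        return token;
    }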
+ * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} + * @param blobToSign String to sign. + * @return denoting the keyyID and signedBlob in base64 string + */ + async sign(blobToSign) { + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:signBlob`; + const body = { + delegates: this.delegates, + payload: Buffer.from(blobToSign).toString('base64'), + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + return res.data; + } + /** The service account email to be impersonated. */ + getTargetPrincipal() { + return this.targetPrincipal; + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshToken(refreshToken) { + var _a, _b, _c, _d, _e, _f; + try { + await this.sourceClient.getAccessToken(); + const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; + const u = `${this.endpoint}/v1/${name}:generateAccessToken`; + const body = { + delegates: this.delegates, + scope: this.targetScopes, + lifetime: this.lifetime + 's', + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + const tokenResponse = res.data; + this.credentials.access_token = tokenResponse.accessToken; + this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); + return { + tokens: this.credentials, + res, + }; + } + catch (error) { + if (!(error instanceof Error)) + throw error; + let status = 0; + let message = ''; + if (error instanceof gaxios_1.GaxiosError) { + status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; + message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; + } + if (status && message) { + error.message = `${status}: unable to impersonate: ${message}`; + throw error; + } + else { + error.message = `unable to impersonate: ${error}`; + throw error; + } + } + } + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + async fetchIdToken(targetAudience, options) { + var _a; + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:generateIdToken`; + const body = { + delegates: this.delegates, + audience: targetAudience, + includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? 
_a : true, + }; + const res = await this.sourceClient.request({ + url: u, + data: body, + method: 'POST', + }); + return res.data.token; + } +} +exports.Impersonated = Impersonated; diff --git a/node_modules/google-auth-library/build/src/auth/jwtaccess.d.ts b/node_modules/google-auth-library/build/src/auth/jwtaccess.d.ts new file mode 100644 index 0000000..fb60f81 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/jwtaccess.d.ts @@ -0,0 +1,63 @@ +/// +import * as stream from 'stream'; +import { JWTInput } from './credentials'; +import { Headers } from './oauth2client'; +export interface Claims { + [index: string]: string; +} +export declare class JWTAccess { + email?: string | null; + key?: string | null; + keyId?: string | null; + projectId?: string; + eagerRefreshThresholdMillis: number; + private cache; + /** + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. + */ + constructor(email?: string | null, key?: string | null, keyId?: string | null, eagerRefreshThresholdMillis?: number); + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url?: string, scopes?: string | string[]): string; + /** + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. + */ + getRequestHeaders(url?: string, additionalClaims?: Claims, scopes?: string | string[]): Headers; + /** + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. + */ + private static getExpirationTime; + /** + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json: JWTInput): void; + /** + * Create a JWTAccess credentials instance using the given input stream. + * @param inputStream The input stream. + * @param callback Optional callback. + */ + fromStream(inputStream: stream.Readable): Promise; + fromStream(inputStream: stream.Readable, callback: (err?: Error) => void): void; + private fromStreamAsync; +} diff --git a/node_modules/google-auth-library/build/src/auth/jwtaccess.js b/node_modules/google-auth-library/build/src/auth/jwtaccess.js new file mode 100644 index 0000000..664387b --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/jwtaccess.js @@ -0,0 +1,192 @@ +"use strict"; +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
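Before the compiled implementation, a short sketch of how the JWTAccess class declared above is typically used to build self-signed JWT authorization headers from a service-account key. The key file path and target URL are hypothetical.

    const {JWTAccess} = require('google-auth-library');

    // Hypothetical service-account key file.
    const keys = require('./service-account-key.json');

    const jwtAccess = new JWTAccess();
    jwtAccess.fromJSON(keys);

    // Signs a JWT whose audience is the service URL; the result is cached
    // (see the LRU cache in the constructor below) until close to expiry.
    const headers = jwtAccess.getRequestHeaders('https://storage.googleapis.com/');
    // headers => { Authorization: 'Bearer <signed JWT>' }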
+// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.JWTAccess = void 0; +const jws = require("jws"); +const util_1 = require("../util"); +const DEFAULT_HEADER = { + alg: 'RS256', + typ: 'JWT', +}; +class JWTAccess { + /** + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. + */ + constructor(email, key, keyId, eagerRefreshThresholdMillis) { + this.cache = new util_1.LRUCache({ + capacity: 500, + maxAge: 60 * 60 * 1000, + }); + this.email = email; + this.key = key; + this.keyId = keyId; + this.eagerRefreshThresholdMillis = + eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; + } + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url, scopes) { + let cacheKey = url; + if (scopes && Array.isArray(scopes) && scopes.length) { + cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; + } + else if (typeof scopes === 'string') { + cacheKey = url ? `${url}_${scopes}` : scopes; + } + if (!cacheKey) { + throw Error('Scopes or url must be provided'); + } + return cacheKey; + } + /** + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. + */ + getRequestHeaders(url, additionalClaims, scopes) { + // Return cached authorization headers, unless we are within + // eagerRefreshThresholdMillis ms of them expiring: + const key = this.getCachedKey(url, scopes); + const cachedToken = this.cache.get(key); + const now = Date.now(); + if (cachedToken && + cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { + return cachedToken.headers; + } + const iat = Math.floor(Date.now() / 1000); + const exp = JWTAccess.getExpirationTime(iat); + let defaultClaims; + // Turn scopes into space-separated string + if (Array.isArray(scopes)) { + scopes = scopes.join(' '); + } + // If scopes are specified, sign with scopes + if (scopes) { + defaultClaims = { + iss: this.email, + sub: this.email, + scope: scopes, + exp, + iat, + }; + } + else { + defaultClaims = { + iss: this.email, + sub: this.email, + aud: url, + exp, + iat, + }; + } + // if additionalClaims are provided, ensure they do not collide with + // other required claims. + if (additionalClaims) { + for (const claim in defaultClaims) { + if (additionalClaims[claim]) { + throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); + } + } + } + const header = this.keyId + ? 
{ ...DEFAULT_HEADER, kid: this.keyId } + : DEFAULT_HEADER; + const payload = Object.assign(defaultClaims, additionalClaims); + // Sign the jwt and add it to the cache + const signedJWT = jws.sign({ header, payload, secret: this.key }); + const headers = { Authorization: `Bearer ${signedJWT}` }; + this.cache.set(key, { + expiration: exp * 1000, + headers, + }); + return headers; + } + /** + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. + */ + static getExpirationTime(iat) { + const exp = iat + 3600; // 3600 seconds = 1 hour + return exp; + } + /** + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + reject(new Error('Must pass in a stream containing the service account auth settings.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('data', chunk => (s += chunk)) + .on('error', reject) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (err) { + reject(err); + } + }); + }); + } +} +exports.JWTAccess = JWTAccess; diff --git a/node_modules/google-auth-library/build/src/auth/jwtclient.d.ts b/node_modules/google-auth-library/build/src/auth/jwtclient.d.ts new file mode 100644 index 0000000..d27156c --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/jwtclient.d.ts @@ -0,0 +1,110 @@ +/// +import { GoogleToken } from 'gtoken'; +import * as stream from 'stream'; +import { CredentialBody, Credentials, JWTInput } from './credentials'; +import { IdTokenProvider } from './idtokenclient'; +import { GetTokenResponse, OAuth2Client, OAuth2ClientOptions, RequestMetadataResponse } from './oauth2client'; +export interface JWTOptions extends OAuth2ClientOptions { + email?: string; + keyFile?: string; + key?: string; + keyId?: string; + scopes?: string | string[]; + subject?: string; + additionalClaims?: {}; +} +export declare class JWT extends OAuth2Client implements IdTokenProvider { + email?: string; + keyFile?: string; + key?: string; + keyId?: string; + defaultScopes?: string | string[]; + scopes?: string | string[]; + scope?: string; + subject?: string; + gtoken?: GoogleToken; + additionalClaims?: {}; + useJWTAccessWithScope?: boolean; + defaultServicePath?: string; + private access?; + /** + * JWT service account credentials. + * + * Retrieve access token using gtoken. + * + * @param email service account email address. + * @param keyFile path to private key file. + * @param key value of key + * @param scopes list of requested scopes or a single scope. + * @param subject impersonated account's email address. 
+ * @param key_id the ID of the key + */ + constructor(options: JWTOptions); + constructor(email?: string, keyFile?: string, key?: string, scopes?: string | string[], subject?: string, keyId?: string); + /** + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. + * @return The cloned instance. + */ + createScoped(scopes?: string | string[]): JWT; + /** + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. + */ + protected getRequestMetadataAsync(url?: string | null): Promise; + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + fetchIdToken(targetAudience: string): Promise; + /** + * Determine if there are currently scopes available. + */ + private hasUserScopes; + /** + * Are there any default or user scopes defined. + */ + private hasAnyScopes; + /** + * Get the initial access token using gToken. + * @param callback Optional callback. + * @returns Promise that resolves with credentials + */ + authorize(): Promise; + authorize(callback: (err: Error | null, result?: Credentials) => void): void; + private authorizeAsync; + /** + * Refreshes the access token. + * @param refreshToken ignored + * @private + */ + protected refreshTokenNoCache(refreshToken?: string | null): Promise; + /** + * Create a gToken if it doesn't already exist. + */ + private createGToken; + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json: JWTInput): void; + /** + * Create a JWT credentials instance using the given input stream. + * @param inputStream The input stream. + * @param callback Optional callback. + */ + fromStream(inputStream: stream.Readable): Promise; + fromStream(inputStream: stream.Readable, callback: (err?: Error | null) => void): void; + private fromStreamAsync; + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey: string): void; + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + getCredentials(): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/jwtclient.js b/node_modules/google-auth-library/build/src/auth/jwtclient.js new file mode 100644 index 0000000..c4a41f2 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/jwtclient.js @@ -0,0 +1,280 @@ +"use strict"; +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
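Before the compiled class body, a minimal sketch of constructing the JWT client from a downloaded service-account key and making an authenticated request. The key file path, project ID, and URL are placeholders.

    const {JWT} = require('google-auth-library');

    async function listBuckets() {
        const keys = require('./service-account-key.json'); // hypothetical key file
        const client = new JWT({
            email: keys.client_email,
            key: keys.private_key,
            scopes: ['https://www.googleapis.com/auth/cloud-platform'],
        });
        await client.authorize(); // optional; request() also refreshes the token on demand
        const res = await client.request({
            url: 'https://storage.googleapis.com/storage/v1/b?project=my-project', // placeholder project
        });
        return res.data;
    }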
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.JWT = void 0; +const gtoken_1 = require("gtoken"); +const jwtaccess_1 = require("./jwtaccess"); +const oauth2client_1 = require("./oauth2client"); +const authclient_1 = require("./authclient"); +class JWT extends oauth2client_1.OAuth2Client { + constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { + const opts = optionsOrEmail && typeof optionsOrEmail === 'object' + ? optionsOrEmail + : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; + super(opts); + this.email = opts.email; + this.keyFile = opts.keyFile; + this.key = opts.key; + this.keyId = opts.keyId; + this.scopes = opts.scopes; + this.subject = opts.subject; + this.additionalClaims = opts.additionalClaims; + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; + } + /** + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. + * @return The cloned instance. + */ + createScoped(scopes) { + const jwt = new JWT(this); + jwt.scopes = scopes; + return jwt; + } + /** + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. + */ + async getRequestMetadataAsync(url) { + url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url; + const useSelfSignedJWT = (!this.hasUserScopes() && url) || + (this.useJWTAccessWithScope && this.hasAnyScopes()) || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { + throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); + } + if (!this.apiKey && useSelfSignedJWT) { + if (this.additionalClaims && + this.additionalClaims.target_audience) { + const { tokens } = await this.refreshToken(); + return { + headers: this.addSharedMetadataHeaders({ + Authorization: `Bearer ${tokens.id_token}`, + }), + }; + } + else { + // no scopes have been set, but a uri has been provided. Use JWTAccess + // credentials. + if (!this.access) { + this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); + } + let scopes; + if (this.hasUserScopes()) { + scopes = this.scopes; + } + else if (!url) { + scopes = this.defaultScopes; + } + const useScopes = this.useJWTAccessWithScope || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, + // Scopes take precedent over audience for signing, + // so we only provide them if `useJWTAccessWithScope` is on or + // if we are in a non-default universe + useScopes ? scopes : undefined); + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + else if (this.hasAnyScopes() || this.apiKey) { + return super.getRequestMetadataAsync(url); + } + else { + // If no audience, apiKey, or scopes are provided, we should not attempt + // to populate any headers: + return { headers: {} }; + } + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. 
+ */ + async fetchIdToken(targetAudience) { + // Create a new gToken for fetching an ID token + const gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: { target_audience: targetAudience }, + transporter: this.transporter, + }); + await gtoken.getToken({ + forceRefresh: true, + }); + if (!gtoken.idToken) { + throw new Error('Unknown error: Failed to fetch ID token'); + } + return gtoken.idToken; + } + /** + * Determine if there are currently scopes available. + */ + hasUserScopes() { + if (!this.scopes) { + return false; + } + return this.scopes.length > 0; + } + /** + * Are there any default or user scopes defined. + */ + hasAnyScopes() { + if (this.scopes && this.scopes.length > 0) + return true; + if (this.defaultScopes && this.defaultScopes.length > 0) + return true; + return false; + } + authorize(callback) { + if (callback) { + this.authorizeAsync().then(r => callback(null, r), callback); + } + else { + return this.authorizeAsync(); + } + } + async authorizeAsync() { + const result = await this.refreshToken(); + if (!result) { + throw new Error('No result returned'); + } + this.credentials = result.tokens; + this.credentials.refresh_token = 'jwt-placeholder'; + this.key = this.gtoken.key; + this.email = this.gtoken.iss; + return result.tokens; + } + /** + * Refreshes the access token. + * @param refreshToken ignored + * @private + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const gtoken = this.createGToken(); + const token = await gtoken.getToken({ + forceRefresh: this.isTokenExpiring(), + }); + const tokens = { + access_token: token.access_token, + token_type: 'Bearer', + expiry_date: gtoken.expiresAt, + id_token: gtoken.idToken, + }; + this.emit('tokens', tokens); + return { res: null, tokens }; + } + /** + * Create a gToken if it doesn't already exist. + */ + createGToken() { + if (!this.gtoken) { + this.gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: this.additionalClaims, + transporter: this.transporter, + }); + } + return this.gtoken; + } + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. 
+ this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the service account auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (e) { + reject(e); + } + }); + }); + } + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey) { + if (typeof apiKey !== 'string') { + throw new Error('Must provide an API Key string.'); + } + this.apiKey = apiKey; + } + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + async getCredentials() { + if (this.key) { + return { private_key: this.key, client_email: this.email }; + } + else if (this.keyFile) { + const gtoken = this.createGToken(); + const creds = await gtoken.getCredentials(this.keyFile); + return { private_key: creds.privateKey, client_email: creds.clientEmail }; + } + throw new Error('A key or a keyFile must be provided to getCredentials.'); + } +} +exports.JWT = JWT; diff --git a/node_modules/google-auth-library/build/src/auth/loginticket.d.ts b/node_modules/google-auth-library/build/src/auth/loginticket.d.ts new file mode 100644 index 0000000..33fd407 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/loginticket.d.ts @@ -0,0 +1,140 @@ +export declare class LoginTicket { + private envelope?; + private payload?; + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env?: string, pay?: TokenPayload); + getEnvelope(): string | undefined; + getPayload(): TokenPayload | undefined; + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId(): string | null; + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. + * + * @return The envelope and payload + */ + getAttributes(): { + envelope: string | undefined; + payload: TokenPayload | undefined; + }; +} +export interface TokenPayload { + /** + * The Issuer Identifier for the Issuer of the response. Always + * https://accounts.google.com or accounts.google.com for Google ID tokens. + */ + iss: string; + /** + * Access token hash. Provides validation that the access token is tied to the + * identity token. If the ID token is issued with an access token in the + * server flow, this is always included. This can be used as an alternate + * mechanism to protect against cross-site request forgery attacks, but if you + * follow Step 1 and Step 3 it is not necessary to verify the access token. + */ + at_hash?: string; + /** + * True if the user's e-mail address has been verified; otherwise false. 
+ */ + email_verified?: boolean; + /** + * An identifier for the user, unique among all Google accounts and never + * reused. A Google account can have multiple emails at different points in + * time, but the sub value is never changed. Use sub within your application + * as the unique-identifier key for the user. + */ + sub: string; + /** + * The client_id of the authorized presenter. This claim is only needed when + * the party requesting the ID token is not the same as the audience of the ID + * token. This may be the case at Google for hybrid apps where a web + * application and Android app have a different client_id but share the same + * project. + */ + azp?: string; + /** + * The user's email address. This may not be unique and is not suitable for + * use as a primary key. Provided only if your scope included the string + * "email". + */ + email?: string; + /** + * The URL of the user's profile page. Might be provided when: + * - The request scope included the string "profile" + * - The ID token is returned from a token refresh + * - When profile claims are present, you can use them to update your app's + * user records. Note that this claim is never guaranteed to be present. + */ + profile?: string; + /** + * The URL of the user's profile picture. Might be provided when: + * - The request scope included the string "profile" + * - The ID token is returned from a token refresh + * - When picture claims are present, you can use them to update your app's + * user records. Note that this claim is never guaranteed to be present. + */ + picture?: string; + /** + * The user's full name, in a displayable form. Might be provided when: + * - The request scope included the string "profile" + * - The ID token is returned from a token refresh + * - When name claims are present, you can use them to update your app's user + * records. Note that this claim is never guaranteed to be present. + */ + name?: string; + /** + * The user's given name, in a displayable form. Might be provided when: + * - The request scope included the string "profile" + * - The ID token is returned from a token refresh + * - When name claims are present, you can use them to update your app's user + * records. Note that this claim is never guaranteed to be present. + */ + given_name?: string; + /** + * The user's family name, in a displayable form. Might be provided when: + * - The request scope included the string "profile" + * - The ID token is returned from a token refresh + * - When name claims are present, you can use them to update your app's user + * records. Note that this claim is never guaranteed to be present. + */ + family_name?: string; + /** + * Identifies the audience that this ID token is intended for. It must be one + * of the OAuth 2.0 client IDs of your application. + */ + aud: string; + /** + * The time the ID token was issued, represented in Unix time (integer + * seconds). + */ + iat: number; + /** + * The time the ID token expires, represented in Unix time (integer seconds). + */ + exp: number; + /** + * The value of the nonce supplied by your app in the authentication request. + * You should enforce protection against replay attacks by ensuring it is + * presented only once. + */ + nonce?: string; + /** + * The hosted G Suite domain of the user. Provided only if the user belongs to + * a hosted domain. + */ + hd?: string; + /** + * The user's locale, represented by a BCP 47 language tag. + * Might be provided when a name claim is present. 
+ */ + locale?: string; +} diff --git a/node_modules/google-auth-library/build/src/auth/loginticket.js b/node_modules/google-auth-library/build/src/auth/loginticket.js new file mode 100644 index 0000000..9a1687b --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/loginticket.js @@ -0,0 +1,57 @@ +"use strict"; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LoginTicket = void 0; +class LoginTicket { + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env, pay) { + this.envelope = env; + this.payload = pay; + } + getEnvelope() { + return this.envelope; + } + getPayload() { + return this.payload; + } + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId() { + const payload = this.getPayload(); + if (payload && payload.sub) { + return payload.sub; + } + return null; + } + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. + * + * @return The envelope and payload + */ + getAttributes() { + return { envelope: this.getEnvelope(), payload: this.getPayload() }; + } +} +exports.LoginTicket = LoginTicket; diff --git a/node_modules/google-auth-library/build/src/auth/oauth2client.d.ts b/node_modules/google-auth-library/build/src/auth/oauth2client.d.ts new file mode 100644 index 0000000..632c87c --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/oauth2client.d.ts @@ -0,0 +1,561 @@ +/// +import { GaxiosError, GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +import * as querystring from 'querystring'; +import { JwkCertificate } from '../crypto/crypto'; +import { BodyResponseCallback } from '../transporters'; +import { AuthClient, AuthClientOptions } from './authclient'; +import { Credentials } from './credentials'; +import { LoginTicket } from './loginticket'; +/** + * The results from the `generateCodeVerifierAsync` method. To learn more, + * See the sample: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ +export interface CodeVerifierResults { + /** + * The code verifier that will be used when calling `getToken` to obtain a new + * access token. + */ + codeVerifier: string; + /** + * The code_challenge that should be sent with the `generateAuthUrl` call + * to obtain a verifiable authentication url. 
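The LoginTicket and TokenPayload shapes above are what OAuth2Client.verifyIdToken resolves with. A minimal verification sketch; the client ID and the idToken variable are placeholders.

    const {OAuth2Client} = require('google-auth-library');

    const CLIENT_ID = 'my-client-id.apps.googleusercontent.com'; // placeholder

    async function verify(idToken) {
        const client = new OAuth2Client(CLIENT_ID);
        const ticket = await client.verifyIdToken({
            idToken,
            audience: CLIENT_ID, // must match the aud claim described above
        });
        const payload = ticket.getPayload();
        // payload.sub is the stable per-user identifier recommended above as the key.
        return payload.sub;
    }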
+ */ + codeChallenge?: string; +} +export interface Certificates { + [index: string]: string | JwkCertificate; +} +export interface PublicKeys { + [index: string]: string; +} +export interface Headers { + [index: string]: string; +} +export declare enum CodeChallengeMethod { + Plain = "plain", + S256 = "S256" +} +export declare enum CertificateFormat { + PEM = "PEM", + JWK = "JWK" +} +export interface GetTokenOptions { + code: string; + codeVerifier?: string; + /** + * The client ID for your application. The value passed into the constructor + * will be used if not provided. Must match any client_id option passed to + * a corresponding call to generateAuthUrl. + */ + client_id?: string; + /** + * Determines where the API server redirects the user after the user + * completes the authorization flow. The value passed into the constructor + * will be used if not provided. Must match any redirect_uri option passed to + * a corresponding call to generateAuthUrl. + */ + redirect_uri?: string; +} +export interface TokenInfo { + /** + * The application that is the intended user of the access token. + */ + aud: string; + /** + * This value lets you correlate profile information from multiple Google + * APIs. It is only present in the response if you included the profile scope + * in your request in step 1. The field value is an immutable identifier for + * the logged-in user that can be used to create and manage user sessions in + * your application. The identifier is the same regardless of which client ID + * is used to retrieve it. This enables multiple applications in the same + * organization to correlate profile information. + */ + user_id?: string; + /** + * An array of scopes that the user granted access to. + */ + scopes: string[]; + /** + * The datetime when the token becomes invalid. + */ + expiry_date: number; + /** + * An identifier for the user, unique among all Google accounts and never + * reused. A Google account can have multiple emails at different points in + * time, but the sub value is never changed. Use sub within your application + * as the unique-identifier key for the user. + */ + sub?: string; + /** + * The client_id of the authorized presenter. This claim is only needed when + * the party requesting the ID token is not the same as the audience of the ID + * token. This may be the case at Google for hybrid apps where a web + * application and Android app have a different client_id but share the same + * project. + */ + azp?: string; + /** + * Indicates whether your application can refresh access tokens + * when the user is not present at the browser. Valid parameter values are + * 'online', which is the default value, and 'offline'. Set the value to + * 'offline' if your application needs to refresh access tokens when the user + * is not present at the browser. This value instructs the Google + * authorization server to return a refresh token and an access token the + * first time that your application exchanges an authorization code for + * tokens. + */ + access_type?: string; + /** + * The user's email address. This value may not be unique to this user and + * is not suitable for use as a primary key. Provided only if your scope + * included the email scope value. + */ + email?: string; + /** + * True if the user's e-mail address has been verified; otherwise false. + */ + email_verified?: boolean; +} +export interface GenerateAuthUrlOpts { + /** + * Recommended. Indicates whether your application can refresh access tokens + * when the user is not present at the browser. 
Valid parameter values are + * 'online', which is the default value, and 'offline'. Set the value to + * 'offline' if your application needs to refresh access tokens when the user + * is not present at the browser. This value instructs the Google + * authorization server to return a refresh token and an access token the + * first time that your application exchanges an authorization code for + * tokens. + */ + access_type?: string; + /** + * The hd (hosted domain) parameter streamlines the login process for G Suite + * hosted accounts. By including the domain of the G Suite user (for example, + * mycollege.edu), you can indicate that the account selection UI should be + * optimized for accounts at that domain. To optimize for G Suite accounts + * generally instead of just one domain, use an asterisk: hd=*. + * Don't rely on this UI optimization to control who can access your app, + * as client-side requests can be modified. Be sure to validate that the + * returned ID token has an hd claim value that matches what you expect + * (e.g. mycolledge.edu). Unlike the request parameter, the ID token claim is + * contained within a security token from Google, so the value can be trusted. + */ + hd?: string; + /** + * The 'response_type' will always be set to 'CODE'. + */ + response_type?: string; + /** + * The client ID for your application. The value passed into the constructor + * will be used if not provided. You can find this value in the API Console. + */ + client_id?: string; + /** + * Determines where the API server redirects the user after the user + * completes the authorization flow. The value must exactly match one of the + * 'redirect_uri' values listed for your project in the API Console. Note that + * the http or https scheme, case, and trailing slash ('/') must all match. + * The value passed into the constructor will be used if not provided. + */ + redirect_uri?: string; + /** + * Required. A space-delimited list of scopes that identify the resources that + * your application could access on the user's behalf. These values inform the + * consent screen that Google displays to the user. Scopes enable your + * application to only request access to the resources that it needs while + * also enabling users to control the amount of access that they grant to your + * application. Thus, there is an inverse relationship between the number of + * scopes requested and the likelihood of obtaining user consent. The + * OAuth 2.0 API Scopes document provides a full list of scopes that you might + * use to access Google APIs. We recommend that your application request + * access to authorization scopes in context whenever possible. By requesting + * access to user data in context, via incremental authorization, you help + * users to more easily understand why your application needs the access it is + * requesting. + */ + scope?: string[] | string; + /** + * Recommended. Specifies any string value that your application uses to + * maintain state between your authorization request and the authorization + * server's response. The server returns the exact value that you send as a + * name=value pair in the hash (#) fragment of the 'redirect_uri' after the + * user consents to or denies your application's access request. You can use + * this parameter for several purposes, such as directing the user to the + * correct resource in your application, sending nonces, and mitigating + * cross-site request forgery. 
Since your redirect_uri can be guessed, using a + * state value can increase your assurance that an incoming connection is the + * result of an authentication request. If you generate a random string or + * encode the hash of a cookie or another value that captures the client's + * state, you can validate the response to additionally ensure that the + * request and response originated in the same browser, providing protection + * against attacks such as cross-site request forgery. See the OpenID Connect + * documentation for an example of how to create and confirm a state token. + */ + state?: string; + /** + * Optional. Enables applications to use incremental authorization to request + * access to additional scopes in context. If you set this parameter's value + * to true and the authorization request is granted, then the new access token + * will also cover any scopes to which the user previously granted the + * application access. See the incremental authorization section for examples. + */ + include_granted_scopes?: boolean; + /** + * Optional. If your application knows which user is trying to authenticate, + * it can use this parameter to provide a hint to the Google Authentication + * Server. The server uses the hint to simplify the login flow either by + * prefilling the email field in the sign-in form or by selecting the + * appropriate multi-login session. Set the parameter value to an email + * address or sub identifier, which is equivalent to the user's Google ID. + */ + login_hint?: string; + /** + * Optional. A space-delimited, case-sensitive list of prompts to present the + * user. If you don't specify this parameter, the user will be prompted only + * the first time your app requests access. Possible values are: + * + * 'none' - Donot display any authentication or consent screens. Must not be + * specified with other values. + * 'consent' - Prompt the user for consent. + * 'select_account' - Prompt the user to select an account. + */ + prompt?: string; + /** + * Recommended. Specifies what method was used to encode a 'code_verifier' + * that will be used during authorization code exchange. This parameter must + * be used with the 'code_challenge' parameter. The value of the + * 'code_challenge_method' defaults to "plain" if not present in the request + * that includes a 'code_challenge'. The only supported values for this + * parameter are "S256" or "plain". + */ + code_challenge_method?: CodeChallengeMethod; + /** + * Recommended. Specifies an encoded 'code_verifier' that will be used as a + * server-side challenge during authorization code exchange. This parameter + * must be used with the 'code_challenge' parameter described above. + */ + code_challenge?: string; + /** + * A way for developers and/or the auth team to provide a set of key value + * pairs to be added as query parameters to the authorization url. 
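 + *
 + * A minimal sketch of how these options are commonly combined with PKCE
 + * (illustrative only; the client ID, secret, redirect URI and scope below
 + * are placeholder assumptions, not values taken from this repository):
 + *
 + * @example
 + * // Inside an async function:
 + * const { OAuth2Client, CodeChallengeMethod } = require('google-auth-library');
 + * const client = new OAuth2Client(
 + *   'YOUR_CLIENT_ID',                       // placeholder
 + *   'YOUR_CLIENT_SECRET',                   // placeholder
 + *   'http://localhost:8083/oauth2callback'  // placeholder redirect URI
 + * );
 + * const { codeVerifier, codeChallenge } = await client.generateCodeVerifierAsync();
 + * const authUrl = client.generateAuthUrl({
 + *   access_type: 'offline',                 // also request a refresh token
 + *   scope: ['https://www.googleapis.com/auth/userinfo.email'],
 + *   state: 'opaque-anti-csrf-value',        // validate this on the redirect
 + *   code_challenge_method: CodeChallengeMethod.S256,
 + *   code_challenge: codeChallenge,
 + * });
 + * // Redirect the user to authUrl; keep codeVerifier for the token exchange.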
+ */ + [key: string]: querystring.ParsedUrlQueryInput[keyof querystring.ParsedUrlQueryInput]; +} +export interface AccessTokenResponse { + access_token: string; + expiry_date: number; +} +export interface GetRefreshHandlerCallback { + (): Promise; +} +export interface GetTokenCallback { + (err: GaxiosError | null, token?: Credentials | null, res?: GaxiosResponse | null): void; +} +export interface GetTokenResponse { + tokens: Credentials; + res: GaxiosResponse | null; +} +export interface GetAccessTokenCallback { + (err: GaxiosError | null, token?: string | null, res?: GaxiosResponse | null): void; +} +export interface GetAccessTokenResponse { + token?: string | null; + res?: GaxiosResponse | null; +} +export interface RefreshAccessTokenCallback { + (err: GaxiosError | null, credentials?: Credentials | null, res?: GaxiosResponse | null): void; +} +export interface RefreshAccessTokenResponse { + credentials: Credentials; + res: GaxiosResponse | null; +} +export interface RequestMetadataResponse { + headers: Headers; + res?: GaxiosResponse | null; +} +export interface RequestMetadataCallback { + (err: GaxiosError | null, headers?: Headers, res?: GaxiosResponse | null): void; +} +export interface GetFederatedSignonCertsCallback { + (err: GaxiosError | null, certs?: Certificates, response?: GaxiosResponse | null): void; +} +export interface FederatedSignonCertsResponse { + certs: Certificates; + format: CertificateFormat; + res?: GaxiosResponse | null; +} +export interface GetIapPublicKeysCallback { + (err: GaxiosError | null, pubkeys?: PublicKeys, response?: GaxiosResponse | null): void; +} +export interface IapPublicKeysResponse { + pubkeys: PublicKeys; + res?: GaxiosResponse | null; +} +export interface RevokeCredentialsResult { + success: boolean; +} +export interface VerifyIdTokenOptions { + idToken: string; + audience?: string | string[]; + maxExpiry?: number; +} +export interface OAuth2ClientEndpoints { + /** + * The endpoint for viewing access token information + * + * @example + * 'https://oauth2.googleapis.com/tokeninfo' + */ + tokenInfoUrl: string | URL; + /** + * The base URL for auth endpoints. + * + * @example + * 'https://accounts.google.com/o/oauth2/v2/auth' + */ + oauth2AuthBaseUrl: string | URL; + /** + * The base endpoint for token retrieval + * . + * @example + * 'https://oauth2.googleapis.com/token' + */ + oauth2TokenUrl: string | URL; + /** + * The base endpoint to revoke tokens. + * + * @example + * 'https://oauth2.googleapis.com/revoke' + */ + oauth2RevokeUrl: string | URL; + /** + * Sign on certificates in PEM format. + * + * @example + * 'https://www.googleapis.com/oauth2/v1/certs' + */ + oauth2FederatedSignonPemCertsUrl: string | URL; + /** + * Sign on certificates in JWK format. + * + * @example + * 'https://www.googleapis.com/oauth2/v3/certs' + */ + oauth2FederatedSignonJwkCertsUrl: string | URL; + /** + * IAP Public Key URL. + * This URL contains a JSON dictionary that maps the `kid` claims to the public key values. + * + * @example + * 'https://www.gstatic.com/iap/verify/public_key' + */ + oauth2IapPublicKeyUrl: string | URL; +} +export interface OAuth2ClientOptions extends AuthClientOptions { + clientId?: string; + clientSecret?: string; + redirectUri?: string; + /** + * Customizable endpoints. + */ + endpoints?: Partial; + /** + * The allowed OAuth2 token issuers. 
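 + *
 + * A short sketch of the options-object constructor form described by this
 + * interface (illustrative; every value below is a placeholder, and the
 + * endpoint override is only something you would do for tests or emulators):
 + *
 + * @example
 + * const { OAuth2Client } = require('google-auth-library');
 + * const client = new OAuth2Client({
 + *   clientId: 'YOUR_CLIENT_ID',
 + *   clientSecret: 'YOUR_CLIENT_SECRET',
 + *   redirectUri: 'http://localhost:8083/oauth2callback',
 + *   endpoints: { oauth2TokenUrl: 'http://localhost:9999/token' }, // optional test override
 + * });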
+ */ + issuers?: string[]; +} +export type RefreshOptions = Pick; +export declare class OAuth2Client extends AuthClient { + private redirectUri?; + private certificateCache; + private certificateExpiry; + private certificateCacheFormat; + protected refreshTokenPromises: Map>; + readonly endpoints: Readonly; + readonly issuers: string[]; + _clientId?: string; + _clientSecret?: string; + apiKey?: string; + refreshHandler?: GetRefreshHandlerCallback; + /** + * Handles OAuth2 flow for Google APIs. + * + * @param clientId The authentication client ID. + * @param clientSecret The authentication client secret. + * @param redirectUri The URI to redirect to after completing the auth + * request. + * @param opts optional options for overriding the given parameters. + * @constructor + */ + constructor(options?: OAuth2ClientOptions); + constructor(clientId?: string, clientSecret?: string, redirectUri?: string); + /** + * @deprecated use instance's {@link OAuth2Client.endpoints} + */ + protected static readonly GOOGLE_TOKEN_INFO_URL = "https://oauth2.googleapis.com/tokeninfo"; + /** + * Clock skew - five minutes in seconds + */ + private static readonly CLOCK_SKEW_SECS_; + /** + * The default max Token Lifetime is one day in seconds + */ + private static readonly DEFAULT_MAX_TOKEN_LIFETIME_SECS_; + /** + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. + */ + generateAuthUrl(opts?: GenerateAuthUrlOpts): string; + generateCodeVerifier(): void; + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + generateCodeVerifierAsync(): Promise; + /** + * Gets the access token for the given code. + * @param code The authorization code. + * @param callback Optional callback fn. + */ + getToken(code: string): Promise; + getToken(options: GetTokenOptions): Promise; + getToken(code: string, callback: GetTokenCallback): void; + getToken(options: GetTokenOptions, callback: GetTokenCallback): void; + private getTokenAsync; + /** + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private + */ + protected refreshToken(refreshToken?: string | null): Promise; + protected refreshTokenNoCache(refreshToken?: string | null): Promise; + /** + * Retrieves the access token using refresh token + * + * @param callback callback + */ + refreshAccessToken(): Promise; + refreshAccessToken(callback: RefreshAccessTokenCallback): void; + private refreshAccessTokenAsync; + /** + * Get a non-expired access token, after refreshing if necessary + * + * @param callback Callback to call with the access token + */ + getAccessToken(): Promise; + getAccessToken(callback: GetAccessTokenCallback): void; + private getAccessTokenAsync; + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * In OAuth2Client, the result has the form: + * { Authorization: 'Bearer ' } + * @param url The optional url being authorized + */ + getRequestHeaders(url?: string): Promise; + protected getRequestMetadataAsync(url?: string | URL | null): Promise; + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. 
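 + *
 + * For orientation, a sketch of the authorization-code exchange using the
 + * methods declared above (illustrative; `client` is the OAuth2Client from
 + * the earlier examples, and `code`/`codeVerifier` are assumed to come from
 + * the consent redirect and the PKCE step):
 + *
 + * @example
 + * // Inside the OAuth redirect handler, after reading `code` from the query string:
 + * const { tokens } = await client.getToken({ code, codeVerifier });
 + * client.setCredentials(tokens);                    // store access/refresh tokens
 + * const { token } = await client.getAccessToken();  // auto-refreshes when expiring
 + * console.log('Got an access token:', Boolean(token));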
+ * + * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} + */ + static getRevokeTokenUrl(token: string): string; + /** + * Generates a URL to revoke the given token. + * + * @param token The existing token to be revoked. + */ + getRevokeTokenURL(token: string): URL; + /** + * Revokes the access given to token. + * @param token The existing token to be revoked. + * @param callback Optional callback fn. + */ + revokeToken(token: string): GaxiosPromise; + revokeToken(token: string, callback: BodyResponseCallback): void; + /** + * Revokes access token and clears the credentials object + * @param callback callback + */ + revokeCredentials(): GaxiosPromise; + revokeCredentials(callback: BodyResponseCallback): void; + private revokeCredentialsAsync; + /** + * Provides a request implementation with OAuth 2.0 flow. If credentials have + * a refresh_token, in cases of HTTP 401 and 403 responses, it automatically + * asks for a new access token and replays the unsuccessful request. + * @param opts Request options. + * @param callback callback. + * @return Request object + */ + request(opts: GaxiosOptions): GaxiosPromise; + request(opts: GaxiosOptions, callback: BodyResponseCallback): void; + protected requestAsync(opts: GaxiosOptions, retry?: boolean): Promise>; + /** + * Verify id token is token by checking the certs and audience + * @param options that contains all options. + * @param callback Callback supplying GoogleLogin if successful + */ + verifyIdToken(options: VerifyIdTokenOptions): Promise; + verifyIdToken(options: VerifyIdTokenOptions, callback: (err: Error | null, login?: LoginTicket) => void): void; + private verifyIdTokenAsync; + /** + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. + */ + getTokenInfo(accessToken: string): Promise; + /** + * Gets federated sign-on certificates to use for verifying identity tokens. + * Returns certs as array structure, where keys are key ids, and values + * are certificates in either PEM or JWK format. + * @param callback Callback supplying the certificates + */ + getFederatedSignonCerts(): Promise; + getFederatedSignonCerts(callback: GetFederatedSignonCertsCallback): void; + getFederatedSignonCertsAsync(): Promise; + /** + * Gets federated sign-on certificates to use for verifying identity tokens. + * Returns certs as array structure, where keys are key ids, and values + * are certificates in either PEM or JWK format. + * @param callback Callback supplying the certificates + */ + getIapPublicKeys(): Promise; + getIapPublicKeys(callback: GetIapPublicKeysCallback): void; + getIapPublicKeysAsync(): Promise; + verifySignedJwtWithCerts(): void; + /** + * Verify the id token is signed with the correct certificate + * and is from the correct audience. + * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. 
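 + *
 + * A brief sketch of ID-token verification via `verifyIdToken`, which wraps
 + * this method (illustrative; `idToken` is assumed to come from a sign-in
 + * flow and the audience is a placeholder client ID):
 + *
 + * @example
 + * const ticket = await client.verifyIdToken({
 + *   idToken,                     // raw JWT from the sign-in response
 + *   audience: 'YOUR_CLIENT_ID',  // must match the token's aud claim
 + * });
 + * const payload = ticket.getPayload();
 + * console.log('Verified Google account:', payload.sub, payload.email);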
+ */ + verifySignedJwtWithCertsAsync(jwt: string, certs: Certificates | PublicKeys, requiredAudience?: string | string[], issuers?: string[], maxExpiry?: number): Promise; + /** + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. + */ + private processAndValidateRefreshHandler; + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. + */ + protected isTokenExpiring(): boolean; +} diff --git a/node_modules/google-auth-library/build/src/auth/oauth2client.js b/node_modules/google-auth-library/build/src/auth/oauth2client.js new file mode 100644 index 0000000..6b4e4d1 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/oauth2client.js @@ -0,0 +1,760 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; +const gaxios_1 = require("gaxios"); +const querystring = require("querystring"); +const stream = require("stream"); +const formatEcdsa = require("ecdsa-sig-formatter"); +const crypto_1 = require("../crypto/crypto"); +const authclient_1 = require("./authclient"); +const loginticket_1 = require("./loginticket"); +var CodeChallengeMethod; +(function (CodeChallengeMethod) { + CodeChallengeMethod["Plain"] = "plain"; + CodeChallengeMethod["S256"] = "S256"; +})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); +var CertificateFormat; +(function (CertificateFormat) { + CertificateFormat["PEM"] = "PEM"; + CertificateFormat["JWK"] = "JWK"; +})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); +class OAuth2Client extends authclient_1.AuthClient { + constructor(optionsOrClientId, clientSecret, redirectUri) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? 
optionsOrClientId + : { clientId: optionsOrClientId, clientSecret, redirectUri }; + super(opts); + this.certificateCache = {}; + this.certificateExpiry = null; + this.certificateCacheFormat = CertificateFormat.PEM; + this.refreshTokenPromises = new Map(); + this._clientId = opts.clientId; + this._clientSecret = opts.clientSecret; + this.redirectUri = opts.redirectUri; + this.endpoints = { + tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', + oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', + oauth2TokenUrl: 'https://oauth2.googleapis.com/token', + oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', + oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', + oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', + oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', + ...opts.endpoints, + }; + this.issuers = opts.issuers || [ + 'accounts.google.com', + 'https://accounts.google.com', + this.universeDomain, + ]; + } + /** + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. + */ + generateAuthUrl(opts = {}) { + if (opts.code_challenge_method && !opts.code_challenge) { + throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); + } + opts.response_type = opts.response_type || 'code'; + opts.client_id = opts.client_id || this._clientId; + opts.redirect_uri = opts.redirect_uri || this.redirectUri; + // Allow scopes to be passed either as array or a string + if (Array.isArray(opts.scope)) { + opts.scope = opts.scope.join(' '); + } + const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); + return (rootUrl + + '?' + + querystring.stringify(opts)); + } + generateCodeVerifier() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); + } + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + async generateCodeVerifierAsync() { + // base64 encoding uses 6 bits per character, and we want to generate128 + // characters. 6*128/8 = 96. + const crypto = (0, crypto_1.createCrypto)(); + const randomString = crypto.randomBytesBase64(96); + // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ + // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just + // swapping out a few chars. + const codeVerifier = randomString + .replace(/\+/g, '~') + .replace(/=/g, '_') + .replace(/\//g, '-'); + // Generate the base64 encoded SHA256 + const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); + // We need to use base64UrlEncoding instead of standard base64 + const codeChallenge = unencodedCodeChallenge + .split('=')[0] + .replace(/\+/g, '-') + .replace(/\//g, '_'); + return { codeVerifier, codeChallenge }; + } + getToken(codeOrOptions, callback) { + const options = typeof codeOrOptions === 'string' ? 
{ code: codeOrOptions } : codeOrOptions; + if (callback) { + this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); + } + else { + return this.getTokenAsync(options); + } + } + async getTokenAsync(options) { + const url = this.endpoints.oauth2TokenUrl.toString(); + const values = { + code: options.code, + client_id: options.client_id || this._clientId, + client_secret: this._clientSecret, + redirect_uri: options.redirect_uri || this.redirectUri, + grant_type: 'authorization_code', + code_verifier: options.codeVerifier, + }; + const res = await this.transporter.request({ + method: 'POST', + url, + data: querystring.stringify(values), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + const tokens = res.data; + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + /** + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private + */ + async refreshToken(refreshToken) { + if (!refreshToken) { + return this.refreshTokenNoCache(refreshToken); + } + // If a request to refresh using the same token has started, + // return the same promise. + if (this.refreshTokenPromises.has(refreshToken)) { + return this.refreshTokenPromises.get(refreshToken); + } + const p = this.refreshTokenNoCache(refreshToken).then(r => { + this.refreshTokenPromises.delete(refreshToken); + return r; + }, e => { + this.refreshTokenPromises.delete(refreshToken); + throw e; + }); + this.refreshTokenPromises.set(refreshToken, p); + return p; + } + async refreshTokenNoCache(refreshToken) { + var _a; + if (!refreshToken) { + throw new Error('No refresh token is set.'); + } + const url = this.endpoints.oauth2TokenUrl.toString(); + const data = { + refresh_token: refreshToken, + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + }; + let res; + try { + // request for new token + res = await this.transporter.request({ + method: 'POST', + url, + data: querystring.stringify(data), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError && + e.message === 'invalid_grant' && + ((_a = e.response) === null || _a === void 0 ? 
void 0 : _a.data) && + /ReAuth/i.test(e.response.data.error_description)) { + e.message = JSON.stringify(e.response.data); + } + throw e; + } + const tokens = res.data; + // TODO: de-duplicate this code from a few spots + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + refreshAccessToken(callback) { + if (callback) { + this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); + } + else { + return this.refreshAccessTokenAsync(); + } + } + async refreshAccessTokenAsync() { + const r = await this.refreshToken(this.credentials.refresh_token); + const tokens = r.tokens; + tokens.refresh_token = this.credentials.refresh_token; + this.credentials = tokens; + return { credentials: this.credentials, res: r.res }; + } + getAccessToken(callback) { + if (callback) { + this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + else { + return this.getAccessTokenAsync(); + } + } + async getAccessTokenAsync() { + const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); + if (shouldRefresh) { + if (!this.credentials.refresh_token) { + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + return { token: this.credentials.access_token }; + } + } + else { + throw new Error('No refresh token or refresh handler callback is set.'); + } + } + const r = await this.refreshAccessTokenAsync(); + if (!r.credentials || (r.credentials && !r.credentials.access_token)) { + throw new Error('Could not refresh access token.'); + } + return { token: r.credentials.access_token, res: r.res }; + } + else { + return { token: this.credentials.access_token }; + } + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * In OAuth2Client, the result has the form: + * { Authorization: 'Bearer ' } + * @param url The optional url being authorized + */ + async getRequestHeaders(url) { + const headers = (await this.getRequestMetadataAsync(url)).headers; + return headers; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + const thisCreds = this.credentials; + if (!thisCreds.access_token && + !thisCreds.refresh_token && + !this.apiKey && + !this.refreshHandler) { + throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + } + if (thisCreds.access_token && !this.isTokenExpiring()) { + thisCreds.token_type = thisCreds.token_type || 'Bearer'; + const headers = { + Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + // If refreshHandler exists, call processAndValidateRefreshHandler(). + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + const headers = { + Authorization: 'Bearer ' + this.credentials.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + if (this.apiKey) { + return { headers: { 'X-Goog-Api-Key': this.apiKey } }; + } + let r = null; + let tokens = null; + try { + r = await this.refreshToken(thisCreds.refresh_token); + tokens = r.tokens; + } + catch (err) { + const e = err; + if (e.response && + (e.response.status === 403 || e.response.status === 404)) { + e.message = `Could not refresh access token: ${e.message}`; + } + throw e; + } + const credentials = this.credentials; + credentials.token_type = credentials.token_type || 'Bearer'; + tokens.refresh_token = credentials.refresh_token; + this.credentials = tokens; + const headers = { + Authorization: credentials.token_type + ' ' + tokens.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; + } + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. + * + * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} + */ + static getRevokeTokenUrl(token) { + return new OAuth2Client().getRevokeTokenURL(token).toString(); + } + /** + * Generates a URL to revoke the given token. + * + * @param token The existing token to be revoked. + */ + getRevokeTokenURL(token) { + const url = new URL(this.endpoints.oauth2RevokeUrl); + url.searchParams.append('token', token); + return url; + } + revokeToken(token, callback) { + const opts = { + url: this.getRevokeTokenURL(token).toString(), + method: 'POST', + }; + if (callback) { + this.transporter + .request(opts) + .then(r => callback(null, r), callback); + } + else { + return this.transporter.request(opts); + } + } + revokeCredentials(callback) { + if (callback) { + this.revokeCredentialsAsync().then(res => callback(null, res), callback); + } + else { + return this.revokeCredentialsAsync(); + } + } + async revokeCredentialsAsync() { + const token = this.credentials.access_token; + this.credentials = {}; + if (token) { + return this.revokeToken(token); + } + else { + throw new Error('No access token to revoke.'); + } + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + async requestAsync(opts, retry = false) { + let r2; + try { + const r = await this.getRequestMetadataAsync(opts.url); + opts.headers = opts.headers || {}; + if (r.headers && r.headers['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; + } + if (r.headers && r.headers.Authorization) { + opts.headers.Authorization = r.headers.Authorization; + } + if (this.apiKey) { + opts.headers['X-Goog-Api-Key'] = this.apiKey; + } + r2 = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - An access_token and refresh_token were available, but either no + // expiry_date was available or the forceRefreshOnFailure flag is set. 
+ // The absent expiry_date case can happen when developers stash the + // access_token and refresh_token for later use, but the access_token + // fails on the first try because it's expired. Some developers may + // choose to enable forceRefreshOnFailure to mitigate time-related + // errors. + // Or the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - No refresh_token was available + // - An access_token and a refreshHandler callback were available, but + // either no expiry_date was available or the forceRefreshOnFailure + // flag is set. The access_token fails on the first try because it's + // expired. Some developers may choose to enable forceRefreshOnFailure + // to mitigate time-related errors. + const mayRequireRefresh = this.credentials && + this.credentials.access_token && + this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure); + const mayRequireRefreshWithNoRefreshToken = this.credentials && + this.credentials.access_token && + !this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure) && + this.refreshHandler; + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!retry && isAuthErr && !isReadableStream && mayRequireRefresh) { + await this.refreshAccessTokenAsync(); + return this.requestAsync(opts, true); + } + else if (!retry && + isAuthErr && + !isReadableStream && + mayRequireRefreshWithNoRefreshToken) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + } + return this.requestAsync(opts, true); + } + } + throw e; + } + return r2; + } + verifyIdToken(options, callback) { + // This function used to accept two arguments instead of an options object. + // Check the types to help users upgrade with less pain. + // This check can be removed after a 2.0 release. + if (callback && typeof callback !== 'function') { + throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); + } + if (callback) { + this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); + } + else { + return this.verifyIdTokenAsync(options); + } + } + async verifyIdTokenAsync(options) { + if (!options.idToken) { + throw new Error('The verifyIdToken method requires an ID Token'); + } + const response = await this.getFederatedSignonCertsAsync(); + const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); + return login; + } + /** + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. 
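 + *
 + * A quick sketch of inspecting a token's scopes with this method
 + * (illustrative; `client` is an already-authorized OAuth2Client):
 + *
 + * @example
 + * const { token } = await client.getAccessToken();
 + * const info = await client.getTokenInfo(token);
 + * console.log(info.scopes, 'expires at', new Date(info.expiry_date));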
+ */ + async getTokenInfo(accessToken) { + const { data } = await this.transporter.request({ + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + Authorization: `Bearer ${accessToken}`, + }, + url: this.endpoints.tokenInfoUrl.toString(), + }); + const info = Object.assign({ + expiry_date: new Date().getTime() + data.expires_in * 1000, + scopes: data.scope.split(' '), + }, data); + delete info.expires_in; + delete info.scope; + return info; + } + getFederatedSignonCerts(callback) { + if (callback) { + this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); + } + else { + return this.getFederatedSignonCertsAsync(); + } + } + async getFederatedSignonCertsAsync() { + const nowTime = new Date().getTime(); + const format = (0, crypto_1.hasBrowserCrypto)() + ? CertificateFormat.JWK + : CertificateFormat.PEM; + if (this.certificateExpiry && + nowTime < this.certificateExpiry.getTime() && + this.certificateCacheFormat === format) { + return { certs: this.certificateCache, format }; + } + let res; + let url; + switch (format) { + case CertificateFormat.PEM: + url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); + break; + case CertificateFormat.JWK: + url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + try { + res = await this.transporter.request({ url }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + const cacheControl = res ? res.headers['cache-control'] : undefined; + let cacheAge = -1; + if (cacheControl) { + const pattern = new RegExp('max-age=([0-9]*)'); + const regexResult = pattern.exec(cacheControl); + if (regexResult && regexResult.length === 2) { + // Cache results with max-age (in seconds) + cacheAge = Number(regexResult[1]) * 1000; // milliseconds + } + } + let certificates = {}; + switch (format) { + case CertificateFormat.PEM: + certificates = res.data; + break; + case CertificateFormat.JWK: + for (const key of res.data.keys) { + certificates[key.kid] = key; + } + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + const now = new Date(); + this.certificateExpiry = + cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); + this.certificateCache = certificates; + this.certificateCacheFormat = format; + return { certs: certificates, format, res }; + } + getIapPublicKeys(callback) { + if (callback) { + this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); + } + else { + return this.getIapPublicKeysAsync(); + } + } + async getIapPublicKeysAsync() { + let res; + const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); + try { + res = await this.transporter.request({ url }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + return { pubkeys: res.data, res }; + } + verifySignedJwtWithCerts() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); + } + /** + * Verify the id token is signed with the correct certificate + * and is from the correct audience. + * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. 
+ * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. + */ + async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { + const crypto = (0, crypto_1.createCrypto)(); + if (!maxExpiry) { + maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; + } + const segments = jwt.split('.'); + if (segments.length !== 3) { + throw new Error('Wrong number of segments in token: ' + jwt); + } + const signed = segments[0] + '.' + segments[1]; + let signature = segments[2]; + let envelope; + let payload; + try { + envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; + } + throw err; + } + if (!envelope) { + throw new Error("Can't parse token envelope: " + segments[0]); + } + try { + payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token payload '${segments[0]}`; + } + throw err; + } + if (!payload) { + throw new Error("Can't parse token payload: " + segments[1]); + } + if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { + // If this is not present, then there's no reason to attempt verification + throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); + } + const cert = certs[envelope.kid]; + if (envelope.alg === 'ES256') { + signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); + } + const verified = await crypto.verify(cert, signed, signature); + if (!verified) { + throw new Error('Invalid token signature: ' + jwt); + } + if (!payload.iat) { + throw new Error('No issue time in token: ' + JSON.stringify(payload)); + } + if (!payload.exp) { + throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + } + const iat = Number(payload.iat); + if (isNaN(iat)) + throw new Error('iat field using invalid format'); + const exp = Number(payload.exp); + if (isNaN(exp)) + throw new Error('exp field using invalid format'); + const now = new Date().getTime() / 1000; + if (exp >= now + maxExpiry) { + throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + } + const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; + const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; + if (now < earliest) { + throw new Error('Token used too early, ' + + now + + ' < ' + + earliest + + ': ' + + JSON.stringify(payload)); + } + if (now > latest) { + throw new Error('Token used too late, ' + + now + + ' > ' + + latest + + ': ' + + JSON.stringify(payload)); + } + if (issuers && issuers.indexOf(payload.iss) < 0) { + throw new Error('Invalid issuer, expected one of [' + + issuers + + '], but got ' + + payload.iss); + } + // Check the audience matches if we have one + if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { + const aud = payload.aud; + let audVerified = false; + // If the requiredAudience is an array, check if it contains token + // audience + if (requiredAudience.constructor === Array) { + audVerified = requiredAudience.indexOf(aud) > -1; + } + else { + audVerified = aud === requiredAudience; + } + if (!audVerified) { + throw new Error('Wrong recipient, payload audience != requiredAudience'); + } + } + return new 
loginticket_1.LoginTicket(envelope, payload); + } + /** + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. + */ + async processAndValidateRefreshHandler() { + if (this.refreshHandler) { + const accessTokenResponse = await this.refreshHandler(); + if (!accessTokenResponse.access_token) { + throw new Error('No access token is returned by the refreshHandler callback.'); + } + return accessTokenResponse; + } + return; + } + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. + */ + isTokenExpiring() { + const expiryDate = this.credentials.expiry_date; + return expiryDate + ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis + : false; + } +} +exports.OAuth2Client = OAuth2Client; +/** + * @deprecated use instance's {@link OAuth2Client.endpoints} + */ +OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; +/** + * Clock skew - five minutes in seconds + */ +OAuth2Client.CLOCK_SKEW_SECS_ = 300; +/** + * The default max Token Lifetime is one day in seconds + */ +OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; diff --git a/node_modules/google-auth-library/build/src/auth/oauth2common.d.ts b/node_modules/google-auth-library/build/src/auth/oauth2common.d.ts new file mode 100644 index 0000000..a26dc66 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/oauth2common.d.ts @@ -0,0 +1,82 @@ +import { GaxiosOptions } from 'gaxios'; +/** + * OAuth error codes. + * https://tools.ietf.org/html/rfc6749#section-5.2 + */ +type OAuthErrorCode = 'invalid_request' | 'invalid_client' | 'invalid_grant' | 'unauthorized_client' | 'unsupported_grant_type' | 'invalid_scope' | string; +/** + * The standard OAuth error response. + * https://tools.ietf.org/html/rfc6749#section-5.2 + */ +export interface OAuthErrorResponse { + error: OAuthErrorCode; + error_description?: string; + error_uri?: string; +} +/** + * OAuth client authentication types. + * https://tools.ietf.org/html/rfc6749#section-2.3 + */ +export type ConfidentialClientType = 'basic' | 'request-body'; +/** + * Defines the client authentication credentials for basic and request-body + * credentials. + * https://tools.ietf.org/html/rfc6749#section-2.3.1 + */ +export interface ClientAuthentication { + confidentialClientType: ConfidentialClientType; + clientId: string; + clientSecret?: string; +} +/** + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. + */ +export declare abstract class OAuthClientAuthHandler { + private readonly clientAuthentication?; + private crypto; + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. + */ + constructor(clientAuthentication?: ClientAuthentication | undefined); + /** + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. 
+ * When this is used, no client authentication credentials are needed. + */ + protected applyClientAuthenticationOptions(opts: GaxiosOptions, bearerToken?: string): void; + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + private injectAuthenticatedHeaders; + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + private injectAuthenticatedRequestBody; +} +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +export declare function getErrorFromOAuthErrorResponse(resp: OAuthErrorResponse, err?: Error): Error; +export {}; diff --git a/node_modules/google-auth-library/build/src/auth/oauth2common.js b/node_modules/google-auth-library/build/src/auth/oauth2common.js new file mode 100644 index 0000000..8261c71 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/oauth2common.js @@ -0,0 +1,175 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0; +const querystring = require("querystring"); +const crypto_1 = require("../crypto/crypto"); +/** List of HTTP methods that accept request bodies. */ +const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; +/** + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. + */ +class OAuthClientAuthHandler { + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. + */ + constructor(clientAuthentication) { + this.clientAuthentication = clientAuthentication; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. 
+ * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + applyClientAuthenticationOptions(opts, bearerToken) { + // Inject authenticated header. + this.injectAuthenticatedHeaders(opts, bearerToken); + // Inject authenticated request body. + if (!bearerToken) { + this.injectAuthenticatedRequestBody(opts); + } + } + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + injectAuthenticatedHeaders(opts, bearerToken) { + var _a; + // Bearer token prioritized higher than basic Auth. + if (bearerToken) { + opts.headers = opts.headers || {}; + Object.assign(opts.headers, { + Authorization: `Bearer ${bearerToken}}`, + }); + } + else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { + opts.headers = opts.headers || {}; + const clientId = this.clientAuthentication.clientId; + const clientSecret = this.clientAuthentication.clientSecret || ''; + const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); + Object.assign(opts.headers, { + Authorization: `Basic ${base64EncodedCreds}`, + }); + } + } + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + injectAuthenticatedRequestBody(opts) { + var _a; + if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { + const method = (opts.method || 'GET').toUpperCase(); + // Inject authenticated request body. + if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { + // Get content-type. + let contentType; + const headers = opts.headers || {}; + for (const key in headers) { + if (key.toLowerCase() === 'content-type' && headers[key]) { + contentType = headers[key].toLowerCase(); + break; + } + } + if (contentType === 'application/x-www-form-urlencoded') { + opts.data = opts.data || ''; + const data = querystring.parse(opts.data); + Object.assign(data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + opts.data = querystring.stringify(data); + } + else if (contentType === 'application/json') { + opts.data = opts.data || {}; + Object.assign(opts.data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + } + else { + throw new Error(`${contentType} content-types are not supported with ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + else { + throw new Error(`${method} HTTP method does not support ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + } +} +exports.OAuthClientAuthHandler = OAuthClientAuthHandler; +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. 
+ * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +function getErrorFromOAuthErrorResponse(resp, err) { + // Error response. + const errorCode = resp.error; + const errorDescription = resp.error_description; + const errorUri = resp.error_uri; + let message = `Error code ${errorCode}`; + if (typeof errorDescription !== 'undefined') { + message += `: ${errorDescription}`; + } + if (typeof errorUri !== 'undefined') { + message += ` - ${errorUri}`; + } + const newError = new Error(message); + // Copy properties from original error to newly generated error. + if (err) { + const keys = Object.keys(err); + if (err.stack) { + // Copy error.stack if available. + keys.push('stack'); + } + keys.forEach(key => { + // Do not overwrite the message field. + if (key !== 'message') { + Object.defineProperty(newError, key, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: err[key], + writable: false, + enumerable: true, + }); + } + }); + } + return newError; +} +exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; diff --git a/node_modules/google-auth-library/build/src/auth/passthrough.d.ts b/node_modules/google-auth-library/build/src/auth/passthrough.d.ts new file mode 100644 index 0000000..6ec4617 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/passthrough.d.ts @@ -0,0 +1,38 @@ +import { GaxiosOptions } from 'gaxios'; +import { AuthClient } from './authclient'; +import { GetAccessTokenResponse, Headers } from './oauth2client'; +/** + * An AuthClient without any Authentication information. Useful for: + * - Anonymous access + * - Local Emulators + * - Testing Environments + * + */ +export declare class PassThroughClient extends AuthClient { + /** + * Creates a request without any authentication headers or checks. + * + * @remarks + * + * In testing environments it may be useful to change the provided + * {@link AuthClient.transporter} for any desired request overrides/handling. + * + * @param opts + * @returns The response of the request. + */ + request(opts: GaxiosOptions): Promise>; + /** + * A required method of the base class. + * Always will return an empty object. + * + * @returns {} + */ + getAccessToken(): Promise; + /** + * A required method of the base class. + * Always will return an empty object. + * + * @returns {} + */ + getRequestHeaders(): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/passthrough.js b/node_modules/google-auth-library/build/src/auth/passthrough.js new file mode 100644 index 0000000..07e9812 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/passthrough.js @@ -0,0 +1,61 @@ +"use strict"; +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
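+// Usage sketch (illustrative only): this assumes PassThroughClient is
+// re-exported from the package root and that the target URL is a local
+// emulator or another endpoint that needs no Authorization header.
+//
+//   const { PassThroughClient } = require('google-auth-library');
+//   const client = new PassThroughClient();
+//   const res = await client.request({ url: 'http://localhost:9199/storage/v1/b' });
+//   console.log(res.status);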
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.PassThroughClient = void 0; +const authclient_1 = require("./authclient"); +/** + * An AuthClient without any Authentication information. Useful for: + * - Anonymous access + * - Local Emulators + * - Testing Environments + * + */ +class PassThroughClient extends authclient_1.AuthClient { + /** + * Creates a request without any authentication headers or checks. + * + * @remarks + * + * In testing environments it may be useful to change the provided + * {@link AuthClient.transporter} for any desired request overrides/handling. + * + * @param opts + * @returns The response of the request. + */ + async request(opts) { + return this.transporter.request(opts); + } + /** + * A required method of the base class. + * Always will return an empty object. + * + * @returns {} + */ + async getAccessToken() { + return {}; + } + /** + * A required method of the base class. + * Always will return an empty object. + * + * @returns {} + */ + async getRequestHeaders() { + return {}; + } +} +exports.PassThroughClient = PassThroughClient; +const a = new PassThroughClient(); +a.getAccessToken(); diff --git a/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.d.ts b/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.d.ts new file mode 100644 index 0000000..6b4e62f --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.d.ts @@ -0,0 +1,155 @@ +import { BaseExternalAccountClient, BaseExternalAccountClientOptions } from './baseexternalclient'; +import { AuthClientOptions } from './authclient'; +/** + * Defines the credential source portion of the configuration for PluggableAuthClient. + * + *
+ * Command is the only required field. If timeout_millis is not specified, the library will + * default to a 30-second timeout. + * + *
+ * Sample credential source for Pluggable Auth Client:
+ * {
+ *   ...
+ *   "credential_source": {
+ *     "executable": {
+ *       "command": "/path/to/get/credentials.sh --arg1=value1 --arg2=value2",
+ *       "timeout_millis": 5000,
+ *       "output_file": "/path/to/generated/cached/credentials"
+ *     }
+ *   }
+ * }
+ * 
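+ *
+ * For context, a sketch of a complete external_account credential file that
+ * embeds such an executable source (illustrative; the audience, pool and
+ * provider values are placeholders):
+ *
+ * @example
+ * {
+ *   "type": "external_account",
+ *   "audience": "//iam.googleapis.com/projects/123456/locations/global/workloadIdentityPools/POOL_ID/providers/PROVIDER_ID",
+ *   "subject_token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "token_url": "https://sts.googleapis.com/v1/token",
+ *   "credential_source": {
+ *     "executable": {
+ *       "command": "/path/to/get/credentials.sh",
+ *       "timeout_millis": 5000
+ *     }
+ *   }
+ * }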
+ */ +export interface PluggableAuthClientOptions extends BaseExternalAccountClientOptions { + credential_source: { + executable: { + /** + * The command used to retrieve the 3rd party token. + */ + command: string; + /** + * The timeout for executable to run in milliseconds. If none is provided it + * will be set to the default timeout of 30 seconds. + */ + timeout_millis?: number; + /** + * An optional output file location that will be checked for a cached response + * from a previous run of the executable. + */ + output_file?: string; + }; + }; +} +/** + * Error thrown from the executable run by PluggableAuthClient. + */ +export declare class ExecutableError extends Error { + /** + * The exit code returned by the executable. + */ + readonly code: string; + constructor(message: string, code: string); +} +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *
+ * To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable + * must be set to '1'. This is for security reasons. + * + *
+ * Both OIDC and SAML are supported. The executable must adhere to a specific response format + * defined below. + * + *
+ * The executable must print out the 3rd party token to STDOUT in JSON format. When an + * output_file is specified in the credential configuration, the executable must also handle writing the + * JSON response to this file. + * + *
+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ * 
+ * + *
+ * The "expiration_time" field in the JSON response is only required for successful + * responses when an output file was specified in the credential configuration. + * + *

+ * The auth libraries will populate certain environment variables that will be accessible by the
+ * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
+ * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
+ * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
+ *
+ *
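A short sketch of an executable that consumes these variables (the token value is a placeholder; writing the cached copy is only needed when an output_file is configured, per the notes above):

#!/usr/bin/env node
// Reads the variables populated by the auth library and emits a response.
const fs = require('fs');

const audience = process.env.GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE;
const tokenType = process.env.GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE;
const outputFile = process.env.GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE;

const response = JSON.stringify({
  version: 1,
  success: true,
  token_type: tokenType || 'urn:ietf:params:oauth:token-type:id_token',
  id_token: `placeholder-token-for-${audience}`, // placeholder; fetch a real token here
  expiration_time: Math.floor(Date.now() / 1000) + 3600,
});

if (outputFile) {
  fs.writeFileSync(outputFile, response); // cached copy, later read back by retrieveCachedResponse()
}
process.stdout.write(response);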

Please see this repositories README for a complete executable request/response specification. + */ +export declare class PluggableAuthClient extends BaseExternalAccountClient { + /** + * The command used to retrieve the third party token. + */ + private readonly command; + /** + * The timeout in milliseconds for running executable, + * set to default if none provided. + */ + private readonly timeoutMillis; + /** + * The path to file to check for cached executable response. + */ + private readonly outputFile?; + /** + * Executable and output file handler. + */ + private readonly handler; + /** + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options: PluggableAuthClientOptions, additionalOptions?: AuthClientOptions); + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. + */ + retrieveSubjectToken(): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js b/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js new file mode 100644 index 0000000..acb299d --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/pluggable-auth-client.js @@ -0,0 +1,215 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PluggableAuthClient = exports.ExecutableError = void 0; +const baseexternalclient_1 = require("./baseexternalclient"); +const executable_response_1 = require("./executable-response"); +const pluggable_auth_handler_1 = require("./pluggable-auth-handler"); +/** + * Error thrown from the executable run by PluggableAuthClient. 
+ */ +class ExecutableError extends Error { + constructor(message, code) { + super(`The executable failed with exit code: ${code} and error message: ${message}.`); + this.code = code; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableError = ExecutableError; +/** + * The default executable timeout when none is provided, in milliseconds. + */ +const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; +/** + * The minimum allowed executable timeout in milliseconds. + */ +const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; +/** + * The maximum allowed executable timeout in milliseconds. + */ +const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; +/** + * The environment variable to check to see if executable can be run. + * Value must be set to '1' for the executable to run. + */ +const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; +/** + * The maximum currently supported executable version. + */ +const MAXIMUM_EXECUTABLE_VERSION = 1; +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *

+ * To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
+ * must be set to '1'. This is for security reasons.
+ *
+ *

+ * Both OIDC and SAML are supported. The executable must adhere to a specific response format
+ * defined below.
+ *
+ *

+ * The executable must print out the 3rd party token to STDOUT in JSON format. When an
+ * output_file is specified in the credential configuration, the executable must also handle writing the
+ * JSON response to this file.
+ *
+ *

+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ * 
+ * + *
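For end-to-end use through Application Default Credentials, a hedged sketch (assuming GOOGLE_APPLICATION_CREDENTIALS points at an external_account JSON file whose credential_source contains an "executable" block like the sample near the top of this file):

const {GoogleAuth} = require('google-auth-library');

async function main() {
  // Required opt-in, as described above; without it retrieveSubjectToken() rejects.
  process.env.GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = '1';
  const auth = new GoogleAuth({scopes: 'https://www.googleapis.com/auth/cloud-platform'});
  const client = await auth.getClient(); // expected to resolve to a PluggableAuthClient for this config
  const headers = await client.getRequestHeaders(); // triggers the executable run and the STS exchange
  console.log(Object.keys(headers));
}

main().catch(console.error);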

+ * The "expiration_time" field in the JSON response is only required for successful
+ * responses when an output file was specified in the credential configuration
+ *
+ *

+ * The auth libraries will populate certain environment variables that will be accessible by the
+ * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
+ * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
+ * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
+ *
+ *

Please see this repositories README for a complete executable request/response specification. + */ +class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + if (!options.credential_source.executable) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + this.command = options.credential_source.executable.command; + if (!this.command) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + // Check if the provided timeout exists and if it is valid. + if (options.credential_source.executable.timeout_millis === undefined) { + this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; + } + else { + this.timeoutMillis = options.credential_source.executable.timeout_millis; + if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || + this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { + throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); + } + } + this.outputFile = options.credential_source.executable.output_file; + this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ + command: this.command, + timeoutMillis: this.timeoutMillis, + outputFile: this.outputFile, + }); + this.credentialSourceType = 'executable'; + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Check if the executable is allowed to run. + if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { + throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + + 'Variable to 1.'); + } + let executableResponse = undefined; + // Try to get cached executable response from output file. + if (this.outputFile) { + executableResponse = await this.handler.retrieveCachedResponse(); + } + // If no response from output file, call the executable. 
+ if (!executableResponse) { + // Set up environment map with required values for the executable. + const envMap = new Map(); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); + // Always set to 0 because interactive mode is not supported. + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); + if (this.outputFile) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); + } + const serviceAccountEmail = this.getServiceAccountEmail(); + if (serviceAccountEmail) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + } + executableResponse = + await this.handler.retrieveResponseFromExecutable(envMap); + } + if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { + throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); + } + // Check that response was successful. + if (!executableResponse.success) { + throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); + } + // Check that response contains expiration time if output file was specified. + if (this.outputFile) { + if (!executableResponse.expirationTime) { + throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + } + } + // Check that response is not expired. + if (executableResponse.isExpired()) { + throw new Error('Executable response is expired.'); + } + // Return subject token from response. + return executableResponse.subjectToken; + } +} +exports.PluggableAuthClient = PluggableAuthClient; diff --git a/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.d.ts b/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.d.ts new file mode 100644 index 0000000..e5ddd02 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.d.ts @@ -0,0 +1,51 @@ +import { ExecutableResponse } from './executable-response'; +/** + * Defines the options used for the PluggableAuthHandler class. + */ +export interface PluggableAuthHandlerOptions { + /** + * The command used to retrieve the third party token. + */ + command: string; + /** + * The timeout in milliseconds for running executable, + * set to default if none provided. + */ + timeoutMillis: number; + /** + * The path to file to check for cached executable response. + */ + outputFile?: string; +} +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +export declare class PluggableAuthHandler { + private readonly commandComponents; + private readonly timeoutMillis; + private readonly outputFile?; + /** + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. + */ + constructor(options: PluggableAuthHandlerOptions); + /** + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. 
+ */ + retrieveResponseFromExecutable(envMap: Map): Promise; + /** + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. + */ + retrieveCachedResponse(): Promise; + /** + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. + */ + private static parseCommand; +} diff --git a/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js b/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js new file mode 100644 index 0000000..47fa276 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/pluggable-auth-handler.js @@ -0,0 +1,156 @@ +"use strict"; +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PluggableAuthHandler = void 0; +const pluggable_auth_client_1 = require("./pluggable-auth-client"); +const executable_response_1 = require("./executable-response"); +const childProcess = require("child_process"); +const fs = require("fs"); +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +class PluggableAuthHandler { + /** + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. + */ + constructor(options) { + if (!options.command) { + throw new Error('No command provided.'); + } + this.commandComponents = PluggableAuthHandler.parseCommand(options.command); + this.timeoutMillis = options.timeoutMillis; + if (!this.timeoutMillis) { + throw new Error('No timeoutMillis provided.'); + } + this.outputFile = options.outputFile; + } + /** + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. + */ + retrieveResponseFromExecutable(envMap) { + return new Promise((resolve, reject) => { + // Spawn process to run executable using added environment variables. + const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { + env: { ...process.env, ...Object.fromEntries(envMap) }, + }); + let output = ''; + // Append stdout to output as executable runs. + child.stdout.on('data', (data) => { + output += data; + }); + // Append stderr as executable runs. + child.stderr.on('data', (err) => { + output += err; + }); + // Set up a timeout to end the child process and throw an error. + const timeout = setTimeout(() => { + // Kill child process and remove listeners so 'close' event doesn't get + // read after child process is killed. 
+ child.removeAllListeners(); + child.kill(); + return reject(new Error('The executable failed to finish within the timeout specified.')); + }, this.timeoutMillis); + child.on('close', (code) => { + // Cancel timeout if executable closes before timeout is reached. + clearTimeout(timeout); + if (code === 0) { + // If the executable completed successfully, try to return the parsed response. + try { + const responseJson = JSON.parse(output); + const response = new executable_response_1.ExecutableResponse(responseJson); + return resolve(response); + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + return reject(error); + } + return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); + } + } + else { + return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); + } + }); + }); + } + /** + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. + */ + async retrieveCachedResponse() { + if (!this.outputFile || this.outputFile.length === 0) { + return undefined; + } + let filePath; + try { + filePath = await fs.promises.realpath(this.outputFile); + } + catch (_a) { + // If file path cannot be resolved, return undefined. + return undefined; + } + if (!(await fs.promises.lstat(filePath)).isFile()) { + // If path does not lead to file, return undefined. + return undefined; + } + const responseString = await fs.promises.readFile(filePath, { + encoding: 'utf8', + }); + if (responseString === '') { + return undefined; + } + try { + const responseJson = JSON.parse(responseString); + const response = new executable_response_1.ExecutableResponse(responseJson); + // Check if response is successful and unexpired. + if (response.isValid()) { + return new executable_response_1.ExecutableResponse(responseJson); + } + return undefined; + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + throw error; + } + throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); + } + } + /** + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. + */ + static parseCommand(command) { + // Split the command into components by splitting on spaces, + // unless spaces are contained in quotation marks. + const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); + if (!components) { + throw new Error(`Provided command: "${command}" could not be parsed.`); + } + // Remove quotation marks from the beginning and end of each component if they are present. 
+ for (let i = 0; i < components.length; i++) { + if (components[i][0] === '"' && components[i].slice(-1) === '"') { + components[i] = components[i].slice(1, -1); + } + } + return components; + } +} +exports.PluggableAuthHandler = PluggableAuthHandler; diff --git a/node_modules/google-auth-library/build/src/auth/refreshclient.d.ts b/node_modules/google-auth-library/build/src/auth/refreshclient.d.ts new file mode 100644 index 0000000..1143543 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/refreshclient.d.ts @@ -0,0 +1,44 @@ +/// +import * as stream from 'stream'; +import { JWTInput } from './credentials'; +import { GetTokenResponse, OAuth2Client, OAuth2ClientOptions } from './oauth2client'; +export declare const USER_REFRESH_ACCOUNT_TYPE = "authorized_user"; +export interface UserRefreshClientOptions extends OAuth2ClientOptions { + clientId?: string; + clientSecret?: string; + refreshToken?: string; +} +export declare class UserRefreshClient extends OAuth2Client { + _refreshToken?: string | null; + /** + * User Refresh Token credentials. + * + * @param clientId The authentication client ID. + * @param clientSecret The authentication client secret. + * @param refreshToken The authentication refresh token. + */ + constructor(clientId?: string, clientSecret?: string, refreshToken?: string); + constructor(options: UserRefreshClientOptions); + constructor(clientId?: string, clientSecret?: string, refreshToken?: string); + /** + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. + */ + protected refreshTokenNoCache(refreshToken?: string | null): Promise; + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + fromJSON(json: JWTInput): void; + /** + * Create a UserRefreshClient credentials instance using the given input + * stream. + * @param inputStream The input stream. + * @param callback Optional callback. + */ + fromStream(inputStream: stream.Readable): Promise; + fromStream(inputStream: stream.Readable, callback: (err?: Error) => void): void; + private fromStreamAsync; +} diff --git a/node_modules/google-auth-library/build/src/auth/refreshclient.js b/node_modules/google-auth-library/build/src/auth/refreshclient.js new file mode 100644 index 0000000..ef00cb5 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/refreshclient.js @@ -0,0 +1,103 @@ +"use strict"; +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; +const oauth2client_1 = require("./oauth2client"); +exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; +class UserRefreshClient extends oauth2client_1.OAuth2Client { + constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { + clientId: optionsOrClientId, + clientSecret, + refreshToken, + eagerRefreshThresholdMillis, + forceRefreshOnFailure, + }; + super(opts); + this._refreshToken = opts.refreshToken; + this.credentials.refresh_token = opts.refreshToken; + } + /** + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + return super.refreshTokenNoCache(this._refreshToken); + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the user refresh token'); + } + if (json.type !== 'authorized_user') { + throw new Error('The incoming JSON object does not have the "authorized_user" type'); + } + if (!json.client_id) { + throw new Error('The incoming JSON object does not contain a client_id field'); + } + if (!json.client_secret) { + throw new Error('The incoming JSON object does not contain a client_secret field'); + } + if (!json.refresh_token) { + throw new Error('The incoming JSON object does not contain a refresh_token field'); + } + this._clientId = json.client_id; + this._clientSecret = json.client_secret; + this._refreshToken = json.refresh_token; + this.credentials.refresh_token = json.refresh_token; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + async fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + return reject(new Error('Must pass in a stream containing the user refresh token.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + return resolve(); + } + catch (err) { + return reject(err); + } + }); + }); + } +} +exports.UserRefreshClient = UserRefreshClient; diff --git a/node_modules/google-auth-library/build/src/auth/stscredentials.d.ts b/node_modules/google-auth-library/build/src/auth/stscredentials.d.ts new file mode 100644 index 0000000..4c9adf5 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/stscredentials.d.ts @@ -0,0 +1,114 @@ +import { GaxiosResponse } from 'gaxios'; +import { Headers } from './oauth2client'; +import { ClientAuthentication, OAuthClientAuthHandler } from './oauth2common'; +/** + * Defines the interface needed to initialize an StsCredentials instance. + * The interface does not directly map to the spec and instead is converted + * to be compliant with the JavaScript style guide. This is because this is + * instantiated internally. 
+ * StsCredentials implement the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693. + * Request options are defined in + * https://tools.ietf.org/html/rfc8693#section-2.1 + */ +export interface StsCredentialsOptions { + /** + * REQUIRED. The value "urn:ietf:params:oauth:grant-type:token-exchange" + * indicates that a token exchange is being performed. + */ + grantType: string; + /** + * OPTIONAL. A URI that indicates the target service or resource where the + * client intends to use the requested security token. + */ + resource?: string; + /** + * OPTIONAL. The logical name of the target service where the client + * intends to use the requested security token. This serves a purpose + * similar to the "resource" parameter but with the client providing a + * logical name for the target service. + */ + audience?: string; + /** + * OPTIONAL. A list of space-delimited, case-sensitive strings, as defined + * in Section 3.3 of [RFC6749], that allow the client to specify the desired + * scope of the requested security token in the context of the service or + * resource where the token will be used. + */ + scope?: string[]; + /** + * OPTIONAL. An identifier, as described in Section 3 of [RFC8693], eg. + * "urn:ietf:params:oauth:token-type:access_token" for the type of the + * requested security token. + */ + requestedTokenType?: string; + /** + * REQUIRED. A security token that represents the identity of the party on + * behalf of whom the request is being made. + */ + subjectToken: string; + /** + * REQUIRED. An identifier, as described in Section 3 of [RFC8693], that + * indicates the type of the security token in the "subject_token" parameter. + */ + subjectTokenType: string; + actingParty?: { + /** + * OPTIONAL. A security token that represents the identity of the acting + * party. Typically, this will be the party that is authorized to use the + * requested security token and act on behalf of the subject. + */ + actorToken: string; + /** + * An identifier, as described in Section 3, that indicates the type of the + * security token in the "actor_token" parameter. This is REQUIRED when the + * "actor_token" parameter is present in the request but MUST NOT be + * included otherwise. + */ + actorTokenType: string; + }; +} +/** + * Defines the OAuth 2.0 token exchange successful response based on + * https://tools.ietf.org/html/rfc8693#section-2.2.1 + */ +export interface StsSuccessfulResponse { + access_token: string; + issued_token_type: string; + token_type: string; + expires_in?: number; + refresh_token?: string; + scope?: string; + res?: GaxiosResponse | null; +} +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +export declare class StsCredentials extends OAuthClientAuthHandler { + private readonly tokenExchangeEndpoint; + private transporter; + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint: string | URL, clientAuthentication?: ClientAuthentication); + /** + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. 
+ * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. + */ + exchangeToken(stsCredentialsOptions: StsCredentialsOptions, additionalHeaders?: Headers, options?: { + [key: string]: any; + }): Promise; +} diff --git a/node_modules/google-auth-library/build/src/auth/stscredentials.js b/node_modules/google-auth-library/build/src/auth/stscredentials.js new file mode 100644 index 0000000..e8ca807 --- /dev/null +++ b/node_modules/google-auth-library/build/src/auth/stscredentials.js @@ -0,0 +1,108 @@ +"use strict"; +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.StsCredentials = void 0; +const gaxios_1 = require("gaxios"); +const querystring = require("querystring"); +const transporters_1 = require("../transporters"); +const oauth2common_1 = require("./oauth2common"); +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint, clientAuthentication) { + super(clientAuthentication); + this.tokenExchangeEndpoint = tokenExchangeEndpoint; + this.transporter = new transporters_1.DefaultTransporter(); + } + /** + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. + */ + async exchangeToken(stsCredentialsOptions, additionalHeaders, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + var _a, _b, _c; + const values = { + grant_type: stsCredentialsOptions.grantType, + resource: stsCredentialsOptions.resource, + audience: stsCredentialsOptions.audience, + scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), + requested_token_type: stsCredentialsOptions.requestedTokenType, + subject_token: stsCredentialsOptions.subjectToken, + subject_token_type: stsCredentialsOptions.subjectTokenType, + actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? 
void 0 : _b.actorToken, + actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, + // Non-standard GCP-specific options. + options: options && JSON.stringify(options), + }; + // Remove undefined fields. + Object.keys(values).forEach(key => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof values[key] === 'undefined') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete values[key]; + } + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + // Inject additional STS headers if available. + Object.assign(headers, additionalHeaders || {}); + const opts = { + url: this.tokenExchangeEndpoint.toString(), + method: 'POST', + headers, + data: querystring.stringify(values), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const stsSuccessfulResponse = response.data; + stsSuccessfulResponse.res = response; + return stsSuccessfulResponse; + } + catch (error) { + // Translate error to OAuthError. + if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +exports.StsCredentials = StsCredentials; diff --git a/node_modules/google-auth-library/build/src/crypto/browser/crypto.d.ts b/node_modules/google-auth-library/build/src/crypto/browser/crypto.d.ts new file mode 100644 index 0000000..e0ed1ae --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/browser/crypto.d.ts @@ -0,0 +1,27 @@ +import { Crypto, JwkCertificate } from '../crypto'; +export declare class BrowserCrypto implements Crypto { + constructor(); + sha256DigestBase64(str: string): Promise; + randomBytesBase64(count: number): string; + private static padBase64; + verify(pubkey: JwkCertificate, data: string, signature: string): Promise; + sign(privateKey: JwkCertificate, data: string): Promise; + decodeBase64StringUtf8(base64: string): string; + encodeBase64StringUtf8(text: string): string; + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + sha256DigestHex(str: string): Promise; + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + signWithHmacSha256(key: string | ArrayBuffer, msg: string): Promise; +} diff --git a/node_modules/google-auth-library/build/src/crypto/browser/crypto.js b/node_modules/google-auth-library/build/src/crypto/browser/crypto.js new file mode 100644 index 0000000..6bcd947 --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/browser/crypto.js @@ -0,0 +1,133 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BrowserCrypto = void 0; +// This file implements crypto functions we need using in-browser +// SubtleCrypto interface `window.crypto.subtle`. +const base64js = require("base64-js"); +const crypto_1 = require("../crypto"); +class BrowserCrypto { + constructor() { + if (typeof window === 'undefined' || + window.crypto === undefined || + window.crypto.subtle === undefined) { + throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); + } + } + async sha256DigestBase64(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return base64js.fromByteArray(new Uint8Array(outputBuffer)); + } + randomBytesBase64(count) { + const array = new Uint8Array(count); + window.crypto.getRandomValues(array); + return base64js.fromByteArray(array); + } + static padBase64(base64) { + // base64js requires padding, so let's add some '=' + while (base64.length % 4 !== 0) { + base64 += '='; + } + return base64; + } + async verify(pubkey, data, signature) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const dataArray = new TextEncoder().encode(data); + const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); + const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); + // SubtleCrypto's verify method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); + return result; + } + async sign(privateKey, data) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const dataArray = new TextEncoder().encode(data); + const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); + // SubtleCrypto's sign method is async so we must make + // this method async as well. 
+ const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); + return base64js.fromByteArray(new Uint8Array(result)); + } + decodeBase64StringUtf8(base64) { + const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const result = new TextDecoder().decode(uint8array); + return result; + } + encodeBase64StringUtf8(text) { + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const uint8array = new TextEncoder().encode(text); + const result = base64js.fromByteArray(uint8array); + return result; + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + // Convert key, if provided in ArrayBuffer format, to string. + const rawKey = typeof key === 'string' + ? key + : String.fromCharCode(...new Uint16Array(key)); + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const enc = new TextEncoder(); + const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { + name: 'HMAC', + hash: { + name: 'SHA-256', + }, + }, false, ['sign']); + return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + } +} +exports.BrowserCrypto = BrowserCrypto; diff --git a/node_modules/google-auth-library/build/src/crypto/crypto.d.ts b/node_modules/google-auth-library/build/src/crypto/crypto.d.ts new file mode 100644 index 0000000..f45f3a6 --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/crypto.d.ts @@ -0,0 +1,45 @@ +/// +export interface JwkCertificate { + kty: string; + alg: string; + use?: string; + kid: string; + n: string; + e: string; +} +export interface CryptoSigner { + update(data: string): void; + sign(key: string, outputFormat: string): string; +} +export interface Crypto { + sha256DigestBase64(str: string): Promise; + randomBytesBase64(n: number): string; + verify(pubkey: string | JwkCertificate, data: string | Buffer, signature: string): Promise; + sign(privateKey: string | JwkCertificate, data: string | Buffer): Promise; + decodeBase64StringUtf8(base64: string): string; + encodeBase64StringUtf8(text: string): string; + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. 
+ */ + sha256DigestHex(str: string): Promise; + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + signWithHmacSha256(key: string | ArrayBuffer, msg: string): Promise; +} +export declare function createCrypto(): Crypto; +export declare function hasBrowserCrypto(): boolean; +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +export declare function fromArrayBufferToHex(arrayBuffer: ArrayBuffer): string; diff --git a/node_modules/google-auth-library/build/src/crypto/crypto.js b/node_modules/google-auth-library/build/src/crypto/crypto.js new file mode 100644 index 0000000..f596339 --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/crypto.js @@ -0,0 +1,48 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0; +const crypto_1 = require("./browser/crypto"); +const crypto_2 = require("./node/crypto"); +function createCrypto() { + if (hasBrowserCrypto()) { + return new crypto_1.BrowserCrypto(); + } + return new crypto_2.NodeCrypto(); +} +exports.createCrypto = createCrypto; +function hasBrowserCrypto() { + return (typeof window !== 'undefined' && + typeof window.crypto !== 'undefined' && + typeof window.crypto.subtle !== 'undefined'); +} +exports.hasBrowserCrypto = hasBrowserCrypto; +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +function fromArrayBufferToHex(arrayBuffer) { + // Convert buffer to byte array. + const byteArray = Array.from(new Uint8Array(arrayBuffer)); + // Convert bytes to hex string. 
+ return byteArray + .map(byte => { + return byte.toString(16).padStart(2, '0'); + }) + .join(''); +} +exports.fromArrayBufferToHex = fromArrayBufferToHex; diff --git a/node_modules/google-auth-library/build/src/crypto/node/crypto.d.ts b/node_modules/google-auth-library/build/src/crypto/node/crypto.d.ts new file mode 100644 index 0000000..04c6425 --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/node/crypto.d.ts @@ -0,0 +1,26 @@ +/// +import { Crypto } from '../crypto'; +export declare class NodeCrypto implements Crypto { + sha256DigestBase64(str: string): Promise; + randomBytesBase64(count: number): string; + verify(pubkey: string, data: string | Buffer, signature: string): Promise; + sign(privateKey: string, data: string | Buffer): Promise; + decodeBase64StringUtf8(base64: string): string; + encodeBase64StringUtf8(text: string): string; + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + sha256DigestHex(str: string): Promise; + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + signWithHmacSha256(key: string | ArrayBuffer, msg: string): Promise; +} diff --git a/node_modules/google-auth-library/build/src/crypto/node/crypto.js b/node_modules/google-auth-library/build/src/crypto/node/crypto.js new file mode 100644 index 0000000..26ede46 --- /dev/null +++ b/node_modules/google-auth-library/build/src/crypto/node/crypto.js @@ -0,0 +1,82 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NodeCrypto = void 0; +const crypto = require("crypto"); +class NodeCrypto { + async sha256DigestBase64(str) { + return crypto.createHash('sha256').update(str).digest('base64'); + } + randomBytesBase64(count) { + return crypto.randomBytes(count).toString('base64'); + } + async verify(pubkey, data, signature) { + const verifier = crypto.createVerify('RSA-SHA256'); + verifier.update(data); + verifier.end(); + return verifier.verify(pubkey, signature, 'base64'); + } + async sign(privateKey, data) { + const signer = crypto.createSign('RSA-SHA256'); + signer.update(data); + signer.end(); + return signer.sign(privateKey, 'base64'); + } + decodeBase64StringUtf8(base64) { + return Buffer.from(base64, 'base64').toString('utf-8'); + } + encodeBase64StringUtf8(text) { + return Buffer.from(text, 'utf-8').toString('base64'); + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. 
+ */ + async sha256DigestHex(str) { + return crypto.createHash('sha256').update(str).digest('hex'); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + const cryptoKey = typeof key === 'string' ? key : toBuffer(key); + return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); + } +} +exports.NodeCrypto = NodeCrypto; +/** + * Converts a Node.js Buffer to an ArrayBuffer. + * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer + * @param buffer The Buffer input to covert. + * @return The ArrayBuffer representation of the input. + */ +function toArrayBuffer(buffer) { + return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); +} +/** + * Converts an ArrayBuffer to a Node.js Buffer. + * @param arrayBuffer The ArrayBuffer input to covert. + * @return The Buffer representation of the input. + */ +function toBuffer(arrayBuffer) { + return Buffer.from(arrayBuffer); +} diff --git a/node_modules/google-auth-library/build/src/index.d.ts b/node_modules/google-auth-library/build/src/index.d.ts new file mode 100644 index 0000000..c4b8d96 --- /dev/null +++ b/node_modules/google-auth-library/build/src/index.d.ts @@ -0,0 +1,25 @@ +import { GoogleAuth } from './auth/googleauth'; +export * as gcpMetadata from 'gcp-metadata'; +export { AuthClient, DEFAULT_UNIVERSE } from './auth/authclient'; +export { Compute, ComputeOptions } from './auth/computeclient'; +export { CredentialBody, CredentialRequest, Credentials, JWTInput, } from './auth/credentials'; +export { GCPEnv } from './auth/envDetect'; +export { GoogleAuthOptions, ProjectIdCallback } from './auth/googleauth'; +export { IAMAuth, RequestMetadata } from './auth/iam'; +export { IdTokenClient, IdTokenProvider } from './auth/idtokenclient'; +export { Claims, JWTAccess } from './auth/jwtaccess'; +export { JWT, JWTOptions } from './auth/jwtclient'; +export { Impersonated, ImpersonatedOptions } from './auth/impersonated'; +export { Certificates, CodeChallengeMethod, CodeVerifierResults, GenerateAuthUrlOpts, GetTokenOptions, OAuth2Client, OAuth2ClientOptions, RefreshOptions, TokenInfo, VerifyIdTokenOptions, } from './auth/oauth2client'; +export { LoginTicket, TokenPayload } from './auth/loginticket'; +export { UserRefreshClient, UserRefreshClientOptions, } from './auth/refreshclient'; +export { AwsClient, AwsClientOptions } from './auth/awsclient'; +export { IdentityPoolClient, IdentityPoolClientOptions, } from './auth/identitypoolclient'; +export { ExternalAccountClient, ExternalAccountClientOptions, } from './auth/externalclient'; +export { BaseExternalAccountClient, BaseExternalAccountClientOptions, } from './auth/baseexternalclient'; +export { CredentialAccessBoundary, DownscopedClient, } from './auth/downscopedclient'; +export { PluggableAuthClient, PluggableAuthClientOptions, } from './auth/pluggable-auth-client'; +export { PassThroughClient } from './auth/passthrough'; +export { DefaultTransporter } from './transporters'; +declare const auth: GoogleAuth; +export { auth, GoogleAuth }; diff --git a/node_modules/google-auth-library/build/src/index.js b/node_modules/google-auth-library/build/src/index.js new file mode 100644 index 0000000..5efe355 --- /dev/null 
+++ b/node_modules/google-auth-library/build/src/index.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PassThroughClient = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gcpMetadata = void 0; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +const googleauth_1 = require("./auth/googleauth"); +Object.defineProperty(exports, "GoogleAuth", { enumerable: true, get: function () { return googleauth_1.GoogleAuth; } }); +exports.gcpMetadata = require("gcp-metadata"); +var authclient_1 = require("./auth/authclient"); +Object.defineProperty(exports, "AuthClient", { enumerable: true, get: function () { return authclient_1.AuthClient; } }); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", { enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } }); +var computeclient_1 = require("./auth/computeclient"); +Object.defineProperty(exports, "Compute", { enumerable: true, get: function () { return computeclient_1.Compute; } }); +var envDetect_1 = require("./auth/envDetect"); +Object.defineProperty(exports, "GCPEnv", { enumerable: true, get: function () { return envDetect_1.GCPEnv; } }); +var iam_1 = require("./auth/iam"); +Object.defineProperty(exports, "IAMAuth", { enumerable: true, get: function () { return iam_1.IAMAuth; } }); +var idtokenclient_1 = require("./auth/idtokenclient"); +Object.defineProperty(exports, "IdTokenClient", { enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } }); +var jwtaccess_1 = require("./auth/jwtaccess"); +Object.defineProperty(exports, "JWTAccess", { enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } }); +var jwtclient_1 = require("./auth/jwtclient"); +Object.defineProperty(exports, "JWT", { enumerable: true, get: function () { return jwtclient_1.JWT; } }); +var impersonated_1 = require("./auth/impersonated"); +Object.defineProperty(exports, "Impersonated", { enumerable: true, get: function () { return impersonated_1.Impersonated; } }); +var oauth2client_1 = require("./auth/oauth2client"); +Object.defineProperty(exports, "CodeChallengeMethod", { enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } }); +Object.defineProperty(exports, "OAuth2Client", { enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } }); +var loginticket_1 = require("./auth/loginticket"); +Object.defineProperty(exports, "LoginTicket", { enumerable: true, get: function () { return loginticket_1.LoginTicket; } }); +var 
refreshclient_1 = require("./auth/refreshclient"); +Object.defineProperty(exports, "UserRefreshClient", { enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } }); +var awsclient_1 = require("./auth/awsclient"); +Object.defineProperty(exports, "AwsClient", { enumerable: true, get: function () { return awsclient_1.AwsClient; } }); +var identitypoolclient_1 = require("./auth/identitypoolclient"); +Object.defineProperty(exports, "IdentityPoolClient", { enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } }); +var externalclient_1 = require("./auth/externalclient"); +Object.defineProperty(exports, "ExternalAccountClient", { enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } }); +var baseexternalclient_1 = require("./auth/baseexternalclient"); +Object.defineProperty(exports, "BaseExternalAccountClient", { enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } }); +var downscopedclient_1 = require("./auth/downscopedclient"); +Object.defineProperty(exports, "DownscopedClient", { enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } }); +var pluggable_auth_client_1 = require("./auth/pluggable-auth-client"); +Object.defineProperty(exports, "PluggableAuthClient", { enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } }); +var passthrough_1 = require("./auth/passthrough"); +Object.defineProperty(exports, "PassThroughClient", { enumerable: true, get: function () { return passthrough_1.PassThroughClient; } }); +var transporters_1 = require("./transporters"); +Object.defineProperty(exports, "DefaultTransporter", { enumerable: true, get: function () { return transporters_1.DefaultTransporter; } }); +const auth = new googleauth_1.GoogleAuth(); +exports.auth = auth; diff --git a/node_modules/google-auth-library/build/src/messages.d.ts b/node_modules/google-auth-library/build/src/messages.d.ts new file mode 100644 index 0000000..9de99bc --- /dev/null +++ b/node_modules/google-auth-library/build/src/messages.d.ts @@ -0,0 +1,11 @@ +export declare enum WarningTypes { + WARNING = "Warning", + DEPRECATION = "DeprecationWarning" +} +export declare function warn(warning: Warning): void; +export interface Warning { + code: string; + type: WarningTypes; + message: string; + warned?: boolean; +} diff --git a/node_modules/google-auth-library/build/src/messages.js b/node_modules/google-auth-library/build/src/messages.js new file mode 100644 index 0000000..55ee1e8 --- /dev/null +++ b/node_modules/google-auth-library/build/src/messages.js @@ -0,0 +1,39 @@ +"use strict"; +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.warn = exports.WarningTypes = void 0; +var WarningTypes; +(function (WarningTypes) { + WarningTypes["WARNING"] = "Warning"; + WarningTypes["DEPRECATION"] = "DeprecationWarning"; +})(WarningTypes || (exports.WarningTypes = WarningTypes = {})); +function warn(warning) { + // Only show a given warning once + if (warning.warned) { + return; + } + warning.warned = true; + if (typeof process !== 'undefined' && process.emitWarning) { + // @types/node doesn't recognize the emitWarning syntax which + // accepts a config object, so `as any` it is + // https://nodejs.org/docs/latest-v8.x/api/process.html#process_process_emitwarning_warning_options + // eslint-disable-next-line @typescript-eslint/no-explicit-any + process.emitWarning(warning.message, warning); + } + else { + console.warn(warning.message); + } +} +exports.warn = warn; diff --git a/node_modules/google-auth-library/build/src/options.d.ts b/node_modules/google-auth-library/build/src/options.d.ts new file mode 100644 index 0000000..ede9689 --- /dev/null +++ b/node_modules/google-auth-library/build/src/options.d.ts @@ -0,0 +1 @@ +export declare function validate(options: any): void; diff --git a/node_modules/google-auth-library/build/src/options.js b/node_modules/google-auth-library/build/src/options.js new file mode 100644 index 0000000..c7ff237 --- /dev/null +++ b/node_modules/google-auth-library/build/src/options.js @@ -0,0 +1,35 @@ +"use strict"; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.validate = void 0; +// Accepts an options object passed from the user to the API. In the +// previous version of the API, it referred to a `Request` options object. +// Now it refers to an Axiox Request Config object. This is here to help +// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function validate(options) { + const vpairs = [ + { invalid: 'uri', expected: 'url' }, + { invalid: 'json', expected: 'data' }, + { invalid: 'qs', expected: 'params' }, + ]; + for (const pair of vpairs) { + if (options[pair.invalid]) { + const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. 
Please see https://github.com/axios/axios to learn more about the valid request options.`; + throw new Error(e); + } + } +} +exports.validate = validate; diff --git a/node_modules/google-auth-library/build/src/transporters.d.ts b/node_modules/google-auth-library/build/src/transporters.d.ts new file mode 100644 index 0000000..b0ff7f2 --- /dev/null +++ b/node_modules/google-auth-library/build/src/transporters.d.ts @@ -0,0 +1,40 @@ +import { Gaxios, GaxiosError, GaxiosOptions, GaxiosPromise, GaxiosResponse } from 'gaxios'; +export interface Transporter { + defaults?: GaxiosOptions; + request(opts: GaxiosOptions): GaxiosPromise; +} +export interface BodyResponseCallback { + (err: Error | null, res?: GaxiosResponse | null): void; +} +export interface RequestError extends GaxiosError { + errors: Error[]; +} +export declare class DefaultTransporter implements Transporter { + /** + * Default user agent. + */ + static readonly USER_AGENT: string; + /** + * A configurable, replacable `Gaxios` instance. + */ + instance: Gaxios; + /** + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. + */ + configure(opts?: GaxiosOptions): GaxiosOptions; + /** + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. + */ + request(opts: GaxiosOptions): GaxiosPromise; + get defaults(): GaxiosOptions; + set defaults(opts: GaxiosOptions); + /** + * Changes the error to include details from the body. + */ + private processError; +} diff --git a/node_modules/google-auth-library/build/src/transporters.js b/node_modules/google-auth-library/build/src/transporters.js new file mode 100644 index 0000000..365c2b3 --- /dev/null +++ b/node_modules/google-auth-library/build/src/transporters.js @@ -0,0 +1,109 @@ +"use strict"; +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultTransporter = void 0; +const gaxios_1 = require("gaxios"); +const options_1 = require("./options"); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = require('../../package.json'); +const PRODUCT_NAME = 'google-api-nodejs-client'; +class DefaultTransporter { + constructor() { + /** + * A configurable, replacable `Gaxios` instance. + */ + this.instance = new gaxios_1.Gaxios(); + } + /** + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. 
+ */ + configure(opts = {}) { + opts.headers = opts.headers || {}; + if (typeof window === 'undefined') { + // set transporter user agent if not in browser + const uaValue = opts.headers['User-Agent']; + if (!uaValue) { + opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; + } + else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { + opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`; + } + // track google-auth-library-nodejs version: + if (!opts.headers['x-goog-api-client']) { + const nodeVersion = process.version.replace(/^v/, ''); + opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; + } + } + return opts; + } + /** + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. + */ + request(opts) { + // ensure the user isn't passing in request-style options + opts = this.configure(opts); + (0, options_1.validate)(opts); + return this.instance.request(opts).catch(e => { + throw this.processError(e); + }); + } + get defaults() { + return this.instance.defaults; + } + set defaults(opts) { + this.instance.defaults = opts; + } + /** + * Changes the error to include details from the body. + */ + processError(e) { + const res = e.response; + const err = e; + const body = res ? res.data : null; + if (res && body && body.error && res.status !== 200) { + if (typeof body.error === 'string') { + err.message = body.error; + err.status = res.status; + } + else if (Array.isArray(body.error.errors)) { + err.message = body.error.errors + .map((err2) => err2.message) + .join('\n'); + err.code = body.error.code; + err.errors = body.error.errors; + } + else { + err.message = body.error.message; + err.code = body.error.code; + } + } + else if (res && res.status >= 400) { + // Consider all 4xx and 5xx responses errors. + err.message = body; + err.status = res.status; + } + return err; + } +} +exports.DefaultTransporter = DefaultTransporter; +/** + * Default user agent. + */ +DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; diff --git a/node_modules/google-auth-library/build/src/util.d.ts b/node_modules/google-auth-library/build/src/util.d.ts new file mode 100644 index 0000000..2c01475 --- /dev/null +++ b/node_modules/google-auth-library/build/src/util.d.ts @@ -0,0 +1,151 @@ +/** + * A utility for converting snake_case to camelCase. + * + * For, for example `my_snake_string` becomes `mySnakeString`. + * + * @internal + */ +export type SnakeToCamel<S> = S extends `${infer FirstWord}_${infer Remainder}` ? `${FirstWord}${Capitalize<SnakeToCamel<Remainder>>}` : S; +/** + * A utility for converting an type's keys from snake_case + * to camelCase, if the keys are strings. + * + * For example: + * + * ```ts + * { + * my_snake_string: boolean; + * myCamelString: string; + * my_snake_obj: { + * my_snake_obj_string: string; + * }; + * } + * ``` + * + * becomes: + * + * ```ts + * { + * mySnakeString: boolean; + * myCamelString: string; + * mySnakeObj: { + * mySnakeObjString: string; + * } + * } + * ``` + * + * @remarks + * + * The generated documentation for the camelCase'd properties won't be available + * until {@link https://github.com/microsoft/TypeScript/issues/50715} has been + * resolved. + * + * @internal + */ +export type SnakeToCamelObject<T> = { + [K in keyof T as SnakeToCamel<K>]: T[K] extends {} ?
SnakeToCamelObject<T[K]> : T[K]; +}; +/** + * A utility for adding camelCase versions of a type's snake_case keys, if the + * keys are strings, preserving any existing keys. + * + * For example: + * + * ```ts + * { + * my_snake_boolean: boolean; + * myCamelString: string; + * my_snake_obj: { + * my_snake_obj_string: string; + * }; + * } + * ``` + * + * becomes: + * + * ```ts + * { + * my_snake_boolean: boolean; + * mySnakeBoolean: boolean; + * myCamelString: string; + * my_snake_obj: { + * my_snake_obj_string: string; + * }; + * mySnakeObj: { + * mySnakeObjString: string; + * } + * } + * ``` + * @remarks + * + * The generated documentation for the camelCase'd properties won't be available + * until {@link https://github.com/microsoft/TypeScript/issues/50715} has been + * resolved. + * + * Tracking: {@link https://github.com/googleapis/google-auth-library-nodejs/issues/1686} + * + * @internal + */ +export type OriginalAndCamel<T> = { + [K in keyof T as K | SnakeToCamel<K>]: T[K] extends {} ? OriginalAndCamel<T[K]> : T[K]; +}; +/** + * Returns the camel case of a provided string. + * + * @remarks + * + * Match any `_` and not `_` pair, then return the uppercase of the not `_` + * character. + * + * @internal + * + * @param str the string to convert + * @returns the camelCase'd string + */ +export declare function snakeToCamel<T extends string>(str: T): SnakeToCamel<T>; +/** + * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference + * for original, non-camelCase key. + * + * @param obj object to lookup a value in + * @returns a `get` function for getting `obj[key || snakeKey]`, if available + */ +export declare function originalOrCamelOptions<T extends {}>(obj?: T): { + get: <K extends keyof OriginalAndCamel<T> & string>(key: K) => OriginalAndCamel<T>[K]; +}; +export interface LRUCacheOptions { + /** + * The maximum number of items to cache. + */ + capacity: number; + /** + * An optional max age for items in milliseconds. + */ + maxAge?: number; +} +/** + * A simple LRU cache utility. + * Not meant for external usage. + * + * @experimental + * @internal + */ +export declare class LRUCache<T> { + #private; + readonly capacity: number; + maxAge?: number; + constructor(options: LRUCacheOptions); + /** + * Add an item to the cache. + * + * @param key the key to upsert + * @param value the value of the key + */ + set(key: string, value: T): void; + /** + * Get an item from the cache. + * + * @param key the key to retrieve + */ + get(key: string): T | undefined; +} diff --git a/node_modules/google-auth-library/build/src/util.js b/node_modules/google-auth-library/build/src/util.js new file mode 100644 index 0000000..8127d4d --- /dev/null +++ b/node_modules/google-auth-library/build/src/util.js @@ -0,0 +1,125 @@ +"use strict"; +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ?
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LRUCache = exports.originalOrCamelOptions = exports.snakeToCamel = void 0; +/** + * Returns the camel case of a provided string. + * + * @remarks + * + * Match any `_` and not `_` pair, then return the uppercase of the not `_` + * character. + * + * @internal + * + * @param str the string to convert + * @returns the camelCase'd string + */ +function snakeToCamel(str) { + return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); +} +exports.snakeToCamel = snakeToCamel; +/** + * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference + * for original, non-camelCase key. + * + * @param obj object to lookup a value in + * @returns a `get` function for getting `obj[key || snakeKey]`, if available + */ +function originalOrCamelOptions(obj) { + /** + * + * @param key an index of object, preferably snake_case + * @returns the value `obj[key || snakeKey]`, if available + */ + function get(key) { + var _a; + const o = (obj || {}); + return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; + } + return { get }; +} +exports.originalOrCamelOptions = originalOrCamelOptions; +/** + * A simple LRU cache utility. + * Not meant for external usage. + * + * @experimental + * @internal + */ +class LRUCache { + constructor(options) { + _LRUCache_instances.add(this); + /** + * Maps are in order. Thus, the older item is the first item. + * + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} + */ + _LRUCache_cache.set(this, new Map()); + this.capacity = options.capacity; + this.maxAge = options.maxAge; + } + /** + * Add an item to the cache. + * + * @param key the key to upsert + * @param value the value of the key + */ + set(key, value) { + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + } + /** + * Get an item from the cache. + * + * @param key the key to retrieve + */ + get(key) { + const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); + if (!item) + return; + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + return item.value; + } +} +exports.LRUCache = LRUCache; +_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); + __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { + value, + lastAccessed: Date.now(), + }); +}, _LRUCache_evict = function _LRUCache_evict() { + const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; + /** + * Because we know Maps are in order, this item is both the + * last item in the list (capacity) and oldest (maxAge). 
+ */ + let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + while (!oldestItem.done && + (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many + oldestItem.value[1].lastAccessed < cutoffDate) // too old + ) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); + oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + } +}; diff --git a/node_modules/google-auth-library/package.json b/node_modules/google-auth-library/package.json new file mode 100644 index 0000000..ce217c3 --- /dev/null +++ b/node_modules/google-auth-library/package.json @@ -0,0 +1,90 @@ +{ + "name": "google-auth-library", + "version": "9.7.0", + "author": "Google Inc.", + "description": "Google APIs Authentication Client Library for Node.js", + "engines": { + "node": ">=14" + }, + "main": "./build/src/index.js", + "types": "./build/src/index.d.ts", + "repository": "googleapis/google-auth-library-nodejs.git", + "keywords": [ + "google", + "api", + "google apis", + "client", + "client library" + ], + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.1.1", + "gcp-metadata": "^6.1.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0" + }, + "devDependencies": { + "@compodoc/compodoc": "^1.1.7", + "@types/base64-js": "^1.2.5", + "@types/chai": "^4.1.7", + "@types/jws": "^3.1.0", + "@types/mocha": "^9.0.0", + "@types/mv": "^2.1.0", + "@types/ncp": "^2.0.1", + "@types/node": "^20.4.2", + "@types/sinon": "^10.0.0", + "assert-rejects": "^1.0.0", + "c8": "^8.0.0", + "chai": "^4.2.0", + "codecov": "^3.0.2", + "execa": "^5.0.0", + "gts": "^5.0.0", + "is-docker": "^2.0.0", + "karma": "^6.0.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-firefox-launcher": "^2.0.0", + "karma-mocha": "^2.0.0", + "karma-sourcemap-loader": "^0.4.0", + "karma-webpack": "5.0.0", + "keypair": "^1.0.4", + "linkinator": "^4.0.0", + "mocha": "^9.2.2", + "mv": "^2.1.1", + "ncp": "^2.0.0", + "nock": "^13.0.0", + "null-loader": "^4.0.0", + "puppeteer": "^21.0.0", + "sinon": "^15.0.0", + "ts-loader": "^8.0.0", + "typescript": "^5.1.6", + "webpack": "^5.21.2", + "webpack-cli": "^4.0.0" + }, + "files": [ + "build/src", + "!build/src/**/*.map" + ], + "scripts": { + "test": "c8 mocha build/test", + "clean": "gts clean", + "prepare": "npm run compile", + "lint": "gts check", + "compile": "tsc -p .", + "fix": "gts fix", + "pretest": "npm run compile -- --sourceMap", + "docs": "compodoc src/", + "samples-setup": "cd samples/ && npm link ../ && npm run setup && cd ../", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "system-test": "mocha build/system-test --timeout 60000", + "presystem-test": "npm run compile -- --sourceMap", + "webpack": "webpack", + "browser-test": "karma start", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "precompile": "gts clean" + }, + "license": "Apache-2.0" +} diff --git a/node_modules/gtoken/CHANGELOG.md b/node_modules/gtoken/CHANGELOG.md new file mode 100644 index 0000000..6b5ccce --- /dev/null +++ b/node_modules/gtoken/CHANGELOG.md @@ -0,0 +1,372 @@ +# Changelog + +[npm history][1] + +[1]: https://www.npmjs.com/package/gtoken?activeTab=versions + +## [7.1.0](https://github.com/googleapis/node-gtoken/compare/v7.0.1...v7.1.0) (2024-02-01) + + +### Features + +* Enable Token Retries ([#481](https://github.com/googleapis/node-gtoken/issues/481)) 
([ed9f91e](https://github.com/googleapis/node-gtoken/commit/ed9f91e4764744426de95fd2510b68ee53677514)) + +## [7.0.1](https://github.com/googleapis/node-gtoken/compare/v7.0.0...v7.0.1) (2023-07-12) + + +### Bug Fixes + +* **deps:** Update gaxios to 6.0.0 ([#462](https://github.com/googleapis/node-gtoken/issues/462)) ([c05a2b3](https://github.com/googleapis/node-gtoken/commit/c05a2b35d1bb369fc54f80784d9361c0d6cbc2e7)) + +## [7.0.0](https://github.com/googleapis/node-gtoken/compare/v6.1.2...v7.0.0) (2023-07-11) + + +### ⚠ BREAKING CHANGES + +* move to node 14 as minimum version ([#457](https://github.com/googleapis/node-gtoken/issues/457)) +* remove support for conversion of *.p12 to *.pem ([#452](https://github.com/googleapis/node-gtoken/issues/452)) + +### Features + +* Remove support for conversion of *.p12 to *.pem ([#452](https://github.com/googleapis/node-gtoken/issues/452)) ([522a96d](https://github.com/googleapis/node-gtoken/commit/522a96dd38ad5d486e9337f72efdf1a5523fded4)) + + +### Miscellaneous Chores + +* Move to node 14 as minimum version ([#457](https://github.com/googleapis/node-gtoken/issues/457)) ([429df81](https://github.com/googleapis/node-gtoken/commit/429df814cd4224d5eacce72cfe8e924e53cc7f30)) + +## [6.1.2](https://github.com/googleapis/node-gtoken/compare/v6.1.1...v6.1.2) (2022-08-23) + + +### Bug Fixes + +* remove pip install statements ([#1546](https://github.com/googleapis/node-gtoken/issues/1546)) ([#440](https://github.com/googleapis/node-gtoken/issues/440)) ([6fb8562](https://github.com/googleapis/node-gtoken/commit/6fb856207b07112096c033adb6d3f0edf5e5093d)) + +## [6.1.1](https://github.com/googleapis/node-gtoken/compare/v6.1.0...v6.1.1) (2022-08-04) + + +### Bug Fixes + +* **deps:** update gaxios to 5.0.1 ([#436](https://github.com/googleapis/node-gtoken/issues/436)) ([0d5d0e9](https://github.com/googleapis/node-gtoken/commit/0d5d0e9c6ec51911f1e44d97c02dc0cd791c7d05)) + +## [6.1.0](https://github.com/googleapis/node-gtoken/compare/v6.0.1...v6.1.0) (2022-06-28) + + +### Features + +* allow customizing the http client ([#426](https://github.com/googleapis/node-gtoken/issues/426)) ([408ad04](https://github.com/googleapis/node-gtoken/commit/408ad048a2595313717bf427fc34aa29a6e7fb3c)) + +## [6.0.1](https://github.com/googleapis/node-gtoken/compare/v6.0.0...v6.0.1) (2022-06-07) + + +### Bug Fixes + +* **deps:** update dependency google-p12-pem to v4 ([#430](https://github.com/googleapis/node-gtoken/issues/430)) ([bd0848b](https://github.com/googleapis/node-gtoken/commit/bd0848b15554742e2fd73b05073bd84e1aec2a3f)) + +## [6.0.0](https://github.com/googleapis/node-gtoken/compare/v5.3.2...v6.0.0) (2022-05-10) + + +### ⚠ BREAKING CHANGES + +* update library to use Node 12 (#428) + +### Build System + +* update library to use Node 12 ([#428](https://github.com/googleapis/node-gtoken/issues/428)) ([288b662](https://github.com/googleapis/node-gtoken/commit/288b662d990ce53a6824ddc67c384253487fdc04)) + +### [5.3.2](https://github.com/googleapis/node-gtoken/compare/v5.3.1...v5.3.2) (2022-01-28) + + +### Bug Fixes + +* upgrade google-p12-pem to 3.1.3 ([#413](https://github.com/googleapis/node-gtoken/issues/413)) ([c3cebaf](https://github.com/googleapis/node-gtoken/commit/c3cebaf620c62f57385804fe5d852ce3e6398dc1)) + +### [5.3.1](https://www.github.com/googleapis/node-gtoken/compare/v5.3.0...v5.3.1) (2021-08-11) + + +### Bug Fixes + +* **build:** migrate to using main branch ([#392](https://www.github.com/googleapis/node-gtoken/issues/392)) 
([992e3c5](https://www.github.com/googleapis/node-gtoken/commit/992e3c5e1520a376269b2476e5ce225f6ee96e2b)) + +## [5.3.0](https://www.github.com/googleapis/node-gtoken/compare/v5.2.1...v5.3.0) (2021-06-10) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#369](https://www.github.com/googleapis/node-gtoken/issues/369)) ([3142215](https://www.github.com/googleapis/node-gtoken/commit/3142215277ae2daa33f7fb3300f09ef438ded01f)) + +### [5.2.1](https://www.github.com/googleapis/node-gtoken/compare/v5.2.0...v5.2.1) (2021-01-26) + + +### Bug Fixes + +* **deps:** remove dependency on mime ([#357](https://www.github.com/googleapis/node-gtoken/issues/357)) ([0a1e6b3](https://www.github.com/googleapis/node-gtoken/commit/0a1e6b32206364106631c0ca8cdd2e325de2af32)) + +## [5.2.0](https://www.github.com/googleapis/node-gtoken/compare/v5.1.0...v5.2.0) (2021-01-14) + + +### Features + +* request new tokens before they expire ([#349](https://www.github.com/googleapis/node-gtoken/issues/349)) ([e84d9a3](https://www.github.com/googleapis/node-gtoken/commit/e84d9a31517c1449141708a0a2cddd9d0129fa95)) + +## [5.1.0](https://www.github.com/googleapis/node-gtoken/compare/v5.0.5...v5.1.0) (2020-11-14) + + +### Features + +* dedupe concurrent requests ([#351](https://www.github.com/googleapis/node-gtoken/issues/351)) ([9001f1d](https://www.github.com/googleapis/node-gtoken/commit/9001f1d00931f480d40fa323c9b527beaef2254a)) + +### [5.0.5](https://www.github.com/googleapis/node-gtoken/compare/v5.0.4...v5.0.5) (2020-10-22) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v4 ([#342](https://www.github.com/googleapis/node-gtoken/issues/342)) ([7954a19](https://www.github.com/googleapis/node-gtoken/commit/7954a197e923469b031f0833a2016fa0378285b1)) + +### [5.0.4](https://www.github.com/googleapis/node-gtoken/compare/v5.0.3...v5.0.4) (2020-10-06) + + +### Bug Fixes + +* **deps:** upgrade google-p12-pem ([#337](https://www.github.com/googleapis/node-gtoken/issues/337)) ([77a749d](https://www.github.com/googleapis/node-gtoken/commit/77a749d646c7ccc68e974f27827a9d538dfea784)) + +### [5.0.3](https://www.github.com/googleapis/node-gtoken/compare/v5.0.2...v5.0.3) (2020-07-27) + + +### Bug Fixes + +* move gitattributes files to node templates ([#322](https://www.github.com/googleapis/node-gtoken/issues/322)) ([1d1786b](https://www.github.com/googleapis/node-gtoken/commit/1d1786b8915cd9a33577237ec6a6148a29e11a88)) + +### [5.0.2](https://www.github.com/googleapis/node-gtoken/compare/v5.0.1...v5.0.2) (2020-07-09) + + +### Bug Fixes + +* typeo in nodejs .gitattribute ([#311](https://www.github.com/googleapis/node-gtoken/issues/311)) ([8e17b4c](https://www.github.com/googleapis/node-gtoken/commit/8e17b4c0757832b2d31178684a6b24e1759d9f76)) + +### [5.0.1](https://www.github.com/googleapis/node-gtoken/compare/v5.0.0...v5.0.1) (2020-04-13) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v3 ([#287](https://www.github.com/googleapis/node-gtoken/issues/287)) ([033731e](https://www.github.com/googleapis/node-gtoken/commit/033731e128fef0034b07b13183044e5060809418)) +* **deps:** update dependency google-p12-pem to v3 ([#280](https://www.github.com/googleapis/node-gtoken/issues/280)) ([25121b0](https://www.github.com/googleapis/node-gtoken/commit/25121b00cc9a4d32854f36ea8bc4bbd2cb77afbb)) +* apache license URL ([#468](https://www.github.com/googleapis/node-gtoken/issues/468)) ([#293](https://www.github.com/googleapis/node-gtoken/issues/293)) 
([14a5bcd](https://www.github.com/googleapis/node-gtoken/commit/14a5bcd52d7b18d787c620451471e904784222d9)) + +## [5.0.0](https://www.github.com/googleapis/node-gtoken/compare/v4.1.4...v5.0.0) (2020-03-24) + + +### ⚠ BREAKING CHANGES + +* drop Node 8 from engines (#284) +* typescript@3.7.x introduced breaking changes to compiled code + +### Features + +* drop Node 8 from engines ([#284](https://www.github.com/googleapis/node-gtoken/issues/284)) ([209e007](https://www.github.com/googleapis/node-gtoken/commit/209e00746116a82a3cf9acc158aff12a4971f3d0)) + + +### Build System + +* update gts and typescript ([#283](https://www.github.com/googleapis/node-gtoken/issues/283)) ([ff076dc](https://www.github.com/googleapis/node-gtoken/commit/ff076dcb3da229238e7bed28d739c48986652c78)) + +### [4.1.4](https://www.github.com/googleapis/node-gtoken/compare/v4.1.3...v4.1.4) (2020-01-06) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([f1ae7b6](https://www.github.com/googleapis/node-gtoken/commit/f1ae7b64ead1c918546ae5bbe8546dfb4ecc788a)) +* **deps:** update dependency jws to v4 ([#251](https://www.github.com/googleapis/node-gtoken/issues/251)) ([e13542f](https://www.github.com/googleapis/node-gtoken/commit/e13542f888a81ed3ced0023e9b78ed25264b1d1c)) + +### [4.1.3](https://www.github.com/googleapis/node-gtoken/compare/v4.1.2...v4.1.3) (2019-11-15) + + +### Bug Fixes + +* **deps:** use typescript ~3.6.0 ([#246](https://www.github.com/googleapis/node-gtoken/issues/246)) ([5f725b7](https://www.github.com/googleapis/node-gtoken/commit/5f725b71f080e83058b1a23340acadc0c8704123)) + +### [4.1.2](https://www.github.com/googleapis/node-gtoken/compare/v4.1.1...v4.1.2) (2019-11-13) + + +### Bug Fixes + +* **docs:** add jsdoc-region-tag plugin ([#242](https://www.github.com/googleapis/node-gtoken/issues/242)) ([994c5cc](https://www.github.com/googleapis/node-gtoken/commit/994c5ccf92731599aa63b84c29a9d5f6b1431cc5)) + +### [4.1.1](https://www.github.com/googleapis/node-gtoken/compare/v4.1.0...v4.1.1) (2019-10-31) + + +### Bug Fixes + +* **deps:** update gaxios to 2.1.0 ([#238](https://www.github.com/googleapis/node-gtoken/issues/238)) ([bb12064](https://www.github.com/googleapis/node-gtoken/commit/bb1206420388399ef8992efe54c70bdb3fdcd965)) + +## [4.1.0](https://www.github.com/googleapis/node-gtoken/compare/v4.0.0...v4.1.0) (2019-09-24) + + +### Features + +* allow upstream libraries to force token refresh ([#229](https://www.github.com/googleapis/node-gtoken/issues/229)) ([1fd4dd1](https://www.github.com/googleapis/node-gtoken/commit/1fd4dd1)) + +## [4.0.0](https://www.github.com/googleapis/node-gtoken/compare/v3.0.2...v4.0.0) (2019-07-09) + + +### ⚠ BREAKING CHANGES + +* This commit creates multiple breaking changes. The `getToken()` +method previously returned `Promise`, where the string was the +`access_token` returned from the response. However, the `oauth2` endpoint could +return a variety of other fields, such as an `id_token` in special cases. + +```js +const token = await getToken(); +// old response: 'some.access.token' +// new response: { access_token: 'some.access.token'} +``` + +To further support this change, the `GoogleToken` class no longer exposes +a `token` variable. It now exposes `rawToken`, `accessToken`, and `idToken` +fields which can be used to access the relevant values returned in the +response. 
+ +### Bug Fixes + +* expose all fields from response ([#218](https://www.github.com/googleapis/node-gtoken/issues/218)) ([d463370](https://www.github.com/googleapis/node-gtoken/commit/d463370)) + +### [3.0.2](https://www.github.com/googleapis/node-gtoken/compare/v3.0.1...v3.0.2) (2019-06-26) + + +### Bug Fixes + +* **docs:** make anchors work in jsdoc ([#215](https://www.github.com/googleapis/node-gtoken/issues/215)) ([c5f6c89](https://www.github.com/googleapis/node-gtoken/commit/c5f6c89)) + +### [3.0.1](https://www.github.com/googleapis/node-gtoken/compare/v3.0.0...v3.0.1) (2019-06-13) + + +### Bug Fixes + +* **docs:** move to new client docs URL ([#212](https://www.github.com/googleapis/node-gtoken/issues/212)) ([b7a8c75](https://www.github.com/googleapis/node-gtoken/commit/b7a8c75)) + +## [3.0.0](https://www.github.com/googleapis/node-gtoken/compare/v2.3.3...v3.0.0) (2019-05-07) + + +### Bug Fixes + +* **deps:** update dependency gaxios to v2 ([#191](https://www.github.com/googleapis/node-gtoken/issues/191)) ([da65ea7](https://www.github.com/googleapis/node-gtoken/commit/da65ea7)) +* **deps:** update dependency google-p12-pem to v2 ([#196](https://www.github.com/googleapis/node-gtoken/issues/196)) ([b510f06](https://www.github.com/googleapis/node-gtoken/commit/b510f06)) +* fs.readFile does not exist in browser ([#186](https://www.github.com/googleapis/node-gtoken/issues/186)) ([a16d8e7](https://www.github.com/googleapis/node-gtoken/commit/a16d8e7)) + + +### Build System + +* upgrade engines field to >=8.10.0 ([#194](https://www.github.com/googleapis/node-gtoken/issues/194)) ([ee4d6c8](https://www.github.com/googleapis/node-gtoken/commit/ee4d6c8)) + + +### BREAKING CHANGES + +* upgrade engines field to >=8.10.0 (#194) + +## v2.3.3 + +03-13-2019 14:54 PDT + +### Bug Fixes +- fix: propagate error message ([#173](https://github.com/google/node-gtoken/pull/173)) + +### Documentation +- docs: update links in contrib guide ([#171](https://github.com/google/node-gtoken/pull/171)) +- docs: move CONTRIBUTING.md to root ([#166](https://github.com/google/node-gtoken/pull/166)) +- docs: add lint/fix example to contributing guide ([#164](https://github.com/google/node-gtoken/pull/164)) + +### Internal / Testing Changes +- build: Add docuploader credentials to node publish jobs ([#176](https://github.com/google/node-gtoken/pull/176)) +- build: use node10 to run samples-test, system-test etc ([#175](https://github.com/google/node-gtoken/pull/175)) +- build: update release configuration +- chore(deps): update dependency mocha to v6 +- build: use linkinator for docs test ([#170](https://github.com/google/node-gtoken/pull/170)) +- build: create docs test npm scripts ([#169](https://github.com/google/node-gtoken/pull/169)) +- build: test using @grpc/grpc-js in CI ([#168](https://github.com/google/node-gtoken/pull/168)) +- build: ignore googleapis.com in doc link check ([#162](https://github.com/google/node-gtoken/pull/162)) +- build: check for 404s on all docs + +## v2.3.2 + +01-09-2019 13:40 PST + +### Documentation +- docs: generate docs with compodoc ([#154](https://github.com/googleapis/node-gtoken/pull/154)) +- docs: fix up the readme ([#153](https://github.com/googleapis/node-gtoken/pull/153)) + +### Internal / Testing Changes +- build: Re-generated to pick up changes in the API or client library generator. 
([#158](https://github.com/googleapis/node-gtoken/pull/158)) +- build: check broken links in generated docs ([#152](https://github.com/googleapis/node-gtoken/pull/152)) +- fix: add a system test and get it passing ([#150](https://github.com/googleapis/node-gtoken/pull/150)) +- chore(build): inject yoshi automation key ([#149](https://github.com/googleapis/node-gtoken/pull/149)) + +## v2.3.1 + +12-10-2018 15:28 PST + +### Dependencies +- fix(deps): update dependency pify to v4 ([#87](https://github.com/google/node-gtoken/pull/87)) +- fix(deps): use gaxios for http requests ([#125](https://github.com/google/node-gtoken/pull/125)) + +### Internal / Testing Changes +- build: add Kokoro configs for autorelease ([#143](https://github.com/google/node-gtoken/pull/143)) +- chore: always nyc report before calling codecov ([#141](https://github.com/google/node-gtoken/pull/141)) +- chore: nyc ignore build/test by default ([#140](https://github.com/google/node-gtoken/pull/140)) +- chore: update synth metadata and templates ([#138](https://github.com/google/node-gtoken/pull/138)) +- fix(build): fix system key decryption ([#133](https://github.com/google/node-gtoken/pull/133)) +- chore(deps): update dependency typescript to ~3.2.0 ([#132](https://github.com/google/node-gtoken/pull/132)) +- chore: add a synth.metadata +- chore(deps): update dependency gts to ^0.9.0 ([#127](https://github.com/google/node-gtoken/pull/127)) +- chore: update eslintignore config ([#126](https://github.com/google/node-gtoken/pull/126)) +- chore: use latest npm on Windows ([#124](https://github.com/google/node-gtoken/pull/124)) +- chore: update CircleCI config ([#123](https://github.com/google/node-gtoken/pull/123)) +- chore: include build in eslintignore ([#120](https://github.com/google/node-gtoken/pull/120)) +- chore: update issue templates ([#116](https://github.com/google/node-gtoken/pull/116)) +- chore: remove old issue template ([#114](https://github.com/google/node-gtoken/pull/114)) +- build: run tests on node11 ([#113](https://github.com/google/node-gtoken/pull/113)) +- chore(deps): update dependency nock to v10 ([#111](https://github.com/google/node-gtoken/pull/111)) +- chores(build): do not collect sponge.xml from windows builds ([#112](https://github.com/google/node-gtoken/pull/112)) +- chore(deps): update dependency typescript to ~3.1.0 ([#110](https://github.com/google/node-gtoken/pull/110)) +- chores(build): run codecov on continuous builds ([#109](https://github.com/google/node-gtoken/pull/109)) +- chore: update new issue template ([#108](https://github.com/google/node-gtoken/pull/108)) +- chore: update CI config ([#105](https://github.com/google/node-gtoken/pull/105)) +- Update kokoro config ([#103](https://github.com/google/node-gtoken/pull/103)) +- Update CI config ([#101](https://github.com/google/node-gtoken/pull/101)) +- Don't publish sourcemaps ([#99](https://github.com/google/node-gtoken/pull/99)) +- Update kokoro config ([#97](https://github.com/google/node-gtoken/pull/97)) +- test: remove appveyor config ([#96](https://github.com/google/node-gtoken/pull/96)) +- Update CI config ([#95](https://github.com/google/node-gtoken/pull/95)) +- Enable prefer-const in the eslint config ([#94](https://github.com/google/node-gtoken/pull/94)) +- Enable no-var in eslint ([#93](https://github.com/google/node-gtoken/pull/93)) +- Update CI config ([#92](https://github.com/google/node-gtoken/pull/92)) +- Add synth and update CI config ([#91](https://github.com/google/node-gtoken/pull/91)) +- Retry npm install in CI 
([#90](https://github.com/google/node-gtoken/pull/90)) +- chore(deps): update dependency nyc to v13 ([#88](https://github.com/google/node-gtoken/pull/88)) +- chore: ignore package-log.json ([#86](https://github.com/google/node-gtoken/pull/86)) +- chore: update renovate config ([#83](https://github.com/google/node-gtoken/pull/83)) +- chore(deps): lock file maintenance ([#85](https://github.com/google/node-gtoken/pull/85)) +- chore: remove greenkeeper badge ([#82](https://github.com/google/node-gtoken/pull/82)) +- test: throw on deprecation ([#81](https://github.com/google/node-gtoken/pull/81)) +- chore(deps): update dependency typescript to v3 ([#80](https://github.com/google/node-gtoken/pull/80)) +- chore: move mocha options to mocha.opts ([#78](https://github.com/google/node-gtoken/pull/78)) +- chore(deps): lock file maintenance ([#79](https://github.com/google/node-gtoken/pull/79)) +- test: use strictEqual in tests ([#76](https://github.com/google/node-gtoken/pull/76)) +- chore(deps): lock file maintenance ([#77](https://github.com/google/node-gtoken/pull/77)) +- chore(deps): update dependency typescript to ~2.9.0 ([#75](https://github.com/google/node-gtoken/pull/75)) +- chore: Configure Renovate ([#74](https://github.com/google/node-gtoken/pull/74)) +- Update gts to the latest version 🚀 ([#73](https://github.com/google/node-gtoken/pull/73)) +- Add Code of Conduct +- build: start testing against Node 10 ([#69](https://github.com/google/node-gtoken/pull/69)) +- chore(package): update nyc to version 12.0.2 ([#67](https://github.com/google/node-gtoken/pull/67)) +- chore(package): update @types/node to version 10.0.3 ([#65](https://github.com/google/node-gtoken/pull/65)) + +### 2.0.0 +New features: +- API now supports callback and promise based workflows + +Breaking changes: +- `GoogleToken` is now a class type, and must be instantiated. +- `GoogleToken.expires_at` renamed to `GoogleToken.expiresAt` +- `GoogleToken.raw_token` renamed to `GoogleToken.rawToken` +- `GoogleToken.token_expires` renamed to `GoogleToken.tokenExpires` diff --git a/node_modules/gtoken/LICENSE b/node_modules/gtoken/LICENSE new file mode 100644 index 0000000..061e6a6 --- /dev/null +++ b/node_modules/gtoken/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Ryan Seys + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/gtoken/README.md b/node_modules/gtoken/README.md new file mode 100644 index 0000000..9dcb82e --- /dev/null +++ b/node_modules/gtoken/README.md @@ -0,0 +1,187 @@ +Google Cloud Platform logo + +# [node-gtoken](https://github.com/googleapis/node-gtoken) + +[![npm version][npm-image]][npm-url] +[![Known Vulnerabilities][snyk-image]][snyk-url] +[![codecov][codecov-image]][codecov-url] +[![Code Style: Google][gts-image]][gts-url] + +> Node.js Google Authentication Service Account Tokens + +This is a low level utility library used to interact with Google Authentication services. **In most cases, you probably want to use the [google-auth-library](https://github.com/googleapis/google-auth-library-nodejs) instead.** + +* [gtoken API Reference][client-docs] +* [github.com/googleapis/node-gtoken](https://github.com/googleapis/node-gtoken) + +## Installation + +``` sh +npm install gtoken +``` + +## Usage + +### Use with a `.pem` or `.json` key file: + +``` js +const { GoogleToken } = require('gtoken'); +const gtoken = new GoogleToken({ + keyFile: 'path/to/key.pem', // or path to .json key file + email: 'my_service_account_email@developer.gserviceaccount.com', + scope: ['https://scope1', 'https://scope2'], // or space-delimited string of scopes + eagerRefreshThresholdMillis: 5 * 60 * 1000 +}); + +gtoken.getToken((err, tokens) => { + if (err) { + console.log(err); + return; + } + console.log(tokens); + // { + // access_token: 'very-secret-token', + // expires_in: 3600, + // token_type: 'Bearer' + // } +}); +``` + +You can also use the async/await style API: + +``` js +const tokens = await gtoken.getToken() +console.log(tokens); +``` + +Or use promises: + +```js +gtoken.getToken() + .then(tokens => { + console.log(tokens) + }) + .catch(console.error); +``` + +### Use with a service account `.json` key file: + +``` js +const { GoogleToken } = require('gtoken'); +const gtoken = new GoogleToken({ + keyFile: 'path/to/key.json', + scope: ['https://scope1', 'https://scope2'], // or space-delimited string of scopes + eagerRefreshThresholdMillis: 5 * 60 * 1000 +}); + +gtoken.getToken((err, tokens) => { + if (err) { + console.log(err); + return; + } + console.log(tokens); +}); +``` + +### Pass the private key as a string directly: + +``` js +const key = '-----BEGIN RSA PRIVATE KEY-----\nXXXXXXXXXXX...'; +const { GoogleToken } = require('gtoken'); +const gtoken = new GoogleToken({ + email: 'my_service_account_email@developer.gserviceaccount.com', + scope: ['https://scope1', 'https://scope2'], // or space-delimited string of scopes + key: key, + eagerRefreshThresholdMillis: 5 * 60 * 1000 +}); +``` + +## Options + +> Various options that can be set when creating initializing the `gtoken` object. + +- `options.email or options.iss`: The service account email address. +- `options.scope`: An array of scope strings or space-delimited string of scopes. +- `options.sub`: The email address of the user requesting delegated access. +- `options.keyFile`: The filename of `.json` key or `.pem` key. +- `options.key`: The raw RSA private key value, in place of using `options.keyFile`. +- `options.additionalClaims`: Additional claims to include in the JWT when requesting a token. +- `options.eagerRefreshThresholdMillis`: How long must a token be valid for in order to return it from the cache. Defaults to 0. + +### .getToken(callback) + +> Returns the cached tokens or requests a new one and returns it. 
+ +``` js +gtoken.getToken((err, token) => { + console.log(err || token); + // gtoken.rawToken value is also set +}); +``` + +### .getCredentials('path/to/key.json') + +> Given a keyfile, returns the key and (if available) the client email. + +```js +const creds = await gtoken.getCredentials('path/to/key.json'); +``` + +### Properties + +> Various properties set on the gtoken object after call to `.getToken()`. + +- `gtoken.idToken`: The OIDC token returned (if any). +- `gtoken.accessToken`: The access token. +- `gtoken.expiresAt`: The expiry date as milliseconds since 1970/01/01 +- `gtoken.key`: The raw key value. +- `gtoken.rawToken`: Most recent raw token data received from Google. + +### .hasExpired() + +> Returns true if the token has expired, or token does not exist. + +``` js +const tokens = await gtoken.getToken(); +gtoken.hasExpired(); // false +``` + +### .revokeToken() + +> Revoke the token if set. + +``` js +await gtoken.revokeToken(); +console.log('Token revoked!'); +``` + +## Downloading your private `.json` key from Google + +1. Open the [Google Developer Console][gdevconsole]. +2. Open your project and under "APIs & auth", click Credentials. +3. Generate a new `.json` key and download it into your project. + +## Converting your `.p12` key to a `.pem` key + +If you'd like to convert to a `.pem` for use later, use OpenSSL if you have it installed. + +``` sh +$ openssl pkcs12 -in key.p12 -nodes -nocerts > key.pem +``` + +Don't forget, the passphrase when converting these files is the string `'notasecret'` + +## License + +[MIT](https://github.com/googleapis/node-gtoken/blob/main/LICENSE) + +[codecov-image]: https://codecov.io/gh/googleapis/node-gtoken/branch/main/graph/badge.svg +[codecov-url]: https://codecov.io/gh/googleapis/node-gtoken +[gdevconsole]: https://console.developers.google.com +[gts-image]: https://img.shields.io/badge/code%20style-google-blueviolet.svg +[gts-url]: https://www.npmjs.com/package/gts +[npm-image]: https://img.shields.io/npm/v/gtoken.svg +[npm-url]: https://npmjs.org/package/gtoken +[snyk-image]: https://snyk.io/test/github/googleapis/node-gtoken/badge.svg +[snyk-url]: https://snyk.io/test/github/googleapis/node-gtoken +[client-docs]: https://googleapis.dev/nodejs/gtoken/latest/ diff --git a/node_modules/gtoken/build/src/index.d.ts b/node_modules/gtoken/build/src/index.d.ts new file mode 100644 index 0000000..2ffe342 --- /dev/null +++ b/node_modules/gtoken/build/src/index.d.ts @@ -0,0 +1,93 @@ +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. 
+ * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +import { GaxiosOptions, GaxiosPromise } from 'gaxios'; +export interface Transporter { + request<T>(opts: GaxiosOptions): GaxiosPromise<T>; +} +export type GetTokenCallback = (err: Error | null, token?: TokenData) => void; +export interface Credentials { + privateKey: string; + clientEmail?: string; +} +export interface TokenData { + refresh_token?: string; + expires_in?: number; + access_token?: string; + token_type?: string; + id_token?: string; +} +export interface TokenOptions { + keyFile?: string; + key?: string; + email?: string; + iss?: string; + sub?: string; + scope?: string | string[]; + additionalClaims?: {}; + eagerRefreshThresholdMillis?: number; + transporter?: Transporter; +} +export interface GetTokenOptions { + forceRefresh?: boolean; +} +export declare class GoogleToken { + #private; + get accessToken(): string | undefined; + get idToken(): string | undefined; + get tokenType(): string | undefined; + get refreshToken(): string | undefined; + expiresAt?: number; + key?: string; + keyFile?: string; + iss?: string; + sub?: string; + scope?: string; + rawToken?: TokenData; + tokenExpires?: number; + email?: string; + additionalClaims?: {}; + eagerRefreshThresholdMillis?: number; + transporter: Transporter; + /** + * Create a GoogleToken. + * + * @param options Configuration object. + */ + constructor(options?: TokenOptions); + /** + * Returns whether the token has expired. + * + * @return true if the token has expired, false otherwise. + */ + hasExpired(): boolean; + /** + * Returns whether the token will expire within eagerRefreshThresholdMillis + * + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. + */ + isTokenExpiring(): boolean; + /** + * Returns a cached token or retrieves a new one from Google. + * + * @param callback The callback function. + */ + getToken(opts?: GetTokenOptions): Promise<TokenData>; + getToken(callback: GetTokenCallback, opts?: GetTokenOptions): void; + /** + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. + * @returns an object with privateKey and clientEmail properties + */ + getCredentials(keyFile: string): Promise<Credentials>; + /** + * Revoke the token if one is set. + * + * @param callback The callback function. + */ + revokeToken(): Promise<void>; + revokeToken(callback: (err?: Error) => void): void; +} diff --git a/node_modules/gtoken/build/src/index.js b/node_modules/gtoken/build/src/index.js new file mode 100644 index 0000000..bf45042 --- /dev/null +++ b/node_modules/gtoken/build/src/index.js @@ -0,0 +1,276 @@ +"use strict"; +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ?
f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GoogleToken = void 0; +const fs = require("fs"); +const gaxios_1 = require("gaxios"); +const jws = require("jws"); +const path = require("path"); +const util_1 = require("util"); +const readFile = fs.readFile + ? (0, util_1.promisify)(fs.readFile) + : async () => { + // if running in the web-browser, fs.readFile may not have been shimmed. + throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); + }; +const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; +const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; +class ErrorWithCode extends Error { + constructor(message, code) { + super(message); + this.code = code; + } +} +class GoogleToken { + get accessToken() { + return this.rawToken ? this.rawToken.access_token : undefined; + } + get idToken() { + return this.rawToken ? this.rawToken.id_token : undefined; + } + get tokenType() { + return this.rawToken ? this.rawToken.token_type : undefined; + } + get refreshToken() { + return this.rawToken ? this.rawToken.refresh_token : undefined; + } + /** + * Create a GoogleToken. + * + * @param options Configuration object. + */ + constructor(options) { + _GoogleToken_instances.add(this); + this.transporter = { + request: opts => (0, gaxios_1.request)(opts), + }; + _GoogleToken_inFlightRequest.set(this, void 0); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); + } + /** + * Returns whether the token has expired. + * + * @return true if the token has expired, false otherwise. + */ + hasExpired() { + const now = new Date().getTime(); + if (this.rawToken && this.expiresAt) { + return now >= this.expiresAt; + } + else { + return true; + } + } + /** + * Returns whether the token will expire within eagerRefreshThresholdMillis + * + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. + */ + isTokenExpiring() { + var _a; + const now = new Date().getTime(); + const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? 
_a : 0; + if (this.rawToken && this.expiresAt) { + return this.expiresAt <= now + eagerRefreshThresholdMillis; + } + else { + return true; + } + } + getToken(callback, opts = {}) { + if (typeof callback === 'object') { + opts = callback; + callback = undefined; + } + opts = Object.assign({ + forceRefresh: false, + }, opts); + if (callback) { + const cb = callback; + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); + } + /** + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. + * @returns an object with privateKey and clientEmail properties + */ + async getCredentials(keyFile) { + const ext = path.extname(keyFile); + switch (ext) { + case '.json': { + const key = await readFile(keyFile, 'utf8'); + const body = JSON.parse(key); + const privateKey = body.private_key; + const clientEmail = body.client_email; + if (!privateKey || !clientEmail) { + throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); + } + return { privateKey, clientEmail }; + } + case '.der': + case '.crt': + case '.pem': { + const privateKey = await readFile(keyFile, 'utf8'); + return { privateKey }; + } + case '.p12': + case '.pfx': { + throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + + 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + default: + throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. ' + + 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + } + revokeToken(callback) { + if (callback) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); + } +} +exports.GoogleToken = GoogleToken; +_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { + if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { + return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); + } + try { + return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); + } + finally { + __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); + } +}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { + if (this.isTokenExpiring() === false && opts.forceRefresh === false) { + return Promise.resolve(this.rawToken); + } + if (!this.key && !this.keyFile) { + throw new Error('No key or keyFile set.'); + } + if (!this.key && this.keyFile) { + const creds = await this.getCredentials(this.keyFile); + this.key = creds.privateKey; + this.iss = creds.clientEmail || this.iss; + if (!creds.clientEmail) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); + } + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", 
_GoogleToken_requestToken).call(this); +}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { + if (!this.iss) { + throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); + } +}, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() { + if (!this.accessToken) { + throw new Error('No token to revoke.'); + } + const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; + await this.transporter.request({ + url, + retry: true, + }); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { + email: this.iss, + sub: this.sub, + key: this.key, + keyFile: this.keyFile, + scope: this.scope, + additionalClaims: this.additionalClaims, + }); +}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { + this.keyFile = options.keyFile; + this.key = options.key; + this.rawToken = undefined; + this.iss = options.email || options.iss; + this.sub = options.sub; + this.additionalClaims = options.additionalClaims; + if (typeof options.scope === 'object') { + this.scope = options.scope.join(' '); + } + else { + this.scope = options.scope; + } + this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; + if (options.transporter) { + this.transporter = options.transporter; + } +}, _GoogleToken_requestToken = +/** + * Request the token from Google. + */ +async function _GoogleToken_requestToken() { + var _a, _b; + const iat = Math.floor(new Date().getTime() / 1000); + const additionalClaims = this.additionalClaims || {}; + const payload = Object.assign({ + iss: this.iss, + scope: this.scope, + aud: GOOGLE_TOKEN_URL, + exp: iat + 3600, + iat, + sub: this.sub, + }, additionalClaims); + const signedJWT = jws.sign({ + header: { alg: 'RS256' }, + payload, + secret: this.key, + }); + try { + const r = await this.transporter.request({ + method: 'POST', + url: GOOGLE_TOKEN_URL, + data: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signedJWT, + }, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + responseType: 'json', + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + this.rawToken = r.data; + this.expiresAt = + r.data.expires_in === null || r.data.expires_in === undefined + ? undefined + : (iat + r.data.expires_in) * 1000; + return this.rawToken; + } + catch (e) { + this.rawToken = undefined; + this.tokenExpires = undefined; + const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) + ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data + : {}; + if (body.error) { + const desc = body.error_description + ? 
`: ${body.error_description}` + : ''; + e.message = `${body.error}${desc}`; + } + throw e; + } +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/gtoken/package.json b/node_modules/gtoken/package.json new file mode 100644 index 0000000..3c76f38 --- /dev/null +++ b/node_modules/gtoken/package.json @@ -0,0 +1,62 @@ +{ + "name": "gtoken", + "version": "7.1.0", + "description": "Node.js Google Authentication Service Account Tokens", + "main": "./build/src/index.js", + "types": "./build/src/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "repository": "google/node-gtoken", + "scripts": { + "lint": "gts check", + "clean": "gts clean", + "fix": "gts fix", + "compile": "tsc -p .", + "test": "c8 mocha build/test", + "prepare": "npm run compile", + "pretest": "npm run compile", + "presystem-test": "npm run compile", + "system-test": "mocha build/system-test", + "samples-test": "cd samples/ && npm link ../ && npm test && cd ../", + "docs": "compodoc src/", + "docs-test": "linkinator docs", + "predocs-test": "npm run docs", + "prelint": "cd samples; npm link ../; npm install", + "precompile": "gts clean" + }, + "keywords": [ + "google", + "service", + "account", + "api", + "token", + "api", + "auth" + ], + "author": { + "name": "Google, LLC" + }, + "license": "MIT", + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "devDependencies": { + "@babel/plugin-proposal-private-methods": "^7.18.6", + "@compodoc/compodoc": "^1.1.7", + "@types/jws": "^3.1.0", + "@types/mocha": "^9.0.0", + "@types/node": "^20.0.0", + "c8": "^9.0.0", + "gts": "^5.0.0", + "linkinator": "^4.0.0", + "mocha": "^9.2.2", + "nock": "^13.0.0", + "typescript": "^5.1.6" + }, + "files": [ + "build/src", + "!build/src/**/*.map" + ] +} diff --git a/node_modules/http-proxy-agent/README.md b/node_modules/http-proxy-agent/README.md new file mode 100644 index 0000000..d60e206 --- /dev/null +++ b/node_modules/http-proxy-agent/README.md @@ -0,0 +1,74 @@ +http-proxy-agent +================ +### An HTTP(s) proxy `http.Agent` implementation for HTTP +[![Build Status](https://github.com/TooTallNate/node-http-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-http-proxy-agent/actions?workflow=Node+CI) + +This module provides an `http.Agent` implementation that connects to a specified +HTTP or HTTPS proxy server, and can be used with the built-in `http` module. + +__Note:__ For HTTP proxy usage with the `https` module, check out +[`node-https-proxy-agent`](https://github.com/TooTallNate/node-https-proxy-agent). 
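The gtoken build and manifest above implement the service-account flow end to end: `getCredentials()` extracts the private key and client email from a JSON or PEM key file, and the token request signs an RS256 JWT and exchanges it at the Google token endpoint via the `jwt-bearer` grant. A minimal usage sketch of that API follows; the key-file path and scope are illustrative placeholders, not values taken from this repository:

```js
const { GoogleToken } = require('gtoken');

// Placeholder key file and scope, for illustration only.
const gtoken = new GoogleToken({
  keyFile: './service-account.json',
  scope: ['https://www.googleapis.com/auth/cloud-platform'], // an array is joined with spaces
});

gtoken.getToken()
  .then((token) => {
    // `token` is the raw token-endpoint response (access_token, expires_in, ...).
    console.log(token.access_token);
  })
  .catch(console.error);
```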
+ +Installation +------------ + +Install with `npm`: + +``` bash +$ npm install http-proxy-agent +``` + + +Example +------- + +``` js +var url = require('url'); +var http = require('http'); +var HttpProxyAgent = require('http-proxy-agent'); + +// HTTP/HTTPS proxy to connect to +var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; +console.log('using proxy server %j', proxy); + +// HTTP endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'http://nodejs.org/api/'; +console.log('attempting to GET %j', endpoint); +var opts = url.parse(endpoint); + +// create an instance of the `HttpProxyAgent` class with the proxy server information +var agent = new HttpProxyAgent(proxy); +opts.agent = agent; + +http.get(opts, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/http-proxy-agent/dist/agent.d.ts b/node_modules/http-proxy-agent/dist/agent.d.ts new file mode 100644 index 0000000..3f043f7 --- /dev/null +++ b/node_modules/http-proxy-agent/dist/agent.d.ts @@ -0,0 +1,32 @@ +/// +import net from 'net'; +import { Agent, ClientRequest, RequestOptions } from 'agent-base'; +import { HttpProxyAgentOptions } from '.'; +interface HttpProxyAgentClientRequest extends ClientRequest { + path: string; + output?: string[]; + outputData?: { + data: string; + }[]; + _header?: string | null; + _implicitHeader(): void; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + * + * @api public + */ +export default class HttpProxyAgent extends Agent { + private secureProxy; + private proxy; + constructor(_opts: string | HttpProxyAgentOptions); + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req: HttpProxyAgentClientRequest, opts: RequestOptions): Promise; +} +export {}; diff --git a/node_modules/http-proxy-agent/dist/agent.js b/node_modules/http-proxy-agent/dist/agent.js new file mode 100644 index 0000000..aca8280 --- /dev/null +++ b/node_modules/http-proxy-agent/dist/agent.js @@ -0,0 +1,145 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const net_1 = __importDefault(require("net")); +const tls_1 = __importDefault(require("tls")); +const url_1 = __importDefault(require("url")); +const debug_1 = __importDefault(require("debug")); +const once_1 = __importDefault(require("@tootallnate/once")); +const agent_base_1 = require("agent-base"); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. 
+ if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield (0, once_1.default)(socket, 'connect'); + return socket; + }); + } +} +exports.default = HttpProxyAgent; +//# sourceMappingURL=agent.js.map \ No newline at end of file diff --git a/node_modules/http-proxy-agent/dist/agent.js.map b/node_modules/http-proxy-agent/dist/agent.js.map new file mode 100644 index 0000000..bd3b56a --- /dev/null +++ b/node_modules/http-proxy-agent/dist/agent.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,6DAAqC;AACrC,2CAAkE;AAGlE,MAAM,KAAK,GAAG,IAAA,eAAW,EAAC,kBAAkB,CAAC,CAAC;AAY9C,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,MAAqB,cAAe,SAAQ,kBAAK;IAIhD,YAAY,KAAqC;QAChD,IAAI,IAA2B,CAAC;QAChC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAA+B,IAAI,CAAE,CAAC;QAEjD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAgC,EAChC,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YACpC,MAAM,MAAM,GAAG,aAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAEnC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC;aAC1B;YAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC;aACrD;YAED,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,EAAE;gBAC5C,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAChC;YAED,IAAI,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;gBACzB,yDAAyD;gBACzD,2CAA2C;gBAC3C,MAAM,CAAC,IAAI,GAAG,EAAE,CAAC;aACjB;YAED,0DAA0D;YAC1D,0DAA0D;YAC1D,GAAG,CAAC,IAAI,GAAG,aAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAE9B,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,GAAG,CAAC,SAAS,CACZ,qBAAqB,EACrB,SAAS,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CACrD,CAAC;aACF;YAED,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,kEAAkE;YAClE,IAAI,GAAG,CAAC,OAAO,EAAE;gBAChB,IAAI,KAAa,CAAC;gBAClB,IAAI,YAAoB,CAAC;gBACzB,KAAK,CAAC,oDAAoD,CAAC,CAAC;gBAC5D,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC;gBACnB,GAAG,CAAC,eAAe,EAAE,CAAC;gBACtB,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;oBACxC,YAAY;oBACZ,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;oBACtB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC5D,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;iBACvC;qBAAM,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvD,aAAa;oBACb,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;oBAC/B,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;wBACrB,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC7C,KAAK,CAAC,mBAA
mB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;iBACnD;aACD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE;YACrE,qDAAqD;YACrD,MAAM,IAAA,cAAI,EAAC,MAAM,EAAE,SAAS,CAAC,CAAC;YAE9B,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AA1ID,iCA0IC"} \ No newline at end of file diff --git a/node_modules/http-proxy-agent/dist/index.d.ts b/node_modules/http-proxy-agent/dist/index.d.ts new file mode 100644 index 0000000..24bdb52 --- /dev/null +++ b/node_modules/http-proxy-agent/dist/index.d.ts @@ -0,0 +1,21 @@ +/// +import net from 'net'; +import tls from 'tls'; +import { Url } from 'url'; +import { AgentOptions } from 'agent-base'; +import _HttpProxyAgent from './agent'; +declare function createHttpProxyAgent(opts: string | createHttpProxyAgent.HttpProxyAgentOptions): _HttpProxyAgent; +declare namespace createHttpProxyAgent { + interface BaseHttpProxyAgentOptions { + secureProxy?: boolean; + host?: string | null; + path?: string | null; + port?: string | number | null; + } + export interface HttpProxyAgentOptions extends AgentOptions, BaseHttpProxyAgentOptions, Partial> { + } + export type HttpProxyAgent = _HttpProxyAgent; + export const HttpProxyAgent: typeof _HttpProxyAgent; + export {}; +} +export = createHttpProxyAgent; diff --git a/node_modules/http-proxy-agent/dist/index.js b/node_modules/http-proxy-agent/dist/index.js new file mode 100644 index 0000000..0a71180 --- /dev/null +++ b/node_modules/http-proxy-agent/dist/index.js @@ -0,0 +1,14 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const agent_1 = __importDefault(require("./agent")); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/http-proxy-agent/dist/index.js.map b/node_modules/http-proxy-agent/dist/index.js.map new file mode 100644 index 0000000..e07dae5 --- /dev/null +++ b/node_modules/http-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAIA,oDAAsC;AAEtC,SAAS,oBAAoB,CAC5B,IAAyD;IAEzD,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,CAAC;AAClC,CAAC;AAED,WAAU,oBAAoB;IAmBhB,mCAAc,GAAG,eAAe,CAAC;IAE9C,oBAAoB,CAAC,SAAS,GAAG,eAAe,CAAC,SAAS,CAAC;AAC5D,CAAC,EAtBS,oBAAoB,KAApB,oBAAoB,QAsB7B;AAED,iBAAS,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/http-proxy-agent/node_modules/agent-base/README.md b/node_modules/http-proxy-agent/node_modules/agent-base/README.md new file mode 100644 index 0000000..256f1f3 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/agent-base/README.md @@ -0,0 +1,145 @@ +agent-base +========== +### Turn a function into an [`http.Agent`][http.Agent] instance +[![Build Status](https://github.com/TooTallNate/node-agent-base/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-agent-base/actions?workflow=Node+CI) + +This module provides an `http.Agent` generator. That is, you pass it an async +callback function, and it returns a new `http.Agent` instance that will invoke the +given callback function when sending outbound HTTP requests. + +#### Some subclasses: + +Here's some more interesting uses of `agent-base`. 
+Send a pull request to list yours! + + * [`http-proxy-agent`][http-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTP endpoints + * [`https-proxy-agent`][https-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTPS endpoints + * [`pac-proxy-agent`][pac-proxy-agent]: A PAC file proxy `http.Agent` implementation for HTTP and HTTPS + * [`socks-proxy-agent`][socks-proxy-agent]: A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS + + +Installation +------------ + +Install with `npm`: + +``` bash +$ npm install agent-base +``` + + +Example +------- + +Here's a minimal example that creates a new `net.Socket` connection to the server +for every HTTP request (i.e. the equivalent of `agent: false` option): + +```js +var net = require('net'); +var tls = require('tls'); +var url = require('url'); +var http = require('http'); +var agent = require('agent-base'); + +var endpoint = 'http://nodejs.org/api/'; +var parsed = url.parse(endpoint); + +// This is the important part! +parsed.agent = agent(function (req, opts) { + var socket; + // `secureEndpoint` is true when using the https module + if (opts.secureEndpoint) { + socket = tls.connect(opts); + } else { + socket = net.connect(opts); + } + return socket; +}); + +// Everything else works just like normal... +http.get(parsed, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + +Returning a Promise or using an `async` function is also supported: + +```js +agent(async function (req, opts) { + await sleep(1000); + // etc… +}); +``` + +Return another `http.Agent` instance to "pass through" the responsibility +for that HTTP request to that agent: + +```js +agent(function (req, opts) { + return opts.secureEndpoint ? https.globalAgent : http.globalAgent; +}); +``` + + +API +--- + +## Agent(Function callback[, Object options]) → [http.Agent][] + +Creates a base `http.Agent` that will execute the callback function `callback` +for every HTTP request that it is used as the `agent` for. The callback function +is responsible for creating a `stream.Duplex` instance of some kind that will be +used as the underlying socket in the HTTP request. + +The `options` object accepts the following properties: + + * `timeout` - Number - Timeout for the `callback()` function in milliseconds. Defaults to Infinity (optional). + +The callback function should have the following signature: + +### callback(http.ClientRequest req, Object options, Function cb) → undefined + +The ClientRequest `req` can be accessed to read request headers and +and the path, etc. The `options` object contains the options passed +to the `http.request()`/`https.request()` function call, and is formatted +to be directly passed to `net.connect()`/`tls.connect()`, or however +else you want a Socket to be created. Pass the created socket to +the callback function `cb` once created, and the HTTP request will +continue to proceed. + +If the `https` module is used to invoke the HTTP request, then the +`secureEndpoint` property on `options` _will be set to `true`_. 
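The API description above also covers a legacy three-argument callback form, which none of the examples in this README exercise; `agent-base` detects it by arity and promisifies it (see `dist/src/index.js` and `promisify.js` later in this diff). A minimal sketch of that form, assuming the same plain-connection setup as the first example:

```js
var net = require('net');
var tls = require('tls');
var agent = require('agent-base');

// Legacy callback form: instead of returning the socket,
// pass it (or an error) to `cb`.
var callbackAgent = agent(function (req, opts, cb) {
  var socket;
  if (opts.secureEndpoint) {
    socket = tls.connect(opts);
  } else {
    socket = net.connect(opts);
  }
  cb(null, socket);
});
```

Returning the socket (or a Promise) as in the earlier examples is the preferred style; the callback variant is kept for backwards compatibility and is converted internally by the arity check (`this.callback.length >= 3`) shown in the compiled source.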
+ + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +[http-proxy-agent]: https://github.com/TooTallNate/node-http-proxy-agent +[https-proxy-agent]: https://github.com/TooTallNate/node-https-proxy-agent +[pac-proxy-agent]: https://github.com/TooTallNate/node-pac-proxy-agent +[socks-proxy-agent]: https://github.com/TooTallNate/node-socks-proxy-agent +[http.Agent]: https://nodejs.org/api/http.html#http_class_http_agent diff --git a/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/index.d.ts b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/index.d.ts new file mode 100644 index 0000000..bc4ab74 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/index.d.ts @@ -0,0 +1,78 @@ +/// +import net from 'net'; +import http from 'http'; +import https from 'https'; +import { Duplex } from 'stream'; +import { EventEmitter } from 'events'; +declare function createAgent(opts?: createAgent.AgentOptions): createAgent.Agent; +declare function createAgent(callback: createAgent.AgentCallback, opts?: createAgent.AgentOptions): createAgent.Agent; +declare namespace createAgent { + interface ClientRequest extends http.ClientRequest { + _last?: boolean; + _hadError?: boolean; + method: string; + } + interface AgentRequestOptions { + host?: string; + path?: string; + port: number; + } + interface HttpRequestOptions extends AgentRequestOptions, Omit { + secureEndpoint: false; + } + interface HttpsRequestOptions extends AgentRequestOptions, Omit { + secureEndpoint: true; + } + type RequestOptions = HttpRequestOptions | HttpsRequestOptions; + type AgentLike = Pick | http.Agent; + type AgentCallbackReturn = Duplex | AgentLike; + type AgentCallbackCallback = (err?: Error | null, socket?: createAgent.AgentCallbackReturn) => void; + type AgentCallbackPromise = (req: createAgent.ClientRequest, opts: createAgent.RequestOptions) => createAgent.AgentCallbackReturn | Promise; + type AgentCallback = typeof Agent.prototype.callback; + type AgentOptions = { + timeout?: number; + }; + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends EventEmitter { + timeout: number | null; + maxFreeSockets: number; + maxTotalSockets: number; + maxSockets: number; + sockets: { + [key: string]: net.Socket[]; + }; + freeSockets: { + [key: string]: net.Socket[]; + }; + requests: { + [key: string]: http.IncomingMessage[]; + }; + options: https.AgentOptions; + private promisifiedCallback?; + private explicitDefaultPort?; + private explicitProtocol?; + constructor(callback?: createAgent.AgentCallback | createAgent.AgentOptions, _opts?: createAgent.AgentOptions); + get defaultPort(): number; + set defaultPort(v: number); + get protocol(): string; + set protocol(v: string); + callback(req: createAgent.ClientRequest, opts: createAgent.RequestOptions, fn: createAgent.AgentCallbackCallback): void; + callback(req: createAgent.ClientRequest, opts: createAgent.RequestOptions): createAgent.AgentCallbackReturn | Promise; + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req: ClientRequest, _opts: RequestOptions): void; + freeSocket(socket: net.Socket, opts: AgentOptions): void; + destroy(): void; + } +} +export = createAgent; diff --git a/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/index.js b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/index.js new file mode 100644 index 0000000..bfd9e22 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/index.js @@ -0,0 +1,203 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = require("events"); +const debug_1 = __importDefault(require("debug")); +const promisify_1 = __importDefault(require("./promisify")); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. + * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 
443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/index.js.map b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/index.js.map new file mode 100644 index 0000000..bd118ab --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAIA,mCAAsC;AACtC,kDAAgC;AAChC,4DAAoC;AAEpC,MAAM,KAAK,GAAG,eAAW,CAAC,YAAY,CAAC,CAAC;AAExC,SAAS,OAAO,CAAC,CAAM;IACtB,OAAO,OAAO,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,UAAU,KAAK,UAAU,CAAC;AACzD,CAAC;AAED,SAAS,gBAAgB;IACxB,MAAM,EAAE,KAAK,EAAE,GAAG,IAAI,KAAK,EAAE,CAAC;IAC9B,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,KAAK,CAAC;IAC5C,OAAO,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,IAAK,CAAC,CAAC,OAAO,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACxG,CAAC;AAOD,SAAS,WAAW,CACnB,QAA+D,EAC/D,IAA+B;IAE/B,OAAO,IAAI,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC9C,CAAC;AAED,WAAU,WAAW;IAmDpB;;;;;;OAMG;IACH,MAAa,KAAM,SAAQ,qBAAY;QAmBtC,YACC,QAA+D,EAC/D,KAAgC;YAEhC,KAAK,EAAE,CAAC;YAER,IAAI,IAAI,GAAG,KAAK,CAAC;YACjB,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;gBACnC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;aACzB;iBAAM,IAAI,QAAQ,EAAE;gBACpB,IAAI,GAAG,QAAQ,CAAC;aAChB;YAED,0DAA0D;YAC1D,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;aAC5B;YAED,+DAA+D;YAC/D,0DAA0D;YAC1D,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;YACxB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;YACpB,IAAI,CAAC,eAAe,GAAG,QAAQ,CAAC;YAChC,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;YAClB,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;YACtB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAC;YACnB,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;QACnB,CAAC;QAED,IAAI,WAAW;YACd,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;gBACjD,OAAO,IAAI,CAAC,mBAAmB,CAAC;aAChC;YACD,OAAO,gBAAgB,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;QACtC,CAAC;QAED,IAAI,WAAW,CAAC,CAAS;YACxB,IAAI,CAAC,mBAAmB,GAAG,CAAC,CAAC;QAC9B,CAAC;QAED,IAAI,QAAQ;YACX,IAAI,OAAO,IAAI,CAAC,gBAAgB,KAAK,QAAQ,EAAE;gBAC9C,OAAO,IAAI,CAAC,gBAAgB,CAAC;aAC7B;YACD,OAAO,gBAAgB,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;QAChD,CAAC;QAED,IAAI,QAAQ,CAAC,CAAS;YACrB,IAAI,CAAC,gBAAgB,GAAG,CAAC,CAAC;QAC3B,CAAC;QAaD,QAAQ,CACP,GAA8B,EAC9B,IAA8B,EAC9B,EAAsC;YAKtC,MAAM,IAAI,KAAK,CACd,yFAAyF,CACzF,CAAC;QACH,CAAC;QAED;;;;;WAKG;QACH,UAAU,CAAC,GAAkB,EAAE,KAAqB;YACnD,MAAM,IAAI,qBAAwB,KAAK,CAAE,CAAC;YAE1C,IAAI,OAAO,IAAI,CAAC,cAAc,KAAK,SAAS,EAAE;gBAC7C,IAAI,CAAC,cAAc,GAAG,gBAAgB,EAAE,CAAC;aACzC;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;gBACtB,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;aACxB;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;gBACtB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;aAC3C;YAED,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,EAAE;gBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;aACzD;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE;gBAC3B,2DAA2D;gBAC3D,0DAA0D;gBAC1D,4DAA4D;gBAC5D,8CAA8C;gBAC9C,OAAO,IAAI,CAAC,IAAI,CAAC;aACjB;YAED,OAAO,IAAI,CAAC,KAAK,CAAC;YAClB,OAAO,IAAI,CAAC,QAAQ,CAAC;YACrB,OAAO,IAAI,CAAC,aAAa,CAAC;YAC1B,OAAO,IAAI,CAAC,WAAW,CAAC;YACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC;YAE7B,kCAAkC;YAClC,2CAA2C;YAC3C,GAAG,CAAC,KAAK,GAAG,IAAI,CAAC;YACjB,GAAG,CAAC,eAAe,GAAG,KAAK,CAAC;YAE5B,IAAI,QAAQ,GAAG,KAAK,CAAC;YACrB,IAAI,SAAS,GAAyC,IAAI,CAAC;YAC3D,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAE/C,MAAM,OAAO,GAAG,CAAC,GAA0B,EAAE,EAAE;gBAC9C,IAAI,GAAG,CAAC,SAAS;oBAAE,OAAO;gBAC1B,GAAG,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;gBACvB,yDAAyD;gBACzD,iEAAiE;gBACjE,GAAG,CAAC,SAAS,GAAG,IAAI,CAAC;YACtB,CAAC,CAAC;YAEF,MAAM,SAAS,GAAG,GAAG,EAAE;gBACtB,SAAS,GAAG,IAAI,CAAC;gBACjB,QAAQ,GAAG,IAAI,CAAC;gBAChB,MAAM,GAAG,GAA0B,IAAI,KAAK,CAC3C,sDAAsD,SAAS,IAAI,CACnE,CAAC;gBACF,GAAG,CAAC,IAAI,GAAG,UAAU,CAAC;gBACtB,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,MAAM,a
AAa,GAAG,CAAC,GAA0B,EAAE,EAAE;gBACpD,IAAI,QAAQ;oBAAE,OAAO;gBACrB,IAAI,SAAS,KAAK,IAAI,EAAE;oBACvB,YAAY,CAAC,SAAS,CAAC,CAAC;oBACxB,SAAS,GAAG,IAAI,CAAC;iBACjB;gBACD,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,MAAM,QAAQ,GAAG,CAAC,MAA2B,EAAE,EAAE;gBAChD,IAAI,QAAQ;oBAAE,OAAO;gBACrB,IAAI,SAAS,IAAI,IAAI,EAAE;oBACtB,YAAY,CAAC,SAAS,CAAC,CAAC;oBACxB,SAAS,GAAG,IAAI,CAAC;iBACjB;gBAED,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE;oBACpB,oDAAoD;oBACpD,wDAAwD;oBACxD,eAAe;oBACf,KAAK,CACJ,6CAA6C,EAC7C,MAAM,CAAC,WAAW,CAAC,IAAI,CACvB,CAAC;oBACD,MAA4B,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;oBACpD,OAAO;iBACP;gBAED,IAAI,MAAM,EAAE;oBACX,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE;wBACxB,IAAI,CAAC,UAAU,CAAC,MAAoB,EAAE,IAAI,CAAC,CAAC;oBAC7C,CAAC,CAAC,CAAC;oBACH,GAAG,CAAC,QAAQ,CAAC,MAAoB,CAAC,CAAC;oBACnC,OAAO;iBACP;gBAED,MAAM,GAAG,GAAG,IAAI,KAAK,CACpB,qDAAqD,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,IAAI,IAAI,CAC/E,CAAC;gBACF,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACxC,OAAO,CAAC,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC,CAAC;gBAChD,OAAO;aACP;YAED,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE;gBAC9B,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,IAAI,CAAC,EAAE;oBAC9B,KAAK,CAAC,gDAAgD,CAAC,CAAC;oBACxD,IAAI,CAAC,mBAAmB,GAAG,mBAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;iBACpD;qBAAM;oBACN,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,QAAQ,CAAC;iBACzC;aACD;YAED,IAAI,OAAO,SAAS,KAAK,QAAQ,IAAI,SAAS,GAAG,CAAC,EAAE;gBACnD,SAAS,GAAG,UAAU,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;aAC7C;YAED,IAAI,MAAM,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACpD,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAC9B;YAED,IAAI;gBACH,KAAK,CACJ,qCAAqC,EACrC,IAAI,CAAC,QAAQ,EACb,GAAG,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,IAAI,EAAE,CAC3B,CAAC;gBACF,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,mBAAmB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CACxD,QAAQ,EACR,aAAa,CACb,CAAC;aACF;YAAC,OAAO,GAAG,EAAE;gBACb,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;aACzC;QACF,CAAC;QAED,UAAU,CAAC,MAAkB,EAAE,IAAkB;YAChD,KAAK,CAAC,sBAAsB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;YAC7D,MAAM,CAAC,OAAO,EAAE,CAAC;QAClB,CAAC;QAED,OAAO;YACN,KAAK,CAAC,qBAAqB,EAAE,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QACrD,CAAC;KACD;IAxPY,iBAAK,QAwPjB,CAAA;IAED,uCAAuC;IACvC,WAAW,CAAC,SAAS,GAAG,WAAW,CAAC,KAAK,CAAC,SAAS,CAAC;AACrD,CAAC,EAtTS,WAAW,KAAX,WAAW,QAsTpB;AAED,iBAAS,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/promisify.d.ts b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/promisify.d.ts new file mode 100644 index 0000000..0268869 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/promisify.d.ts @@ -0,0 +1,4 @@ +import { ClientRequest, RequestOptions, AgentCallbackCallback, AgentCallbackPromise } from './index'; +declare type LegacyCallback = (req: ClientRequest, opts: RequestOptions, fn: AgentCallbackCallback) => void; +export default function promisify(fn: LegacyCallback): AgentCallbackPromise; +export {}; diff --git a/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/promisify.js b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/promisify.js new file mode 100644 index 0000000..b2f6132 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/promisify.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + 
}; +} +exports.default = promisify; +//# sourceMappingURL=promisify.js.map \ No newline at end of file diff --git a/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/promisify.js.map b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/promisify.js.map new file mode 100644 index 0000000..4bff9bf --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/agent-base/dist/src/promisify.js.map @@ -0,0 +1 @@ +{"version":3,"file":"promisify.js","sourceRoot":"","sources":["../../src/promisify.ts"],"names":[],"mappings":";;AAeA,SAAwB,SAAS,CAAC,EAAkB;IACnD,OAAO,UAAsB,GAAkB,EAAE,IAAoB;QACpE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACtC,EAAE,CAAC,IAAI,CACN,IAAI,EACJ,GAAG,EACH,IAAI,EACJ,CAAC,GAA6B,EAAE,GAAyB,EAAE,EAAE;gBAC5D,IAAI,GAAG,EAAE;oBACR,MAAM,CAAC,GAAG,CAAC,CAAC;iBACZ;qBAAM;oBACN,OAAO,CAAC,GAAG,CAAC,CAAC;iBACb;YACF,CAAC,CACD,CAAC;QACH,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC;AACH,CAAC;AAjBD,4BAiBC"} \ No newline at end of file diff --git a/node_modules/http-proxy-agent/node_modules/agent-base/package.json b/node_modules/http-proxy-agent/node_modules/agent-base/package.json new file mode 100644 index 0000000..fadce3a --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/agent-base/package.json @@ -0,0 +1,64 @@ +{ + "name": "agent-base", + "version": "6.0.2", + "description": "Turn a function into an `http.Agent` instance", + "main": "dist/src/index", + "typings": "dist/src/index", + "files": [ + "dist/src", + "src" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "postbuild": "cpy --parents src test '!**/*.ts' dist", + "test": "mocha --reporter spec dist/test/*.js", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-agent-base.git" + }, + "keywords": [ + "http", + "agent", + "base", + "barebones", + "https" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-agent-base/issues" + }, + "dependencies": { + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/mocha": "^5.2.7", + "@types/node": "^14.0.20", + "@types/semver": "^7.1.0", + "@types/ws": "^6.0.3", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "async-listen": "^1.2.0", + "cpy-cli": "^2.0.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.0", + "rimraf": "^3.0.0", + "semver": "^7.1.2", + "typescript": "^3.5.3", + "ws": "^3.0.0" + }, + "engines": { + "node": ">= 6.0.0" + } +} diff --git a/node_modules/http-proxy-agent/node_modules/agent-base/src/index.ts b/node_modules/http-proxy-agent/node_modules/agent-base/src/index.ts new file mode 100644 index 0000000..a47ccd4 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/agent-base/src/index.ts @@ -0,0 +1,345 @@ +import net from 'net'; +import http from 'http'; +import https from 'https'; +import { Duplex } from 'stream'; +import { EventEmitter } from 'events'; +import createDebug from 'debug'; +import promisify from './promisify'; + +const debug = createDebug('agent-base'); + +function isAgent(v: any): v is createAgent.AgentLike { + return Boolean(v) && typeof v.addRequest === 'function'; +} + +function isSecureEndpoint(): boolean { + const { stack } = new 
Error(); + if (typeof stack !== 'string') return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} + +function createAgent(opts?: createAgent.AgentOptions): createAgent.Agent; +function createAgent( + callback: createAgent.AgentCallback, + opts?: createAgent.AgentOptions +): createAgent.Agent; +function createAgent( + callback?: createAgent.AgentCallback | createAgent.AgentOptions, + opts?: createAgent.AgentOptions +) { + return new createAgent.Agent(callback, opts); +} + +namespace createAgent { + export interface ClientRequest extends http.ClientRequest { + _last?: boolean; + _hadError?: boolean; + method: string; + } + + export interface AgentRequestOptions { + host?: string; + path?: string; + // `port` on `http.RequestOptions` can be a string or undefined, + // but `net.TcpNetConnectOpts` expects only a number + port: number; + } + + export interface HttpRequestOptions + extends AgentRequestOptions, + Omit { + secureEndpoint: false; + } + + export interface HttpsRequestOptions + extends AgentRequestOptions, + Omit { + secureEndpoint: true; + } + + export type RequestOptions = HttpRequestOptions | HttpsRequestOptions; + + export type AgentLike = Pick | http.Agent; + + export type AgentCallbackReturn = Duplex | AgentLike; + + export type AgentCallbackCallback = ( + err?: Error | null, + socket?: createAgent.AgentCallbackReturn + ) => void; + + export type AgentCallbackPromise = ( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions + ) => + | createAgent.AgentCallbackReturn + | Promise; + + export type AgentCallback = typeof Agent.prototype.callback; + + export type AgentOptions = { + timeout?: number; + }; + + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. + * + * @param {Function} callback + * @api public + */ + export class Agent extends EventEmitter { + public timeout: number | null; + public maxFreeSockets: number; + public maxTotalSockets: number; + public maxSockets: number; + public sockets: { + [key: string]: net.Socket[]; + }; + public freeSockets: { + [key: string]: net.Socket[]; + }; + public requests: { + [key: string]: http.IncomingMessage[]; + }; + public options: https.AgentOptions; + private promisifiedCallback?: createAgent.AgentCallbackPromise; + private explicitDefaultPort?: number; + private explicitProtocol?: string; + + constructor( + callback?: createAgent.AgentCallback | createAgent.AgentOptions, + _opts?: createAgent.AgentOptions + ) { + super(); + + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } else if (callback) { + opts = callback; + } + + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + + get defaultPort(): number { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + + set defaultPort(v: number) { + this.explicitDefaultPort = v; + } + + get protocol(): string { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 
'https:' : 'http:'; + } + + set protocol(v: string) { + this.explicitProtocol = v; + } + + callback( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions, + fn: createAgent.AgentCallbackCallback + ): void; + callback( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions + ): + | createAgent.AgentCallbackReturn + | Promise; + callback( + req: createAgent.ClientRequest, + opts: createAgent.AgentOptions, + fn?: createAgent.AgentCallbackCallback + ): + | createAgent.AgentCallbackReturn + | Promise + | void { + throw new Error( + '"agent-base" has no default implementation, you must subclass and override `callback()`' + ); + } + + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req: ClientRequest, _opts: RequestOptions): void { + const opts: RequestOptions = { ..._opts }; + + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + + if (opts.host == null) { + opts.host = 'localhost'; + } + + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + + let timedOut = false; + let timeoutId: ReturnType | null = null; + const timeoutMs = opts.timeout || this.timeout; + + const onerror = (err: NodeJS.ErrnoException) => { + if (req._hadError) return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err: NodeJS.ErrnoException = new Error( + `A "socket" was not created for HTTP request before ${timeoutMs}ms` + ); + err.code = 'ETIMEOUT'; + onerror(err); + }; + + const callbackError = (err: NodeJS.ErrnoException) => { + if (timedOut) return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + + const onsocket = (socket: AgentCallbackReturn) => { + if (timedOut) return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug( + 'Callback returned another Agent instance %o', + socket.constructor.name + ); + (socket as createAgent.Agent).addRequest(req, opts); + return; + } + + if (socket) { + socket.once('free', () => { + this.freeSocket(socket as net.Socket, opts); + }); + req.onSocket(socket as net.Socket); + return; + } + + const err = new Error( + `no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\`` + ); + onerror(err); + }; + + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify(this.callback); + } else { + this.promisifiedCallback = this.callback; + } + } + + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + + try { + debug( + 'Resolving socket for %o request: %o', + opts.protocol, + `${req.method} ${req.path}` + ); + Promise.resolve(this.promisifiedCallback(req, opts)).then( + onsocket, + callbackError + ); + } catch (err) { + Promise.reject(err).catch(callbackError); + } + } + + freeSocket(socket: net.Socket, opts: AgentOptions) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +} + +export = createAgent; diff --git a/node_modules/http-proxy-agent/node_modules/agent-base/src/promisify.ts b/node_modules/http-proxy-agent/node_modules/agent-base/src/promisify.ts new file mode 100644 index 0000000..60cc662 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/agent-base/src/promisify.ts @@ -0,0 +1,33 @@ +import { + Agent, + ClientRequest, + RequestOptions, + AgentCallbackCallback, + AgentCallbackPromise, + AgentCallbackReturn +} from './index'; + +type LegacyCallback = ( + req: ClientRequest, + opts: RequestOptions, + fn: AgentCallbackCallback +) => void; + +export default function promisify(fn: LegacyCallback): AgentCallbackPromise { + return function(this: Agent, req: ClientRequest, opts: RequestOptions) { + return new Promise((resolve, reject) => { + fn.call( + this, + req, + opts, + (err: Error | null | undefined, rtn?: AgentCallbackReturn) => { + if (err) { + reject(err); + } else { + resolve(rtn); + } + } + ); + }); + }; +} diff --git a/node_modules/http-proxy-agent/node_modules/debug/LICENSE b/node_modules/http-proxy-agent/node_modules/debug/LICENSE new file mode 100644 index 0000000..1a9820e --- /dev/null +++ 
b/node_modules/http-proxy-agent/node_modules/debug/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/http-proxy-agent/node_modules/debug/README.md b/node_modules/http-proxy-agent/node_modules/debug/README.md new file mode 100644 index 0000000..e9c3e04 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/debug/README.md @@ -0,0 +1,481 @@ +# debug +[![Build Status](https://travis-ci.org/debug-js/debug.svg?branch=master)](https://travis-ci.org/debug-js/debug) [![Coverage Status](https://coveralls.io/repos/github/debug-js/debug/badge.svg?branch=master)](https://coveralls.io/github/debug-js/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. + +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. 
+ +Here are some examples: + +screen shot 2017-08-08 at 12 53 04 pm +screen shot 2017-08-08 at 12 53 38 pm +screen shot 2017-08-08 at 12 53 25 pm + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. + +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. + + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard. Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). 
| +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. +See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. + +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. +For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. + +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +In Chromium-based web browsers (e.g. Brave, Chrome, and Electron), the JavaScript console will—by default—only show messages logged by `debug` if the "Verbose" log level is _enabled_. + + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! 
+log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. + +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +`disable()` + +Will disable all namespaces. The functions returns the namespaces currently +enabled (and skipped). This can be useful if you want to disable debugging +temporarily without knowing what was enabled to begin with. + +For example: + +```js +let debug = require('debug'); +debug.enable('foo:*,-foo:bar'); +let namespaces = debug.disable(); +debug.enable(namespaces); +``` + +Note: There is no guarantee that the string will be identical to the initial +enable string, but semantically they will be identical. + +## Checking whether a debug target is enabled + +After you've created a debug instance, you can determine whether or not it is +enabled by checking the `enabled` property: + +```javascript +const debug = require('debug')('http'); + +if (debug.enabled) { + // do stuff... +} +``` + +You can also manually toggle this property to force the debug instance to be +enabled or disabled. + +## Usage in child processes + +Due to the way `debug` detects if the output is a TTY or not, colors are not shown in child processes when `stderr` is piped. A solution is to pass the `DEBUG_COLORS=1` environment variable to the child process. +For example: + +```javascript +worker = fork(WORKER_WRAP_PATH, [workerPath], { + stdio: [ + /* stdin: */ 0, + /* stdout: */ 'pipe', + /* stderr: */ 'pipe', + 'ipc', + ], + env: Object.assign({}, process.env, { + DEBUG_COLORS: 1 // without this settings, colors won't be shown + }), +}); + +worker.stderr.pipe(process.stderr, { end: false }); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + - Josh Junon + +## Backers + +Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. 
[[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/http-proxy-agent/node_modules/debug/package.json b/node_modules/http-proxy-agent/node_modules/debug/package.json new file mode 100644 index 0000000..3bcdc24 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/debug/package.json @@ -0,0 +1,59 @@ +{ + "name": "debug", + "version": "4.3.4", + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "description": "Lightweight debugging utility for Node.js and the browser", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "LICENSE", + "README.md" + ], + "author": "Josh Junon ", + "contributors": [ + "TJ Holowaychuk ", + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:node": "istanbul cover _mocha -- test.js", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "ms": "2.1.2" + }, + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "istanbul": "^0.4.5", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "xo": "^0.23.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "engines": { + "node": ">=6.0" + } +} diff --git a/node_modules/http-proxy-agent/node_modules/debug/src/browser.js b/node_modules/http-proxy-agent/node_modules/debug/src/browser.js new file mode 100644 index 0000000..cd0fc35 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/debug/src/browser.js @@ -0,0 +1,269 @@ +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. 
+ */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; diff --git a/node_modules/http-proxy-agent/node_modules/debug/src/common.js b/node_modules/http-proxy-agent/node_modules/debug/src/common.js new file mode 100644 index 0000000..e3291b2 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/debug/src/common.js @@ -0,0 +1,274 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. 
+ */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. 
+ * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; diff --git a/node_modules/http-proxy-agent/node_modules/debug/src/index.js b/node_modules/http-proxy-agent/node_modules/debug/src/index.js new file mode 100644 index 0000000..bf4c57f --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/node_modules/http-proxy-agent/node_modules/debug/src/node.js b/node_modules/http-proxy-agent/node_modules/debug/src/node.js new file mode 100644 index 0000000..79bc085 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/debug/src/node.js @@ -0,0 +1,263 @@ +/** + * Module dependencies. + */ + +const tty = require('tty'); +const util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. 
+ */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. 
+ * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; diff --git a/node_modules/http-proxy-agent/node_modules/ms/index.js b/node_modules/http-proxy-agent/node_modules/ms/index.js new file mode 100644 index 0000000..c4498bc --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. 
+ * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/http-proxy-agent/node_modules/ms/license.md b/node_modules/http-proxy-agent/node_modules/ms/license.md new file mode 100644 index 0000000..69b6125 --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/http-proxy-agent/node_modules/ms/package.json b/node_modules/http-proxy-agent/node_modules/ms/package.json new file mode 100644 index 0000000..eea666e --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.1.2", + "description": "Tiny millisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.12.1", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1" + } +} diff --git a/node_modules/http-proxy-agent/node_modules/ms/readme.md b/node_modules/http-proxy-agent/node_modules/ms/readme.md new file mode 100644 index 0000000..9a1996b --- /dev/null +++ b/node_modules/http-proxy-agent/node_modules/ms/readme.md @@ -0,0 +1,60 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit) + +Use this package to easily convert various time formats to milliseconds. + +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! 
+ +As always, you can run the tests using: `npm test` diff --git a/node_modules/http-proxy-agent/package.json b/node_modules/http-proxy-agent/package.json new file mode 100644 index 0000000..659d6e1 --- /dev/null +++ b/node_modules/http-proxy-agent/package.json @@ -0,0 +1,57 @@ +{ + "name": "http-proxy-agent", + "version": "5.0.0", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-http-proxy-agent.git" + }, + "keywords": [ + "http", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-http-proxy-agent/issues" + }, + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/node": "^12.19.2", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "proxy": "1", + "rimraf": "^3.0.0", + "typescript": "^4.4.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/https-proxy-agent/LICENSE b/node_modules/https-proxy-agent/LICENSE new file mode 100644 index 0000000..008728c --- /dev/null +++ b/node_modules/https-proxy-agent/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/https-proxy-agent/README.md b/node_modules/https-proxy-agent/README.md new file mode 100644 index 0000000..ebc53cf --- /dev/null +++ b/node_modules/https-proxy-agent/README.md @@ -0,0 +1,70 @@ +https-proxy-agent +================ +### An HTTP(s) proxy `http.Agent` implementation for HTTPS + +This module provides an `http.Agent` implementation that connects to a specified +HTTP or HTTPS proxy server, and can be used with the built-in `https` module. 
+ +Specifically, this `Agent` implementation connects to an intermediary "proxy" +server and issues the [CONNECT HTTP method][CONNECT], which tells the proxy to +open a direct TCP connection to the destination server. + +Since this agent implements the CONNECT HTTP method, it also works with other +protocols that use this method when connecting over proxies (i.e. WebSockets). +See the "Examples" section below for more. + +Examples +-------- + +#### `https` module example + +```ts +import * as https from 'https'; +import { HttpsProxyAgent } from 'https-proxy-agent'; + +const agent = new HttpsProxyAgent('http://168.63.76.32:3128'); + +https.get('https://example.com', { agent }, (res) => { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + +#### `ws` WebSocket connection example + +```ts +import WebSocket from 'ws'; +import { HttpsProxyAgent } from 'https-proxy-agent'; + +const agent = new HttpsProxyAgent('http://168.63.76.32:3128'); +const socket = new WebSocket('ws://echo.websocket.org', { agent }); + +socket.on('open', function () { + console.log('"open" event!'); + socket.send('hello world'); +}); + +socket.on('message', function (data, flags) { + console.log('"message" event! %j %j', data, flags); + socket.close(); +}); +``` + +API +--- + +### new HttpsProxyAgent(proxy: string | URL, options?: HttpsProxyAgentOptions) + +The `HttpsProxyAgent` class implements an `http.Agent` subclass that connects +to the specified "HTTP(s) proxy server" in order to proxy HTTPS and/or WebSocket +requests. This is achieved by using the [HTTP `CONNECT` method][CONNECT]. + +The `proxy` argument is the URL for the proxy server. + +The `options` argument accepts the usual `http.Agent` constructor options, and +some additional properties: + + * `headers` - Object containing additional headers to send to the proxy server + in the `CONNECT` request. + +[CONNECT]: http://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_Tunneling diff --git a/node_modules/https-proxy-agent/dist/index.d.ts b/node_modules/https-proxy-agent/dist/index.d.ts new file mode 100644 index 0000000..8cd1151 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/index.d.ts @@ -0,0 +1,47 @@ +/// +/// +/// +/// +import * as net from 'net'; +import * as tls from 'tls'; +import * as http from 'http'; +import { Agent, AgentConnectOpts } from 'agent-base'; +import { URL } from 'url'; +import type { OutgoingHttpHeaders } from 'http'; +type Protocol = T extends `${infer Protocol}:${infer _}` ? Protocol : never; +type ConnectOptsMap = { + http: Omit; + https: Omit; +}; +type ConnectOpts = { + [P in keyof ConnectOptsMap]: Protocol extends P ? ConnectOptsMap[P] : never; +}[keyof ConnectOptsMap]; +export type HttpsProxyAgentOptions = ConnectOpts & http.AgentOptions & { + headers?: OutgoingHttpHeaders | (() => OutgoingHttpHeaders); +}; +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. 
+ */ +export declare class HttpsProxyAgent extends Agent { + static protocols: readonly ["http", "https"]; + readonly proxy: URL; + proxyHeaders: OutgoingHttpHeaders | (() => OutgoingHttpHeaders); + connectOpts: net.TcpNetConnectOpts & tls.ConnectionOptions; + constructor(proxy: Uri | URL, opts?: HttpsProxyAgentOptions); + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + connect(req: http.ClientRequest, opts: AgentConnectOpts): Promise; +} +export {}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/index.d.ts.map b/node_modules/https-proxy-agent/dist/index.d.ts.map new file mode 100644 index 0000000..f21239c --- /dev/null +++ b/node_modules/https-proxy-agent/dist/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAAA,OAAO,KAAK,GAAG,MAAM,KAAK,CAAC;AAC3B,OAAO,KAAK,GAAG,MAAM,KAAK,CAAC;AAC3B,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAG7B,OAAO,EAAE,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AACrD,OAAO,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC;AAE1B,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,MAAM,CAAC;AAKhD,KAAK,QAAQ,CAAC,CAAC,IAAI,CAAC,SAAS,GAAG,MAAM,QAAQ,IAAI,MAAM,CAAC,EAAE,GAAG,QAAQ,GAAG,KAAK,CAAC;AAE/E,KAAK,cAAc,GAAG;IACrB,IAAI,EAAE,IAAI,CAAC,GAAG,CAAC,iBAAiB,EAAE,MAAM,GAAG,MAAM,CAAC,CAAC;IACnD,KAAK,EAAE,IAAI,CAAC,GAAG,CAAC,iBAAiB,EAAE,MAAM,GAAG,MAAM,CAAC,CAAC;CACpD,CAAC;AAEF,KAAK,WAAW,CAAC,CAAC,IAAI;KACpB,CAAC,IAAI,MAAM,cAAc,GAAG,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC,GAC/C,cAAc,CAAC,CAAC,CAAC,GACjB,KAAK;CACR,CAAC,MAAM,cAAc,CAAC,CAAC;AAExB,MAAM,MAAM,sBAAsB,CAAC,CAAC,IAAI,WAAW,CAAC,CAAC,CAAC,GACrD,IAAI,CAAC,YAAY,GAAG;IACnB,OAAO,CAAC,EAAE,mBAAmB,GAAG,CAAC,MAAM,mBAAmB,CAAC,CAAC;CAC5D,CAAC;AAEH;;;;;;;;;;;GAWG;AACH,qBAAa,eAAe,CAAC,GAAG,SAAS,MAAM,CAAE,SAAQ,KAAK;IAC7D,MAAM,CAAC,SAAS,6BAA8B;IAE9C,QAAQ,CAAC,KAAK,EAAE,GAAG,CAAC;IACpB,YAAY,EAAE,mBAAmB,GAAG,CAAC,MAAM,mBAAmB,CAAC,CAAC;IAChE,WAAW,EAAE,GAAG,CAAC,iBAAiB,GAAG,GAAG,CAAC,iBAAiB,CAAC;gBAE/C,KAAK,EAAE,GAAG,GAAG,GAAG,EAAE,IAAI,CAAC,EAAE,sBAAsB,CAAC,GAAG,CAAC;IA0BhE;;;OAGG;IACG,OAAO,CACZ,GAAG,EAAE,IAAI,CAAC,aAAa,EACvB,IAAI,EAAE,gBAAgB,GACpB,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC;CA2GtB"} \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/index.js b/node_modules/https-proxy-agent/dist/index.js new file mode 100644 index 0000000..4c420aa --- /dev/null +++ b/node_modules/https-proxy-agent/dist/index.js @@ -0,0 +1,175 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpsProxyAgent = void 0; +const net = __importStar(require("net")); +const tls = __importStar(require("tls")); +const assert_1 = __importDefault(require("assert")); +const debug_1 = __importDefault(require("debug")); +const agent_base_1 = require("agent-base"); +const url_1 = require("url"); +const parse_proxy_response_1 = require("./parse-proxy-response"); +const debug = (0, debug_1.default)('https-proxy-agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + const servername = this.connectOpts.servername || this.connectOpts.host; + socket = tls.connect({ + ...this.connectOpts, + servername: servername && net.isIP(servername) ? undefined : servername, + }); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. 
+ if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls.connect({ + ...omit(opts, 'host', 'path', 'port'), + socket, + servername: net.isIP(servername) ? undefined : servername, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. 
+ s.push(buffered); + s.push(null); + }); + return fakeSocket; + } +} +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/index.js.map b/node_modules/https-proxy-agent/dist/index.js.map new file mode 100644 index 0000000..909f6cf --- /dev/null +++ b/node_modules/https-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,yCAA2B;AAC3B,yCAA2B;AAE3B,oDAA4B;AAC5B,kDAAgC;AAChC,2CAAqD;AACrD,6BAA0B;AAC1B,iEAA4D;AAG5D,MAAM,KAAK,GAAG,IAAA,eAAW,EAAC,mBAAmB,CAAC,CAAC;AAqB/C;;;;;;;;;;;GAWG;AACH,MAAa,eAAoC,SAAQ,kBAAK;IAO7D,YAAY,KAAgB,EAAE,IAAkC;QAC/D,KAAK,CAAC,IAAI,CAAC,CAAC;QACZ,IAAI,CAAC,OAAO,GAAG,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;QACnC,IAAI,CAAC,KAAK,GAAG,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,SAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;QAChE,IAAI,CAAC,YAAY,GAAG,IAAI,EAAE,OAAO,IAAI,EAAE,CAAC;QACxC,KAAK,CAAC,2CAA2C,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QAEpE,4CAA4C;QAC5C,MAAM,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,IAAI,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,OAAO,CAC5D,UAAU,EACV,EAAE,CACF,CAAC;QACF,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI;YAC3B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC;YAC/B,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,KAAK,QAAQ;gBAClC,CAAC,CAAC,GAAG;gBACL,CAAC,CAAC,EAAE,CAAC;QACN,IAAI,CAAC,WAAW,GAAG;YAClB,sEAAsE;YACtE,aAAa,EAAE,CAAC,UAAU,CAAC;YAC3B,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;YACxC,IAAI;YACJ,IAAI;SACJ,CAAC;IACH,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,OAAO,CACZ,GAAuB,EACvB,IAAsB;QAEtB,MAAM,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC;QAEvB,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE;YACf,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAC;SAC1C;QAED,kDAAkD;QAClD,IAAI,MAAkB,CAAC;QACvB,IAAI,KAAK,CAAC,QAAQ,KAAK,QAAQ,EAAE;YAChC,KAAK,CAAC,2BAA2B,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;YACrD,MAAM,UAAU,GACf,IAAI,CAAC,WAAW,CAAC,UAAU,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;YACtD,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC;gBACpB,GAAG,IAAI,CAAC,WAAW;gBACnB,UAAU,EACT,UAAU,IAAI,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU;aAC5D,CAAC,CAAC;SACH;aAAM;YACN,KAAK,CAAC,2BAA2B,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;YACrD,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SACvC;QAED,MAAM,OAAO,GACZ,OAAO,IAAI,CAAC,YAAY,KAAK,UAAU;YACtC,CAAC,CAAC,IAAI,CAAC,YAAY,EAAE;YACrB,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;QAC7B,MAAM,IAAI,GAAG,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;QAClE,IAAI,OAAO,GAAG,WAAW,IAAI,IAAI,IAAI,CAAC,IAAI,eAAe,CAAC;QAE1D,wDAAwD;QACxD,IAAI,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,QAAQ,EAAE;YACrC,MAAM,IAAI,GAAG,GAAG,kBAAkB,CACjC,KAAK,CAAC,QAAQ,CACd,IAAI,kBAAkB,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC;YAC1C,OAAO,CAAC,qBAAqB,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACpD,IAAI,CACJ,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;SACvB;QAED,OAAO,CAAC,IAAI,GAAG,GAAG,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;QAEtC,IAAI,CAAC,OAAO,CAAC,kBAAkB,CAAC,EAAE;YACjC,OAAO,CAAC,kBAAkB,CAAC,GAAG,IAAI,CAAC,SAAS;gBAC3C,CAAC,CAAC,YAAY;gBACd,CAAC,CAAC,OAAO,CAAC;SACX;QACD,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE;YACxC,OAAO,IAAI,GAAG,IAAI,KAAK,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC;SAC3C;QAED,MAAM,oBAAoB,GAAG,
IAAA,yCAAkB,EAAC,MAAM,CAAC,CAAC;QAExD,MAAM,CAAC,KAAK,CAAC,GAAG,OAAO,MAAM,CAAC,CAAC;QAE/B,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,MAAM,oBAAoB,CAAC;QACzD,GAAG,CAAC,IAAI,CAAC,cAAc,EAAE,OAAO,CAAC,CAAC;QAClC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,OAAO,EAAE,GAAG,CAAC,CAAC;QAExC,IAAI,OAAO,CAAC,UAAU,KAAK,GAAG,EAAE;YAC/B,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;YAE3B,IAAI,IAAI,CAAC,cAAc,EAAE;gBACxB,sDAAsD;gBACtD,8CAA8C;gBAC9C,KAAK,CAAC,oCAAoC,CAAC,CAAC;gBAC5C,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,IAAI,CAAC;gBAChD,OAAO,GAAG,CAAC,OAAO,CAAC;oBAClB,GAAG,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC;oBACrC,MAAM;oBACN,UAAU,EAAE,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU;iBACzD,CAAC,CAAC;aACH;YAED,OAAO,MAAM,CAAC;SACd;QAED,oEAAoE;QACpE,kEAAkE;QAClE,iEAAiE;QACjE,qBAAqB;QAErB,iEAAiE;QACjE,0DAA0D;QAC1D,oEAAoE;QACpE,mBAAmB;QACnB,EAAE;QACF,4CAA4C;QAC5C,MAAM,CAAC,OAAO,EAAE,CAAC;QAEjB,MAAM,UAAU,GAAG,IAAI,GAAG,CAAC,MAAM,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC;QACvD,UAAU,CAAC,QAAQ,GAAG,IAAI,CAAC;QAE3B,oEAAoE;QACpE,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAa,EAAE,EAAE;YACpC,KAAK,CAAC,2CAA2C,CAAC,CAAC;YACnD,IAAA,gBAAM,EAAC,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;YAEpC,gEAAgE;YAChE,8DAA8D;YAC9D,YAAY;YACZ,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;YACjB,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,OAAO,UAAU,CAAC;IACnB,CAAC;;AAjJM,yBAAS,GAAG,CAAC,MAAM,EAAE,OAAO,CAAU,CAAC;AADlC,0CAAe;AAqJ5B,SAAS,MAAM,CAAC,MAAkC;IACjD,MAAM,CAAC,MAAM,EAAE,CAAC;AACjB,CAAC;AAED,SAAS,IAAI,CACZ,GAAM,EACN,GAAG,IAAO;IAIV,MAAM,GAAG,GAAG,EAEX,CAAC;IACF,IAAI,GAAqB,CAAC;IAC1B,KAAK,GAAG,IAAI,GAAG,EAAE;QAChB,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACxB,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;SACpB;KACD;IACD,OAAO,GAAG,CAAC;AACZ,CAAC"} \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts b/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts new file mode 100644 index 0000000..84d5a9c --- /dev/null +++ b/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts @@ -0,0 +1,15 @@ +/// +/// +/// +import { IncomingHttpHeaders } from 'http'; +import { Readable } from 'stream'; +export interface ConnectResponse { + statusCode: number; + statusText: string; + headers: IncomingHttpHeaders; +} +export declare function parseProxyResponse(socket: Readable): Promise<{ + connect: ConnectResponse; + buffered: Buffer; +}>; +//# sourceMappingURL=parse-proxy-response.d.ts.map \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts.map b/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts.map new file mode 100644 index 0000000..414df55 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"parse-proxy-response.d.ts","sourceRoot":"","sources":["../src/parse-proxy-response.ts"],"names":[],"mappings":";;;AACA,OAAO,EAAE,mBAAmB,EAAE,MAAM,MAAM,CAAC;AAC3C,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAIlC,MAAM,WAAW,eAAe;IAC/B,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,mBAAmB,CAAC;CAC7B;AAED,wBAAgB,kBAAkB,CACjC,MAAM,EAAE,QAAQ,GACd,OAAO,CAAC;IAAE,OAAO,EAAE,eAAe,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAA;CAAE,CAAC,CAyGzD"} \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/parse-proxy-response.js b/node_modules/https-proxy-agent/dist/parse-proxy-response.js new file mode 100644 index 0000000..d3f506f --- /dev/null +++ b/node_modules/https-proxy-agent/dist/parse-proxy-response.js @@ -0,0 +1,101 @@ +"use strict"; +var 
__importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(require("debug")); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map b/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map new file mode 100644 index 0000000..71b58bb --- /dev/null +++ b/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"parse-proxy-response.js","sourceRoot":"","sources":["../src/parse-proxy-response.ts"],"names":[],"mappings":";;;;;;AAAA,kDAAgC;AAIhC,MAAM,KAAK,GAAG,IAAA,eAAW,EAAC,wCAAwC,CAAC,CAAC;AAQpE,SAAgB,kBAAkB,CACjC,MAAgB;IAEhB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,+EAA+E;QAC/E,gFAAgF;QAChF,8EAA8E;QAC9E,8BAA8B;QAC9B,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,MAAM,OAAO,GAAa,EAAE,CAAC;QAE7B,SAAS,IAAI;YACZ,MAAM,CAAC,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YACxB,IAAI,CAAC;gBAAE,MAAM,CAAC,CAAC,CAAC,CAAC;;gBACZ,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;QACpC,CAAC;QAED,SAAS,OAAO;YACf,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;YACpC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YACxC,MAAM,CAAC,cAAc,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;QACzC,CAAC;QAED,SAAS,KAAK;YACb,OAAO,EAAE,CAAC;YACV,KAAK,CAAC,OAAO,CAAC,CAAC;YACf,MAAM,CACL,IAAI,KAAK,CACR,0DAA0D,CAC1D,CACD,CAAC;QACH,CAAC;QAED,SAAS,OAAO,CAAC,GAAU;YAC1B,OAAO,EAAE,CAAC;YACV,KAAK,CAAC,YAAY,EAAE,GAAG,CAAC,CAAC;YACzB,MAAM,CAAC,GAAG,CAAC,CAAC;QACb,CAAC;QAED,SAAS,MAAM,CAAC,CAAS;YACxB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YAChB,aAAa,IAAI,CAAC,CAAC,MAAM,CAAC;YAE1B,MAAM,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;YACvD,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;YAElD,IAAI,YAAY,KAAK,CAAC,CAAC,EAAE;gBACxB,iBAAiB;gBACjB,KAAK,CAAC,8CAA8C,CAAC,CAAC;gBACtD,IAAI,EAAE,CAAC;gBACP,OAAO;aACP;YAED,MAAM,WAAW,GAAG,QAAQ;iBAC1B,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC;iBACtB,QAAQ,CAAC,OAAO,CAAC;iBACjB,KAAK,CAAC,MAAM,CAAC,CAAC;YAChB,MAAM,SAAS,GAAG,WAAW,CAAC,KAAK,EAAE,CAAC;YACtC,IAAI,CAAC,SAAS,EAAE;gBACf,MAAM,CAAC,OAAO,EAAE,CAAC;gBACjB,OAAO,MAAM,CACZ,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAC3D,CAAC;aACF;YACD,MAAM,cAAc,GAAG,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YAC5C,MAAM,UAAU,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC;YACtC,MAAM,UAAU,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YACrD,MAAM,OAAO,GAAwB,EAAE,CAAC;YACxC,KAAK,MAAM,MAAM,IAAI,WAAW,EAAE;gBACjC,IAAI,CAAC,MAAM;oBAAE,SAAS;gBACtB,MAAM,UAAU,GAAG,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;gBACvC,IAAI,UAAU,KAAK,CAAC,CAAC,EAAE;oBACtB,MAAM,CAAC,OAAO,EAAE,CAAC;oBACjB,OAAO,MAAM,CACZ,IAAI,KAAK,CACR,gDAAgD,MAAM,GAAG,CACzD,CACD,CAAC;iBACF;gBACD,MAAM,GAAG,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,WAAW,EAAE,CAAC;gBACtD,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC;gBACvD,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;gBAC7B,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;oBAChC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;iBAChC;qBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;oBAClC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;iBACpB;qBAAM;oBACN,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;iBACrB;aACD;YACD,KAAK,CAAC,kCAAkC,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;YAC9D,OAAO,EAAE,CAAC;YACV,OAAO,CAAC;gBACP,OAAO,EAAE;oBACR,UAAU;oBACV,UAAU;oBACV,OAAO;iBACP;gBACD,QAAQ;aACR,CAAC,CAAC;QACJ,CAAC;QAED,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;QAExB,IAAI,EAAE,CAAC;IACR,CAAC,CAAC,CAAC;AACJ,CAAC;AA3GD,gDA2GC"} \ No newline at end of file diff --git a/node_modules/https-proxy-agent/node_modules/debug/LICENSE b/node_modules/https-proxy-agent/node_modules/debug/LICENSE new file mode 100644 index 0000000..1a9820e --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including 
without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/https-proxy-agent/node_modules/debug/README.md b/node_modules/https-proxy-agent/node_modules/debug/README.md new file mode 100644 index 0000000..e9c3e04 --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/README.md @@ -0,0 +1,481 @@ +# debug +[![Build Status](https://travis-ci.org/debug-js/debug.svg?branch=master)](https://travis-ci.org/debug-js/debug) [![Coverage Status](https://coveralls.io/repos/github/debug-js/debug/badge.svg?branch=master)](https://coveralls.io/github/debug-js/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. + +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. + +Here are some examples: + +screen shot 2017-08-08 at 12 53 04 pm +screen shot 2017-08-08 at 12 53 38 pm +screen shot 2017-08-08 at 12 53 25 pm + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. 
+ +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. + + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard. Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. +See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. 
+ +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. +For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. + +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +In Chromium-based web browsers (e.g. Brave, Chrome, and Electron), the JavaScript console will—by default—only show messages logged by `debug` if the "Verbose" log level is _enabled_. + + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! 
+log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. + +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +`disable()` + +Will disable all namespaces. The functions returns the namespaces currently +enabled (and skipped). This can be useful if you want to disable debugging +temporarily without knowing what was enabled to begin with. + +For example: + +```js +let debug = require('debug'); +debug.enable('foo:*,-foo:bar'); +let namespaces = debug.disable(); +debug.enable(namespaces); +``` + +Note: There is no guarantee that the string will be identical to the initial +enable string, but semantically they will be identical. + +## Checking whether a debug target is enabled + +After you've created a debug instance, you can determine whether or not it is +enabled by checking the `enabled` property: + +```javascript +const debug = require('debug')('http'); + +if (debug.enabled) { + // do stuff... +} +``` + +You can also manually toggle this property to force the debug instance to be +enabled or disabled. + +## Usage in child processes + +Due to the way `debug` detects if the output is a TTY or not, colors are not shown in child processes when `stderr` is piped. A solution is to pass the `DEBUG_COLORS=1` environment variable to the child process. +For example: + +```javascript +worker = fork(WORKER_WRAP_PATH, [workerPath], { + stdio: [ + /* stdin: */ 0, + /* stdout: */ 'pipe', + /* stderr: */ 'pipe', + 'ipc', + ], + env: Object.assign({}, process.env, { + DEBUG_COLORS: 1 // without this settings, colors won't be shown + }), +}); + +worker.stderr.pipe(process.stderr, { end: false }); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + - Josh Junon + +## Backers + +Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. 
[[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/https-proxy-agent/node_modules/debug/package.json b/node_modules/https-proxy-agent/node_modules/debug/package.json new file mode 100644 index 0000000..3bcdc24 --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/package.json @@ -0,0 +1,59 @@ +{ + "name": "debug", + "version": "4.3.4", + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "description": "Lightweight debugging utility for Node.js and the browser", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "LICENSE", + "README.md" + ], + "author": "Josh Junon ", + "contributors": [ + "TJ Holowaychuk ", + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:node": "istanbul cover _mocha -- test.js", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "ms": "2.1.2" + }, + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "istanbul": "^0.4.5", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "xo": "^0.23.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "engines": { + "node": ">=6.0" + } +} diff --git a/node_modules/https-proxy-agent/node_modules/debug/src/browser.js b/node_modules/https-proxy-agent/node_modules/debug/src/browser.js new file mode 100644 index 0000000..cd0fc35 --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/src/browser.js @@ -0,0 +1,269 @@ +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. 
+ */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; diff --git a/node_modules/https-proxy-agent/node_modules/debug/src/common.js b/node_modules/https-proxy-agent/node_modules/debug/src/common.js new file mode 100644 index 0000000..e3291b2 --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/src/common.js @@ -0,0 +1,274 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. 
+ */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. 
+ * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; diff --git a/node_modules/https-proxy-agent/node_modules/debug/src/index.js b/node_modules/https-proxy-agent/node_modules/debug/src/index.js new file mode 100644 index 0000000..bf4c57f --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/node_modules/https-proxy-agent/node_modules/debug/src/node.js b/node_modules/https-proxy-agent/node_modules/debug/src/node.js new file mode 100644 index 0000000..79bc085 --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/src/node.js @@ -0,0 +1,263 @@ +/** + * Module dependencies. + */ + +const tty = require('tty'); +const util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. 
+ */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. 
+ * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; diff --git a/node_modules/https-proxy-agent/node_modules/ms/index.js b/node_modules/https-proxy-agent/node_modules/ms/index.js new file mode 100644 index 0000000..c4498bc --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. 
+ * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/https-proxy-agent/node_modules/ms/license.md b/node_modules/https-proxy-agent/node_modules/ms/license.md new file mode 100644 index 0000000..69b6125 --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/https-proxy-agent/node_modules/ms/package.json b/node_modules/https-proxy-agent/node_modules/ms/package.json new file mode 100644 index 0000000..eea666e --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.1.2", + "description": "Tiny millisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.12.1", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1" + } +} diff --git a/node_modules/https-proxy-agent/node_modules/ms/readme.md b/node_modules/https-proxy-agent/node_modules/ms/readme.md new file mode 100644 index 0000000..9a1996b --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/ms/readme.md @@ -0,0 +1,60 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit) + +Use this package to easily convert various time formats to milliseconds. + +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! 
+ +As always, you can run the tests using: `npm test` diff --git a/node_modules/https-proxy-agent/package.json b/node_modules/https-proxy-agent/package.json new file mode 100644 index 0000000..f3c67ef --- /dev/null +++ b/node_modules/https-proxy-agent/package.json @@ -0,0 +1,50 @@ +{ + "name": "https-proxy-agent", + "version": "7.0.4", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "repository": { + "type": "git", + "url": "https://github.com/TooTallNate/proxy-agents.git", + "directory": "packages/https-proxy-agent" + }, + "keywords": [ + "https", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "devDependencies": { + "@types/async-retry": "^1.4.5", + "@types/debug": "4", + "@types/jest": "^29.5.1", + "@types/node": "^14.18.45", + "async-listen": "^3.0.0", + "async-retry": "^1.3.3", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4", + "proxy": "2.1.1", + "tsconfig": "0.0.0" + }, + "engines": { + "node": ">= 14" + }, + "scripts": { + "build": "tsc", + "test": "jest --env node --verbose --bail test/test.ts", + "test-e2e": "jest --env node --verbose --bail test/e2e.test.ts", + "lint": "eslint --ext .ts", + "pack": "node ../../scripts/pack.mjs" + } +} \ No newline at end of file diff --git a/node_modules/is-stream/index.d.ts b/node_modules/is-stream/index.d.ts new file mode 100644 index 0000000..eee2e83 --- /dev/null +++ b/node_modules/is-stream/index.d.ts @@ -0,0 +1,79 @@ +import * as stream from 'stream'; + +declare const isStream: { + /** + @returns Whether `stream` is a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream(fs.createReadStream('unicorn.png')); + //=> true + + isStream({}); + //=> false + ``` + */ + (stream: unknown): stream is stream.Stream; + + /** + @returns Whether `stream` is a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream.writable(fs.createWriteStrem('unicorn.txt')); + //=> true + ``` + */ + writable(stream: unknown): stream is stream.Writable; + + /** + @returns Whether `stream` is a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream.readable(fs.createReadStream('unicorn.png')); + //=> true + ``` + */ + readable(stream: unknown): stream is stream.Readable; + + /** + @returns Whether `stream` is a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). + + @example + ``` + import {Duplex} from 'stream'; + import isStream = require('is-stream'); + + isStream.duplex(new Duplex()); + //=> true + ``` + */ + duplex(stream: unknown): stream is stream.Duplex; + + /** + @returns Whether `stream` is a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). 
+ + @example + ``` + import * as fs from 'fs'; + import Stringify = require('streaming-json-stringify'); + import isStream = require('is-stream'); + + isStream.transform(Stringify()); + //=> true + ``` + */ + transform(input: unknown): input is stream.Transform; +}; + +export = isStream; diff --git a/node_modules/is-stream/index.js b/node_modules/is-stream/index.js new file mode 100644 index 0000000..2e43434 --- /dev/null +++ b/node_modules/is-stream/index.js @@ -0,0 +1,28 @@ +'use strict'; + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; diff --git a/node_modules/is-stream/license b/node_modules/is-stream/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/is-stream/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/is-stream/package.json b/node_modules/is-stream/package.json new file mode 100644 index 0000000..c3b5673 --- /dev/null +++ b/node_modules/is-stream/package.json @@ -0,0 +1,42 @@ +{ + "name": "is-stream", + "version": "2.0.1", + "description": "Check if something is a Node.js stream", + "license": "MIT", + "repository": "sindresorhus/is-stream", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "stream", + "type", + "streams", + "writable", + "readable", + "duplex", + "transform", + "check", + "detect", + "is" + ], + "devDependencies": { + "@types/node": "^11.13.6", + "ava": "^1.4.1", + "tempy": "^0.3.0", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/is-stream/readme.md b/node_modules/is-stream/readme.md new file mode 100644 index 0000000..19308e7 --- /dev/null +++ b/node_modules/is-stream/readme.md @@ -0,0 +1,60 @@ +# is-stream + +> Check if something is a [Node.js stream](https://nodejs.org/api/stream.html) + +## Install + +``` +$ npm install is-stream +``` + +## Usage + +```js +const fs = require('fs'); +const isStream = require('is-stream'); + +isStream(fs.createReadStream('unicorn.png')); +//=> true + +isStream({}); +//=> false +``` + +## API + +### isStream(stream) + +Returns a `boolean` for whether it's a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). + +#### isStream.writable(stream) + +Returns a `boolean` for whether it's a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). + +#### isStream.readable(stream) + +Returns a `boolean` for whether it's a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). + +#### isStream.duplex(stream) + +Returns a `boolean` for whether it's a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). + +#### isStream.transform(stream) + +Returns a `boolean` for whether it's a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). + +## Related + +- [is-file-stream](https://github.com/jamestalmage/is-file-stream) - Detect if a stream is a file stream + +--- + +

+Get professional support for this package with a Tidelift subscription.
+
+Tidelift helps make open source sustainable for maintainers while giving companies
+assurances about security, maintenance, and licensing for their dependencies.
+
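As a quick, informal illustration of the sub-checks documented in the readme above (not part of the package itself; the `PassThrough` instance and the plain object are arbitrary test values):

```js
// Rough sketch of how is-stream's duck-typing checks behave on a Node core stream.
const stream = require('stream');
const isStream = require('is-stream');

const pass = new stream.PassThrough(); // a Transform, and therefore also a Duplex

console.log(isStream(pass));             // true  - it has a pipe() method
console.log(isStream.writable(pass));    // true  - _write and _writableState are present
console.log(isStream.readable(pass));    // true  - _read and _readableState are present
console.log(isStream.duplex(pass));      // true  - both writable and readable
console.log(isStream.transform(pass));   // true  - _transform is present
console.log(isStream({ pipe: 'nope' })); // false - pipe exists but is not a function
```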
diff --git a/node_modules/json-bigint/LICENSE b/node_modules/json-bigint/LICENSE new file mode 100644 index 0000000..9ab4d83 --- /dev/null +++ b/node_modules/json-bigint/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013 Andrey Sidorov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/json-bigint/README.md b/node_modules/json-bigint/README.md new file mode 100644 index 0000000..e7335c7 --- /dev/null +++ b/node_modules/json-bigint/README.md @@ -0,0 +1,240 @@ +# json-bigint + +[![Build Status](https://secure.travis-ci.org/sidorares/json-bigint.png)](http://travis-ci.org/sidorares/json-bigint) +[![NPM](https://nodei.co/npm/json-bigint.png?downloads=true&stars=true)](https://nodei.co/npm/json-bigint/) + +JSON.parse/stringify with bigints support. Based on Douglas Crockford [JSON.js](https://github.com/douglascrockford/JSON-js) package and [bignumber.js](https://github.com/MikeMcl/bignumber.js) library. + +Native `Bigint` was added to JS recently, so we added an option to leverage it instead of `bignumber.js`. However, the parsing with native `BigInt` is kept an option for backward compability. + +While most JSON parsers assume numeric values have same precision restrictions as IEEE 754 double, JSON specification _does not_ say anything about number precision. Any floating point number in decimal (optionally scientific) notation is valid JSON value. 
It's a good idea to serialize values which might fall out of IEEE 754 integer precision as strings in your JSON api, but `{ "value" : 9223372036854775807}`, for example, is still a valid RFC4627 JSON string, and in most JS runtimes the result of `JSON.parse` is this object: `{ value: 9223372036854776000 }` + +========== + +example: + +```js +var JSONbig = require('json-bigint'); + +var json = '{ "value" : 9223372036854775807, "v2": 123 }'; +console.log('Input:', json); +console.log(''); + +console.log('node.js built-in JSON:'); +var r = JSON.parse(json); +console.log('JSON.parse(input).value : ', r.value.toString()); +console.log('JSON.stringify(JSON.parse(input)):', JSON.stringify(r)); + +console.log('\n\nbig number JSON:'); +var r1 = JSONbig.parse(json); +console.log('JSONbig.parse(input).value : ', r1.value.toString()); +console.log('JSONbig.stringify(JSONbig.parse(input)):', JSONbig.stringify(r1)); +``` + +Output: + +``` +Input: { "value" : 9223372036854775807, "v2": 123 } + +node.js built-in JSON: +JSON.parse(input).value : 9223372036854776000 +JSON.stringify(JSON.parse(input)): {"value":9223372036854776000,"v2":123} + + +big number JSON: +JSONbig.parse(input).value : 9223372036854775807 +JSONbig.stringify(JSONbig.parse(input)): {"value":9223372036854775807,"v2":123} +``` + +### Options + +The behaviour of the parser is somewhat configurable through 'options' + +#### options.strict, boolean, default false + +Specifies the parsing should be "strict" towards reporting duplicate-keys in the parsed string. +The default follows what is allowed in standard json and resembles the behavior of JSON.parse, but overwrites any previous values with the last one assigned to the duplicate-key. + +Setting options.strict = true will fail-fast on such duplicate-key occurances and thus warn you upfront of possible lost information. + +example: + +```js +var JSONbig = require('json-bigint'); +var JSONstrict = require('json-bigint')({ strict: true }); + +var dupkeys = '{ "dupkey": "value 1", "dupkey": "value 2"}'; +console.log('\n\nDuplicate Key test with both lenient and strict JSON parsing'); +console.log('Input:', dupkeys); +var works = JSONbig.parse(dupkeys); +console.log('JSON.parse(dupkeys).dupkey: %s', works.dupkey); +var fails = 'will stay like this'; +try { + fails = JSONstrict.parse(dupkeys); + console.log('ERROR!! Should never get here'); +} catch (e) { + console.log( + 'Succesfully catched expected exception on duplicate keys: %j', + e + ); +} +``` + +Output + +``` +Duplicate Key test with big number JSON +Input: { "dupkey": "value 1", "dupkey": "value 2"} +JSON.parse(dupkeys).dupkey: value 2 +Succesfully catched expected exception on duplicate keys: {"name":"SyntaxError","message":"Duplicate key \"dupkey\"","at":33,"text":"{ \"dupkey\": \"value 1\", \"dupkey\": \"value 2\"}"} + +``` + +#### options.storeAsString, boolean, default false + +Specifies if BigInts should be stored in the object as a string, rather than the default BigNumber. + +Note that this is a dangerous behavior as it breaks the default functionality of being able to convert back-and-forth without data type changes (as this will convert all BigInts to be-and-stay strings). 
+ +example: + +```js +var JSONbig = require('json-bigint'); +var JSONbigString = require('json-bigint')({ storeAsString: true }); +var key = '{ "key": 1234567890123456789 }'; +console.log('\n\nStoring the BigInt as a string, instead of a BigNumber'); +console.log('Input:', key); +var withInt = JSONbig.parse(key); +var withString = JSONbigString.parse(key); +console.log( + 'Default type: %s, With option type: %s', + typeof withInt.key, + typeof withString.key +); +``` + +Output + +``` +Storing the BigInt as a string, instead of a BigNumber +Input: { "key": 1234567890123456789 } +Default type: object, With option type: string + +``` + +#### options.useNativeBigInt, boolean, default false + +Specifies if parser uses native BigInt instead of bignumber.js + +example: + +```js +var JSONbig = require('json-bigint'); +var JSONbigNative = require('json-bigint')({ useNativeBigInt: true }); +var key = '{ "key": 993143214321423154315154321 }'; +console.log(`\n\nStoring the Number as native BigInt, instead of a BigNumber`); +console.log('Input:', key); +var normal = JSONbig.parse(key); +var nativeBigInt = JSONbigNative.parse(key); +console.log( + 'Default type: %s, With option type: %s', + typeof normal.key, + typeof nativeBigInt.key +); +``` + +Output + +``` +Storing the Number as native BigInt, instead of a BigNumber +Input: { "key": 993143214321423154315154321 } +Default type: object, With option type: bigint + +``` + +#### options.alwaysParseAsBig, boolean, default false + +Specifies if all numbers should be stored as BigNumber. + +Note that this is a dangerous behavior as it breaks the default functionality of being able to convert back-and-forth without data type changes (as this will convert all Number to be-and-stay BigNumber) + +example: + +```js +var JSONbig = require('json-bigint'); +var JSONbigAlways = require('json-bigint')({ alwaysParseAsBig: true }); +var key = '{ "key": 123 }'; // there is no need for BigNumber by default, but we're forcing it +console.log(`\n\nStoring the Number as a BigNumber, instead of a Number`); +console.log('Input:', key); +var normal = JSONbig.parse(key); +var always = JSONbigAlways.parse(key); +console.log( + 'Default type: %s, With option type: %s', + typeof normal.key, + typeof always.key +); +``` + +Output + +``` +Storing the Number as a BigNumber, instead of a Number +Input: { "key": 123 } +Default type: number, With option type: object + +``` + +If you want to force all numbers to be parsed as native `BigInt` +(you probably do! Otherwise any calulations become a real headache): + +```js +var JSONbig = require('json-bigint')({ + alwaysParseAsBig: true, + useNativeBigInt: true, +}); +``` + +#### options.protoAction, boolean, default: "error". Possible values: "error", "ignore", "preserve" + +#### options.constructorAction, boolean, default: "error". Possible values: "error", "ignore", "preserve" + +Controls how `__proto__` and `constructor` properties are treated. If set to "error" they are not allowed and +parse() call will throw an error. If set to "ignore" the prroperty and it;s value is skipped from parsing and object building. +If set to "preserve" the `__proto__` property is set. One should be extra careful and make sure any other library consuming generated data +is not vulnerable to prototype poisoning attacks. 
+ +example: + +```js +var JSONbigAlways = require('json-bigint')({ protoAction: 'ignore' }); +const user = JSONbig.parse('{ "__proto__": { "admin": true }, "id": 12345 }'); +// => result is { id: 12345 } +``` + +### Links: + +- [RFC4627: The application/json Media Type for JavaScript Object Notation (JSON)](http://www.ietf.org/rfc/rfc4627.txt) +- [Re: \[Json\] Limitations on number size?](http://www.ietf.org/mail-archive/web/json/current/msg00297.html) +- [Is there any proper way to parse JSON with large numbers? (long, bigint, int64)](http://stackoverflow.com/questions/18755125/node-js-is-there-any-proper-way-to-parse-json-with-large-numbers-long-bigint) +- [What is JavaScript's Max Int? What's the highest Integer value a Number can go to without losing precision?](http://stackoverflow.com/questions/307179/what-is-javascripts-max-int-whats-the-highest-integer-value-a-number-can-go-t) +- [Large numbers erroneously rounded in Javascript](http://stackoverflow.com/questions/1379934/large-numbers-erroneously-rounded-in-javascript) + +### Note on native BigInt support + +#### Stringifying + +Full support out-of-the-box, stringifies BigInts as pure numbers (no quotes, no `n`) + +#### Limitations + +- Roundtrip operations + +`s === JSONbig.stringify(JSONbig.parse(s))` but + +`o !== JSONbig.parse(JSONbig.stringify(o))` + +when `o` has a value with something like `123n`. + +`JSONbig` stringify `123n` as `123`, which becomes `number` (aka `123` not `123n`) by default when being reparsed. + +There is currently no consistent way to deal with this issue, so we decided to leave it, handling this specific case is then up to users. diff --git a/node_modules/json-bigint/index.js b/node_modules/json-bigint/index.js new file mode 100644 index 0000000..4757600 --- /dev/null +++ b/node_modules/json-bigint/index.js @@ -0,0 +1,12 @@ +var json_stringify = require('./lib/stringify.js').stringify; +var json_parse = require('./lib/parse.js'); + +module.exports = function(options) { + return { + parse: json_parse(options), + stringify: json_stringify + } +}; +//create the default method members with no options applied for backwards compatibility +module.exports.parse = json_parse(); +module.exports.stringify = json_stringify; diff --git a/node_modules/json-bigint/lib/parse.js b/node_modules/json-bigint/lib/parse.js new file mode 100644 index 0000000..bb4e5eb --- /dev/null +++ b/node_modules/json-bigint/lib/parse.js @@ -0,0 +1,443 @@ +var BigNumber = null; + +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors + +const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; + +/* + json_parse.js + 2012-06-20 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + This file creates a json_parse function. + During create you can (optionally) specify some behavioural switches + + require('json-bigint')(options) + + The optional options parameter holds switches that drive certain + aspects of the parsing process: + * options.strict = true will warn about duplicate-key usage in the json. 
+ The default (strict = false) will silently ignore those and overwrite + values for keys that are in duplicate use. + + The resulting function follows this signature: + json_parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = json_parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + This is a reference implementation. You are free to copy, modify, or + redistribute. + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. +*/ + +/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, + hasOwnProperty, message, n, name, prototype, push, r, t, text +*/ + +var json_parse = function (options) { + 'use strict'; + + // This is a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + // We are defining the function inside of another function to avoid creating + // global variables. + + // Default options one can override by passing options to the parse() + var _options = { + strict: false, // not being strict means do not generate syntax errors for "duplicate key" + storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string + alwaysParseAsBig: false, // toggles whether all numbers should be Big + useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js + protoAction: 'error', + constructorAction: 'error', + }; + + // If there are options, then use them to override the default _options + if (options !== undefined && options !== null) { + if (options.strict === true) { + _options.strict = true; + } + if (options.storeAsString === true) { + _options.storeAsString = true; + } + _options.alwaysParseAsBig = + options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; + _options.useNativeBigInt = + options.useNativeBigInt === true ? 
options.useNativeBigInt : false; + + if (typeof options.constructorAction !== 'undefined') { + if ( + options.constructorAction === 'error' || + options.constructorAction === 'ignore' || + options.constructorAction === 'preserve' + ) { + _options.constructorAction = options.constructorAction; + } else { + throw new Error( + `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` + ); + } + } + + if (typeof options.protoAction !== 'undefined') { + if ( + options.protoAction === 'error' || + options.protoAction === 'ignore' || + options.protoAction === 'preserve' + ) { + _options.protoAction = options.protoAction; + } else { + throw new Error( + `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` + ); + } + } + } + + var at, // The index of the current character + ch, // The current character + escapee = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + }, + text, + error = function (m) { + // Call error when something is wrong. + + throw { + name: 'SyntaxError', + message: m, + at: at, + text: text, + }; + }, + next = function (c) { + // If a c parameter is provided, verify that it matches the current character. + + if (c && c !== ch) { + error("Expected '" + c + "' instead of '" + ch + "'"); + } + + // Get the next character. When there are no more characters, + // return the empty string. + + ch = text.charAt(at); + at += 1; + return ch; + }, + number = function () { + // Parse a number value. + + var number, + string = ''; + + if (ch === '-') { + string = '-'; + next('-'); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; + } + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + } + number = +string; + if (!isFinite(number)) { + error('Bad number'); + } else { + if (BigNumber == null) BigNumber = require('bignumber.js'); + //if (number > 9007199254740992 || number < -9007199254740992) + // Bignumber has stricter check: everything with length > 15 digits disallowed + if (string.length > 15) + return _options.storeAsString + ? string + : _options.useNativeBigInt + ? BigInt(string) + : new BigNumber(string); + else + return !_options.alwaysParseAsBig + ? number + : _options.useNativeBigInt + ? BigInt(number) + : new BigNumber(number); + } + }, + string = function () { + // Parse a string value. + + var hex, + i, + string = '', + uffff; + + // When parsing for string values, we must look for " and \ characters. + + if (ch === '"') { + var startAt = at; + while (next()) { + if (ch === '"') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + return string; + } + if (ch === '\\') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; + } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; + } + startAt = at; + } + } + } + error('Bad string'); + }, + white = function () { + // Skip whitespace. 
+ + while (ch && ch <= ' ') { + next(); + } + }, + word = function () { + // true, false, or null. + + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + } + error("Unexpected '" + ch + "'"); + }, + value, // Place holder for the value function. + array = function () { + // Parse an array value. + + var array = []; + + if (ch === '[') { + next('['); + white(); + if (ch === ']') { + next(']'); + return array; // empty array + } + while (ch) { + array.push(value()); + white(); + if (ch === ']') { + next(']'); + return array; + } + next(','); + white(); + } + } + error('Bad array'); + }, + object = function () { + // Parse an object value. + + var key, + object = Object.create(null); + + if (ch === '{') { + next('{'); + white(); + if (ch === '}') { + next('}'); + return object; // empty object + } + while (ch) { + key = string(); + white(); + next(':'); + if ( + _options.strict === true && + Object.hasOwnProperty.call(object, key) + ) { + error('Duplicate key "' + key + '"'); + } + + if (suspectProtoRx.test(key) === true) { + if (_options.protoAction === 'error') { + error('Object contains forbidden prototype property'); + } else if (_options.protoAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else if (suspectConstructorRx.test(key) === true) { + if (_options.constructorAction === 'error') { + error('Object contains forbidden constructor property'); + } else if (_options.constructorAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else { + object[key] = value(); + } + + white(); + if (ch === '}') { + next('}'); + return object; + } + next(','); + white(); + } + } + error('Bad object'); + }; + + value = function () { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or a word. + + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + return string(); + case '-': + return number(); + default: + return ch >= '0' && ch <= '9' ? number() : word(); + } + }; + + // Return the json_parse function. It will have access to all of the above + // functions and variables. + + return function (source, reviver) { + var result; + + text = source + ''; + at = 0; + ch = ' '; + result = value(); + white(); + if (ch) { + error('Syntax error'); + } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. + + return typeof reviver === 'function' + ? 
(function walk(holder, key) { + var k, + v, + value = holder[key]; + if (value && typeof value === 'object') { + Object.keys(value).forEach(function (k) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + }); + } + return reviver.call(holder, key, value); + })({ '': result }, '') + : result; + }; +}; + +module.exports = json_parse; diff --git a/node_modules/json-bigint/lib/stringify.js b/node_modules/json-bigint/lib/stringify.js new file mode 100644 index 0000000..3bd5269 --- /dev/null +++ b/node_modules/json-bigint/lib/stringify.js @@ -0,0 +1,384 @@ +var BigNumber = require('bignumber.js'); + +/* + json2.js + 2013-05-26 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. 
+ + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. + + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, regexp: true */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +var JSON = module.exports; + +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? 
c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; + } + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key], + isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + if (isBigNumber) { + return value; + } else { + return quote(value); + } + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + case 'bigint': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; + } + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. + + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { + +// Otherwise, iterate through all of the keys in the object. + + Object.keys(value).forEach(function(k) { + var v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + }); + } + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. 
The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. + + return str('', {'': value}); + }; + } +}()); diff --git a/node_modules/json-bigint/package.json b/node_modules/json-bigint/package.json new file mode 100644 index 0000000..9309f6b --- /dev/null +++ b/node_modules/json-bigint/package.json @@ -0,0 +1,34 @@ +{ + "name": "json-bigint", + "version": "1.0.0", + "description": "JSON.parse with bigints support", + "main": "index.js", + "files": [ + "index.js", + "lib/parse.js", + "lib/stringify.js" + ], + "scripts": { + "test": "./node_modules/mocha/bin/mocha -R spec --check-leaks test/*-test.js" + }, + "repository": { + "type": "git", + "url": "git@github.com:sidorares/json-bigint.git" + }, + "keywords": [ + "JSON", + "bigint", + "bignumber", + "parse", + "json" + ], + "author": "Andrey Sidorov ", + "license": "MIT", + "dependencies": { + "bignumber.js": "^9.0.0" + }, + "devDependencies": { + "chai": "4.2.0", + "mocha": "8.0.1" + } +} diff --git a/node_modules/jwa/LICENSE b/node_modules/jwa/LICENSE new file mode 100644 index 0000000..caeb849 --- /dev/null +++ b/node_modules/jwa/LICENSE @@ -0,0 +1,17 @@ +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
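Before moving on to jwa, one detail of json-bigint's parser shown earlier is worth a sketch: `lib/parse.js` switches to a big representation purely on digit count (more than 15 characters), not on whether the value actually exceeds `Number.MAX_SAFE_INTEGER`. A minimal illustration with default options (the field names and values below are made up):

```js
// Sketch of the length-based cutoff in json-bigint's lib/parse.js (default options).
const JSONbig = require('json-bigint');

const parsed = JSONbig.parse('{ "big": 9007199254740993, "small": 12345 }');

console.log(typeof parsed.small);       // 'number'  - 15 digits or fewer stay plain numbers
console.log(typeof parsed.big);         // 'object'  - 16 digits, stored as a BigNumber
console.log(parsed.big.toString());     // '9007199254740993' - precision preserved
console.log(JSONbig.stringify(parsed)); // '{"big":9007199254740993,"small":12345}'
```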
diff --git a/node_modules/jwa/README.md b/node_modules/jwa/README.md new file mode 100644 index 0000000..09e9648 --- /dev/null +++ b/node_modules/jwa/README.md @@ -0,0 +1,150 @@ +# node-jwa [![Build Status](https://travis-ci.org/brianloveswords/node-jwa.svg?branch=master)](https://travis-ci.org/brianloveswords/node-jwa) + +A +[JSON Web Algorithms](http://tools.ietf.org/id/draft-ietf-jose-json-web-algorithms-08.html) +implementation focusing (exclusively, at this point) on the algorithms necessary for +[JSON Web Signatures](http://self-issued.info/docs/draft-ietf-jose-json-web-signature.html). + +This library supports all of the required, recommended and optional cryptographic algorithms for JWS: + +alg Parameter Value | Digital Signature or MAC Algorithm +----------------|---------------------------- +HS256 | HMAC using SHA-256 hash algorithm +HS384 | HMAC using SHA-384 hash algorithm +HS512 | HMAC using SHA-512 hash algorithm +RS256 | RSASSA using SHA-256 hash algorithm +RS384 | RSASSA using SHA-384 hash algorithm +RS512 | RSASSA using SHA-512 hash algorithm +PS256 | RSASSA-PSS using SHA-256 hash algorithm +PS384 | RSASSA-PSS using SHA-384 hash algorithm +PS512 | RSASSA-PSS using SHA-512 hash algorithm +ES256 | ECDSA using P-256 curve and SHA-256 hash algorithm +ES384 | ECDSA using P-384 curve and SHA-384 hash algorithm +ES512 | ECDSA using P-521 curve and SHA-512 hash algorithm +none | No digital signature or MAC value included + +Please note that PS* only works on Node 6.12+ (excluding 7.x). + +# Requirements + +In order to run the tests, a recent version of OpenSSL is +required. **The version that comes with OS X (OpenSSL 0.9.8r 8 Feb +2011) is not recent enough**, as it does not fully support ECDSA +keys. You'll need to use a version > 1.0.0; I tested with OpenSSL 1.0.1c 10 May 2012. + +# Testing + +To run the tests, do + +```bash +$ npm test +``` + +This will generate a bunch of keypairs to use in testing. If you want to +generate new keypairs, do `make clean` before running `npm test` again. + +## Methodology + +I spawn `openssl dgst -sign` to test OpenSSL sign → JS verify and +`openssl dgst -verify` to test JS sign → OpenSSL verify for each of the +RSA and ECDSA algorithms. + +# Usage + +## jwa(algorithm) + +Creates a new `jwa` object with `sign` and `verify` methods for the +algorithm. Valid values for algorithm can be found in the table above +(`'HS256'`, `'HS384'`, etc) and are case-sensitive. Passing an invalid +algorithm value will throw a `TypeError`. + + +## jwa#sign(input, secretOrPrivateKey) + +Sign some input with either a secret for HMAC algorithms, or a private +key for RSA and ECDSA algorithms. + +If input is not already a string or buffer, `JSON.stringify` will be +called on it to attempt to coerce it. + +For the HMAC algorithm, `secretOrPrivateKey` should be a string or a +buffer. For ECDSA and RSA, the value should be a string representing a +PEM encoded **private** key. + +Output [base64url](http://en.wikipedia.org/wiki/Base64#URL_applications) +formatted. This is for convenience as JWS expects the signature in this +format. If your application needs the output in a different format, +[please open an issue](https://github.com/brianloveswords/node-jwa/issues). In +the meantime, you can use +[brianloveswords/base64url](https://github.com/brianloveswords/base64url) +to decode the signature. + +As of nodejs *v0.11.8*, SPKAC support was introduce. If your nodeJs +version satisfies, then you can pass an object `{ key: '..', passphrase: '...' 
}` + + +## jwa#verify(input, signature, secretOrPublicKey) + +Verify a signature. Returns `true` or `false`. + +`signature` should be a base64url encoded string. + +For the HMAC algorithm, `secretOrPublicKey` should be a string or a +buffer. For ECDSA and RSA, the value should be a string represented a +PEM encoded **public** key. + + +# Example + +HMAC +```js +const jwa = require('jwa'); + +const hmac = jwa('HS256'); +const input = 'super important stuff'; +const secret = 'shhhhhh'; + +const signature = hmac.sign(input, secret); +hmac.verify(input, signature, secret) // === true +hmac.verify(input, signature, 'trickery!') // === false +``` + +With keys +```js +const fs = require('fs'); +const jwa = require('jwa'); +const privateKey = fs.readFileSync(__dirname + '/ecdsa-p521-private.pem'); +const publicKey = fs.readFileSync(__dirname + '/ecdsa-p521-public.pem'); + +const ecdsa = jwa('ES512'); +const input = 'very important stuff'; + +const signature = ecdsa.sign(input, privateKey); +ecdsa.verify(input, signature, publicKey) // === true +``` +## License + +MIT + +``` +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +``` diff --git a/node_modules/jwa/index.js b/node_modules/jwa/index.js new file mode 100644 index 0000000..d2061ef --- /dev/null +++ b/node_modules/jwa/index.js @@ -0,0 +1,252 @@ +var bufferEqual = require('buffer-equal-constant-time'); +var Buffer = require('safe-buffer').Buffer; +var crypto = require('crypto'); +var formatEcdsa = require('ecdsa-sig-formatter'); +var util = require('util'); + +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' 
+var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} + +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; + +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (typeof key === 'object') { + return; + } + + throw typeError(MSG_INVALID_SIGNER_KEY); +}; + +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return key; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } + + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} + +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function toBase64(base64url) { + base64url = base64url.toString(); + + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } + + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} + +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} + +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} + +function createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. 
+ var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} + +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} + +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} + +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; diff --git a/node_modules/jwa/package.json b/node_modules/jwa/package.json new file mode 100644 index 0000000..7b3d5e5 --- /dev/null +++ b/node_modules/jwa/package.json @@ -0,0 +1,37 @@ +{ + "name": "jwa", + "version": "2.0.0", + "description": "JWA implementation (supports all JWS algorithms)", + "main": "index.js", + "directories": { + "test": "test" + }, + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "base64url": "^2.0.0", + "jwk-to-pem": "^2.0.1", + "semver": "4.3.6", + "tap": "6.2.0" + }, + "scripts": { + "test": "make test" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/node-jwa.git" + }, + "keywords": [ + "jwa", + 
"jws", + "jwt", + "rsa", + "ecdsa", + "hmac" + ], + "author": "Brian J. Brennan ", + "license": "MIT" +} diff --git a/node_modules/jws/CHANGELOG.md b/node_modules/jws/CHANGELOG.md new file mode 100644 index 0000000..af8fc28 --- /dev/null +++ b/node_modules/jws/CHANGELOG.md @@ -0,0 +1,34 @@ +# Change Log +All notable changes to this project will be documented in this file. + +## [3.0.0] +### Changed +- **BREAKING**: `jwt.verify` now requires an `algorithm` parameter, and + `jws.createVerify` requires an `algorithm` option. The `"alg"` field + signature headers is ignored. This mitigates a critical security flaw + in the library which would allow an attacker to generate signatures with + arbitrary contents that would be accepted by `jwt.verify`. See + https://auth0.com/blog/2015/03/31/critical-vulnerabilities-in-json-web-token-libraries/ + for details. + +## [2.0.0] - 2015-01-30 +### Changed +- **BREAKING**: Default payload encoding changed from `binary` to + `utf8`. `utf8` is a is a more sensible default than `binary` because + many payloads, as far as I can tell, will contain user-facing + strings that could be in any language. ([6b6de48]) + +- Code reorganization, thanks [@fearphage]! ([7880050]) + +### Added +- Option in all relevant methods for `encoding`. For those few users + that might be depending on a `binary` encoding of the messages, this + is for them. ([6b6de48]) + +[unreleased]: https://github.com/brianloveswords/node-jws/compare/v2.0.0...HEAD +[2.0.0]: https://github.com/brianloveswords/node-jws/compare/v1.0.1...v2.0.0 + +[7880050]: https://github.com/brianloveswords/node-jws/commit/7880050 +[6b6de48]: https://github.com/brianloveswords/node-jws/commit/6b6de48 + +[@fearphage]: https://github.com/fearphage diff --git a/node_modules/jws/LICENSE b/node_modules/jws/LICENSE new file mode 100644 index 0000000..caeb849 --- /dev/null +++ b/node_modules/jws/LICENSE @@ -0,0 +1,17 @@ +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/jws/index.js b/node_modules/jws/index.js new file mode 100644 index 0000000..8c8da93 --- /dev/null +++ b/node_modules/jws/index.js @@ -0,0 +1,22 @@ +/*global exports*/ +var SignStream = require('./lib/sign-stream'); +var VerifyStream = require('./lib/verify-stream'); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; +exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; diff --git a/node_modules/jws/lib/data-stream.js b/node_modules/jws/lib/data-stream.js new file mode 100644 index 0000000..3535d31 --- /dev/null +++ b/node_modules/jws/lib/data-stream.js @@ -0,0 +1,55 @@ +/*global module, process*/ +var Buffer = require('safe-buffer').Buffer; +var Stream = require('stream'); +var util = require('util'); + +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } + + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } + + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } + + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); + +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; +}; + +module.exports = DataStream; diff --git a/node_modules/jws/lib/sign-stream.js b/node_modules/jws/lib/sign-stream.js new file mode 100644 index 0000000..6a7ee42 --- /dev/null +++ b/node_modules/jws/lib/sign-stream.js @@ -0,0 +1,78 @@ +/*global module*/ +var Buffer = require('safe-buffer').Buffer; +var DataStream = require('./data-stream'); +var jwa = require('jwa'); +var Stream = require('stream'); +var toString = require('./tostring'); +var util = require('util'); + +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} + +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} + +function SignStream(opts) { + var secret = 
opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); + +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +SignStream.sign = jwsSign; + +module.exports = SignStream; diff --git a/node_modules/jws/lib/tostring.js b/node_modules/jws/lib/tostring.js new file mode 100644 index 0000000..f5a49a3 --- /dev/null +++ b/node_modules/jws/lib/tostring.js @@ -0,0 +1,10 @@ +/*global module*/ +var Buffer = require('buffer').Buffer; + +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; diff --git a/node_modules/jws/lib/verify-stream.js b/node_modules/jws/lib/verify-stream.js new file mode 100644 index 0000000..39f7c73 --- /dev/null +++ b/node_modules/jws/lib/verify-stream.js @@ -0,0 +1,120 @@ +/*global module*/ +var Buffer = require('safe-buffer').Buffer; +var DataStream = require('./data-stream'); +var jwa = require('jwa'); +var Stream = require('stream'); +var toString = require('./tostring'); +var util = require('util'); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} + +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} + +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} + +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} + +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; +} + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} + +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} + +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); + + if (!isValidJws(jwsSig)) + return null; + + var header = headerFromJWS(jwsSig); + + if (!header) + return null; + + var payload = payloadFromJWS(jwsSig); + if 
(header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); + + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; + +module.exports = VerifyStream; diff --git a/node_modules/jws/package.json b/node_modules/jws/package.json new file mode 100644 index 0000000..87d4fcc --- /dev/null +++ b/node_modules/jws/package.json @@ -0,0 +1,34 @@ +{ + "name": "jws", + "version": "4.0.0", + "description": "Implementation of JSON Web Signatures", + "main": "index.js", + "directories": { + "test": "test" + }, + "scripts": { + "test": "make test" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/node-jws.git" + }, + "keywords": [ + "jws", + "json", + "web", + "signatures" + ], + "author": "Brian J Brennan", + "license": "MIT", + "readmeFilename": "readme.md", + "gitHead": "c0f6b27bcea5a2ad2e304d91c2e842e4076a6b03", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "semver": "^5.1.0", + "tape": "~2.14.0" + } +} diff --git a/node_modules/jws/readme.md b/node_modules/jws/readme.md new file mode 100644 index 0000000..2f32dca --- /dev/null +++ b/node_modules/jws/readme.md @@ -0,0 +1,255 @@ +# node-jws [![Build Status](https://secure.travis-ci.org/brianloveswords/node-jws.svg)](http://travis-ci.org/brianloveswords/node-jws) + +An implementation of [JSON Web Signatures](http://self-issued.info/docs/draft-ietf-jose-json-web-signature.html). + +This was developed against `draft-ietf-jose-json-web-signature-08` and +implements the entire spec **except** X.509 Certificate Chain +signing/verifying (patches welcome). + +There are both synchronous (`jws.sign`, `jws.verify`) and streaming +(`jws.createSign`, `jws.createVerify`) APIs. + +# Install + +```bash +$ npm install jws +``` + +# Usage + +## jws.ALGORITHMS + +Array of supported algorithms. The following algorithms are currently supported. 
+ +alg Parameter Value | Digital Signature or MAC Algorithm +----------------|---------------------------- +HS256 | HMAC using SHA-256 hash algorithm +HS384 | HMAC using SHA-384 hash algorithm +HS512 | HMAC using SHA-512 hash algorithm +RS256 | RSASSA using SHA-256 hash algorithm +RS384 | RSASSA using SHA-384 hash algorithm +RS512 | RSASSA using SHA-512 hash algorithm +PS256 | RSASSA-PSS using SHA-256 hash algorithm +PS384 | RSASSA-PSS using SHA-384 hash algorithm +PS512 | RSASSA-PSS using SHA-512 hash algorithm +ES256 | ECDSA using P-256 curve and SHA-256 hash algorithm +ES384 | ECDSA using P-384 curve and SHA-384 hash algorithm +ES512 | ECDSA using P-521 curve and SHA-512 hash algorithm +none | No digital signature or MAC value included + +## jws.sign(options) + +(Synchronous) Return a JSON Web Signature for a header and a payload. + +Options: + +* `header` +* `payload` +* `secret` or `privateKey` +* `encoding` (Optional, defaults to 'utf8') + +`header` must be an object with an `alg` property. `header.alg` must be +one a value found in `jws.ALGORITHMS`. See above for a table of +supported algorithms. + +If `payload` is not a buffer or a string, it will be coerced into a string +using `JSON.stringify`. + +Example + +```js +const signature = jws.sign({ + header: { alg: 'HS256' }, + payload: 'h. jon benjamin', + secret: 'has a van', +}); +``` + +## jws.verify(signature, algorithm, secretOrKey) + +(Synchronous) Returns `true` or `false` for whether a signature matches a +secret or key. + +`signature` is a JWS Signature. `header.alg` must be a value found in `jws.ALGORITHMS`. +See above for a table of supported algorithms. `secretOrKey` is a string or +buffer containing either the secret for HMAC algorithms, or the PEM +encoded public key for RSA and ECDSA. + +Note that the `"alg"` value from the signature header is ignored. + + +## jws.decode(signature) + +(Synchronous) Returns the decoded header, decoded payload, and signature +parts of the JWS Signature. + +Returns an object with three properties, e.g. +```js +{ header: { alg: 'HS256' }, + payload: 'h. jon benjamin', + signature: 'YOWPewyGHKu4Y_0M_vtlEnNlqmFOclqp4Hy6hVHfFT4' +} +``` + +## jws.createSign(options) + +Returns a new SignStream object. + +Options: + +* `header` (required) +* `payload` +* `key` || `privateKey` || `secret` +* `encoding` (Optional, defaults to 'utf8') + +Other than `header`, all options expect a string or a buffer when the +value is known ahead of time, or a stream for convenience. +`key`/`privateKey`/`secret` may also be an object when using an encrypted +private key, see the [crypto documentation][encrypted-key-docs]. + +Example: + +```js + +// This... +jws.createSign({ + header: { alg: 'RS256' }, + privateKey: privateKeyStream, + payload: payloadStream, +}).on('done', function(signature) { + // ... +}); + +// is equivalent to this: +const signer = jws.createSign({ + header: { alg: 'RS256' }, +}); +privateKeyStream.pipe(signer.privateKey); +payloadStream.pipe(signer.payload); +signer.on('done', function(signature) { + // ... +}); +``` + +## jws.createVerify(options) + +Returns a new VerifyStream object. + +Options: + +* `signature` +* `algorithm` +* `key` || `publicKey` || `secret` +* `encoding` (Optional, defaults to 'utf8') + +All options expect a string or a buffer when the value is known ahead of +time, or a stream for convenience. + +Example: + +```js + +// This... +jws.createVerify({ + publicKey: pubKeyStream, + signature: sigStream, +}).on('done', function(verified, obj) { + // ... 
+}); + +// is equivilant to this: +const verifier = jws.createVerify(); +pubKeyStream.pipe(verifier.publicKey); +sigStream.pipe(verifier.signature); +verifier.on('done', function(verified, obj) { + // ... +}); +``` + +## Class: SignStream + +A `Readable Stream` that emits a single data event (the calculated +signature) when done. + +### Event: 'done' +`function (signature) { }` + +### signer.payload + +A `Writable Stream` that expects the JWS payload. Do *not* use if you +passed a `payload` option to the constructor. + +Example: + +```js +payloadStream.pipe(signer.payload); +``` + +### signer.secret
+### signer.key
signer.privateKey + +A `Writable Stream`. Expects the JWS secret for HMAC, or the privateKey +for ECDSA and RSA. Do *not* use if you passed a `secret` or `key` option +to the constructor. + +Example: + +```js +privateKeyStream.pipe(signer.privateKey); +``` + +## Class: VerifyStream + +This is a `Readable Stream` that emits a single data event, the result +of whether or not that signature was valid. + +### Event: 'done' +`function (valid, obj) { }` + +`valid` is a boolean for whether or not the signature is valid. + +### verifier.signature + +A `Writable Stream` that expects a JWS Signature. Do *not* use if you +passed a `signature` option to the constructor. + +### verifier.secret
+### verifier.key
verifier.publicKey + +A `Writable Stream` that expects a public key or secret. Do *not* use if you +passed a `key` or `secret` option to the constructor. + +# TODO + +* It feels like there should be some convenience options/APIs for + defining the algorithm rather than having to define a header object + with `{ alg: 'ES512' }` or whatever every time. + +* X.509 support, ugh + +# License + +MIT + +``` +Copyright (c) 2013-2015 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +``` + +[encrypted-key-docs]: https://nodejs.org/api/crypto.html#crypto_sign_sign_private_key_output_format diff --git a/node_modules/node-fetch/LICENSE.md b/node_modules/node-fetch/LICENSE.md new file mode 100644 index 0000000..660ffec --- /dev/null +++ b/node_modules/node-fetch/LICENSE.md @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
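The jws readme included above documents `jws.verify()` and `jws.decode()` but, in the portion shown here, never combines them with `jws.sign()` in one snippet. The sketch below is an illustrative round trip using only the synchronous API that readme describes; the secret and payload values are placeholders and are not part of this change.

```js
// Sketch only: round-trip sign/verify/decode with the synchronous jws API.
const jws = require('jws');

const secret = 'illustrative-secret';   // placeholder value
const signature = jws.sign({
  header: { alg: 'HS256' },             // must be one of jws.ALGORITHMS
  payload: { user: 'example' },         // non-string payloads are JSON.stringify'd
  secret,
});

// verify() takes the algorithm explicitly; the header's "alg" value is ignored.
const valid = jws.verify(signature, 'HS256', secret);

// decode() returns { header, payload, signature }, or null for malformed input.
const decoded = jws.decode(signature);
console.log(valid, decoded.header.alg);
```

For keys or payloads that arrive as streams, the `SignStream`/`VerifyStream` classes documented earlier in that readme are the streaming equivalents of these calls.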
+ diff --git a/node_modules/node-fetch/README.md b/node_modules/node-fetch/README.md new file mode 100644 index 0000000..55f09b7 --- /dev/null +++ b/node_modules/node-fetch/README.md @@ -0,0 +1,634 @@ +node-fetch +========== + +[![npm version][npm-image]][npm-url] +[![build status][travis-image]][travis-url] +[![coverage status][codecov-image]][codecov-url] +[![install size][install-size-image]][install-size-url] +[![Discord][discord-image]][discord-url] + +A light-weight module that brings `window.fetch` to Node.js + +(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567)) + +[![Backers][opencollective-image]][opencollective-url] + + + +- [Motivation](#motivation) +- [Features](#features) +- [Difference from client-side fetch](#difference-from-client-side-fetch) +- [Installation](#installation) +- [Loading and configuring the module](#loading-and-configuring-the-module) +- [Common Usage](#common-usage) + - [Plain text or HTML](#plain-text-or-html) + - [JSON](#json) + - [Simple Post](#simple-post) + - [Post with JSON](#post-with-json) + - [Post with form parameters](#post-with-form-parameters) + - [Handling exceptions](#handling-exceptions) + - [Handling client and server errors](#handling-client-and-server-errors) +- [Advanced Usage](#advanced-usage) + - [Streams](#streams) + - [Buffer](#buffer) + - [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data) + - [Extract Set-Cookie Header](#extract-set-cookie-header) + - [Post data using a file stream](#post-data-using-a-file-stream) + - [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart) + - [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal) +- [API](#api) + - [fetch(url[, options])](#fetchurl-options) + - [Options](#options) + - [Class: Request](#class-request) + - [Class: Response](#class-response) + - [Class: Headers](#class-headers) + - [Interface: Body](#interface-body) + - [Class: FetchError](#class-fetcherror) +- [License](#license) +- [Acknowledgement](#acknowledgement) + + + +## Motivation + +Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence, `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime. + +See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side). + +## Features + +- Stay consistent with `window.fetch` API. +- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences. +- Use native promise but allow substituting it with [insert your favorite promise library]. +- Use native Node streams for body on both request and response. +- Decode content encoding (gzip/deflate) properly and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically. +- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting. + +## Difference from client-side fetch + +- See [Known Differences](LIMITS.md) for details. +- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue. +- Pull requests are welcomed too! 
+ +## Installation + +Current stable release (`2.x`) + +```sh +$ npm install node-fetch +``` + +## Loading and configuring the module +We suggest you load the module via `require` until the stabilization of ES modules in node: +```js +const fetch = require('node-fetch'); +``` + +If you are using a Promise library other than native, set it through `fetch.Promise`: +```js +const Bluebird = require('bluebird'); + +fetch.Promise = Bluebird; +``` + +## Common Usage + +NOTE: The documentation below is up-to-date with `2.x` releases; see the [`1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences. + +#### Plain text or HTML +```js +fetch('https://github.com/') + .then(res => res.text()) + .then(body => console.log(body)); +``` + +#### JSON + +```js + +fetch('https://api.github.com/users/github') + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Simple Post +```js +fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' }) + .then(res => res.json()) // expecting a json response + .then(json => console.log(json)); +``` + +#### Post with JSON + +```js +const body = { a: 1 }; + +fetch('https://httpbin.org/post', { + method: 'post', + body: JSON.stringify(body), + headers: { 'Content-Type': 'application/json' }, + }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form parameters +`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods. + +NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such: + +```js +const { URLSearchParams } = require('url'); + +const params = new URLSearchParams(); +params.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: params }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Handling exceptions +NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section for more information. + +Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, network errors and operational errors, which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details. + +```js +fetch('https://domain.invalid/') + .catch(err => console.error(err)); +``` + +#### Handling client and server errors +It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses: + +```js +function checkStatus(res) { + if (res.ok) { // res.status >= 200 && res.status < 300 + return res; + } else { + throw MyCustomError(res.statusText); + } +} + +fetch('https://httpbin.org/status/400') + .then(checkStatus) + .then(res => console.log('will not get here...')) +``` + +## Advanced Usage + +#### Streams +The "Node.js way" is to use streams when possible: + +```js +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => { + const dest = fs.createWriteStream('./octocat.png'); + res.body.pipe(dest); + }); +``` + +In Node.js 14 you can also use async iterators to read `body`; however, be careful to catch +errors -- the longer a response runs, the more likely it is to encounter an error. 
+ +```js +const fetch = require('node-fetch'); +const response = await fetch('https://httpbin.org/stream/3'); +try { + for await (const chunk of response.body) { + console.dir(JSON.parse(chunk.toString())); + } +} catch (err) { + console.error(err.stack); +} +``` + +In Node.js 12 you can also use async iterators to read `body`; however, async iterators with streams +did not mature until Node.js 14, so you need to do some extra work to ensure you handle errors +directly from the stream and wait on it response to fully close. + +```js +const fetch = require('node-fetch'); +const read = async body => { + let error; + body.on('error', err => { + error = err; + }); + for await (const chunk of body) { + console.dir(JSON.parse(chunk.toString())); + } + return new Promise((resolve, reject) => { + body.on('close', () => { + error ? reject(error) : resolve(); + }); + }); +}; +try { + const response = await fetch('https://httpbin.org/stream/3'); + await read(response.body); +} catch (err) { + console.error(err.stack); +} +``` + +#### Buffer +If you prefer to cache binary data in full, use buffer(). (NOTE: `buffer()` is a `node-fetch`-only API) + +```js +const fileType = require('file-type'); + +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => res.buffer()) + .then(buffer => fileType(buffer)) + .then(type => { /* ... */ }); +``` + +#### Accessing Headers and other Meta data +```js +fetch('https://github.com/') + .then(res => { + console.log(res.ok); + console.log(res.status); + console.log(res.statusText); + console.log(res.headers.raw()); + console.log(res.headers.get('content-type')); + }); +``` + +#### Extract Set-Cookie Header + +Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`. This is a `node-fetch` only API. + +```js +fetch(url).then(res => { + // returns an array of values, instead of a string of comma-separated values + console.log(res.headers.raw()['set-cookie']); +}); +``` + +#### Post data using a file stream + +```js +const { createReadStream } = require('fs'); + +const stream = createReadStream('input.txt'); + +fetch('https://httpbin.org/post', { method: 'POST', body: stream }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form-data (detect multipart) + +```js +const FormData = require('form-data'); + +const form = new FormData(); +form.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: form }) + .then(res => res.json()) + .then(json => console.log(json)); + +// OR, using custom headers +// NOTE: getHeaders() is non-standard API + +const form = new FormData(); +form.append('a', 1); + +const options = { + method: 'POST', + body: form, + headers: form.getHeaders() +} + +fetch('https://httpbin.org/post', options) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Request cancellation with AbortSignal + +> NOTE: You may cancel streamed requests only on Node >= v8.0.0 + +You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller). 
+ +An example of timing out a request after 150ms could be achieved as the following: + +```js +import AbortController from 'abort-controller'; + +const controller = new AbortController(); +const timeout = setTimeout( + () => { controller.abort(); }, + 150, +); + +fetch(url, { signal: controller.signal }) + .then(res => res.json()) + .then( + data => { + useData(data) + }, + err => { + if (err.name === 'AbortError') { + // request was aborted + } + }, + ) + .finally(() => { + clearTimeout(timeout); + }); +``` + +See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples. + + +## API + +### fetch(url[, options]) + +- `url` A string representing the URL for fetching +- `options` [Options](#fetch-options) for the HTTP(S) request +- Returns: Promise<[Response](#class-response)> + +Perform an HTTP(S) fetch. + +`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected `Promise`. + + +### Options + +The default values are shown after each option key. + +```js +{ + // These properties are part of the Fetch Standard + method: 'GET', + headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below) + body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream + redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect + signal: null, // pass an instance of AbortSignal to optionally abort requests + + // The following properties are node-fetch extensions + follow: 20, // maximum redirect count. 0 to not follow redirect + timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead. + compress: true, // support gzip/deflate content encoding. false to disable + size: 0, // maximum response body size in bytes. 0 to disable + agent: null // http(s).Agent instance or function that returns an instance (see below) +} +``` + +##### Default Headers + +If no values are set, the following request headers will be sent automatically: + +Header | Value +------------------- | -------------------------------------------------------- +`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_ +`Accept` | `*/*` +`Content-Length` | _(automatically calculated, if possible)_ +`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_ +`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)` + +Note: when `body` is a `Stream`, `Content-Length` is not set automatically. + +##### Custom Agent + +The `agent` option allows you to specify networking related options which are out of the scope of Fetch, including and not limited to the following: + +- Support self-signed certificate +- Use only IPv4 or IPv6 +- Custom DNS Lookup + +See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for more information. + +If no agent is specified, the default agent provided by Node.js is used. Note that [this changed in Node.js 19](https://github.com/nodejs/node/blob/4267b92604ad78584244488e7f7508a690cb80d0/lib/_http_agent.js#L564) to have `keepalive` true by default. If you wish to enable `keepalive` in an earlier version of Node.js, you can override the agent as per the following code sample. 
+ +In addition, the `agent` option accepts a function that returns `http`(s)`.Agent` instance given current [URL](https://nodejs.org/api/url.html), this is useful during a redirection chain across HTTP and HTTPS protocol. + +```js +const httpAgent = new http.Agent({ + keepAlive: true +}); +const httpsAgent = new https.Agent({ + keepAlive: true +}); + +const options = { + agent: function (_parsedURL) { + if (_parsedURL.protocol == 'http:') { + return httpAgent; + } else { + return httpsAgent; + } + } +} +``` + + +### Class: Request + +An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface. + +Due to the nature of Node.js, the following properties are not implemented at this moment: + +- `type` +- `destination` +- `referrer` +- `referrerPolicy` +- `mode` +- `credentials` +- `cache` +- `integrity` +- `keepalive` + +The following node-fetch extension properties are provided: + +- `follow` +- `compress` +- `counter` +- `agent` + +See [options](#fetch-options) for exact meaning of these extensions. + +#### new Request(input[, options]) + +*(spec-compliant)* + +- `input` A string representing a URL, or another `Request` (which will be cloned) +- `options` [Options][#fetch-options] for the HTTP(S) request + +Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request). + +In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object. + + +### Class: Response + +An HTTP(S) response. This class implements the [Body](#iface-body) interface. + +The following properties are not implemented in node-fetch at this moment: + +- `Response.error()` +- `Response.redirect()` +- `type` +- `trailer` + +#### new Response([body[, options]]) + +*(spec-compliant)* + +- `body` A `String` or [`Readable` stream][node-readable] +- `options` A [`ResponseInit`][response-init] options dictionary + +Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response). + +Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly. + +#### response.ok + +*(spec-compliant)* + +Convenience property representing if the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300. + +#### response.redirected + +*(spec-compliant)* + +Convenience property representing if the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0. + + +### Class: Headers + +This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented. + +#### new Headers([init]) + +*(spec-compliant)* + +- `init` Optional argument to pre-fill the `Headers` object + +Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, an key-value map object or any iterable object. 
+ +```js +// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class + +const meta = { + 'Content-Type': 'text/xml', + 'Breaking-Bad': '<3' +}; +const headers = new Headers(meta); + +// The above is equivalent to +const meta = [ + [ 'Content-Type', 'text/xml' ], + [ 'Breaking-Bad', '<3' ] +]; +const headers = new Headers(meta); + +// You can in fact use any iterable objects, like a Map or even another Headers +const meta = new Map(); +meta.set('Content-Type', 'text/xml'); +meta.set('Breaking-Bad', '<3'); +const headers = new Headers(meta); +const copyOfHeaders = new Headers(headers); +``` + + +### Interface: Body + +`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes. + +The following methods are not yet implemented in node-fetch at this moment: + +- `formData()` + +#### body.body + +*(deviation from spec)* + +* Node.js [`Readable` stream][node-readable] + +Data are encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable]. + +#### body.bodyUsed + +*(spec-compliant)* + +* `Boolean` + +A boolean property for if this body has been consumed. Per the specs, a consumed body cannot be used again. + +#### body.arrayBuffer() +#### body.blob() +#### body.json() +#### body.text() + +*(spec-compliant)* + +* Returns: Promise + +Consume the body and return a promise that will resolve to one of these formats. + +#### body.buffer() + +*(node-fetch extension)* + +* Returns: Promise<Buffer> + +Consume the body and return a promise that will resolve to a Buffer. + +#### body.textConverted() + +*(node-fetch extension)* + +* Returns: Promise<String> + +Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8 if possible. + +(This API requires an optional dependency of the npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.) + + +### Class: FetchError + +*(node-fetch extension)* + +An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info. + + +### Class: AbortError + +*(node-fetch extension)* + +An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.MD][] for more info. + +## Acknowledgement + +Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference. + +`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr). 
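The `FetchError` and `AbortError` classes are described above without a handling example in this part of the readme. The following sketch is not taken from the diff; it shows one plausible way an application could tell the two apart. The URL, timeout value, and helper name are illustrative.

```js
// Sketch only: distinguishing node-fetch's error classes when a request fails.
const fetch = require('node-fetch');

async function getJson(url) {
  // AbortController is global on Node 15+; older runtimes can use the
  // 'abort-controller' polyfill suggested in the readme above.
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), 5000); // placeholder timeout

  try {
    const res = await fetch(url, { signal: controller.signal });
    if (!res.ok) throw new Error(`HTTP ${res.status}`); // 4xx/5xx are not exceptions
    return await res.json();
  } catch (err) {
    if (err.name === 'AbortError') {
      console.error('request aborted (timed out)');
    } else if (err.name === 'FetchError') {
      // err.type is e.g. 'system', 'request-timeout', 'invalid-json', 'max-size'
      console.error(`operational error (${err.type}): ${err.message}`);
    } else {
      throw err;
    }
  } finally {
    clearTimeout(timer);
  }
}
```

Checking `err.name` rather than `instanceof` keeps the handler working even if more than one copy of node-fetch ends up installed in the dependency tree.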
+ +## License + +MIT + +[npm-image]: https://flat.badgen.net/npm/v/node-fetch +[npm-url]: https://www.npmjs.com/package/node-fetch +[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch +[travis-url]: https://travis-ci.org/bitinn/node-fetch +[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master +[codecov-url]: https://codecov.io/gh/bitinn/node-fetch +[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch +[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch +[discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square +[discord-url]: https://discord.gg/Zxbndcm +[opencollective-image]: https://opencollective.com/node-fetch/backers.svg +[opencollective-url]: https://opencollective.com/node-fetch +[whatwg-fetch]: https://fetch.spec.whatwg.org/ +[response-init]: https://fetch.spec.whatwg.org/#responseinit +[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams +[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers +[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md +[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md +[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md diff --git a/node_modules/node-fetch/browser.js b/node_modules/node-fetch/browser.js new file mode 100644 index 0000000..ee86265 --- /dev/null +++ b/node_modules/node-fetch/browser.js @@ -0,0 +1,25 @@ +"use strict"; + +// ref: https://github.com/tc39/proposal-global +var getGlobal = function () { + // the only reliable means to get the global object is + // `Function('return this')()` + // However, this causes CSP violations in Chrome apps. + if (typeof self !== 'undefined') { return self; } + if (typeof window !== 'undefined') { return window; } + if (typeof global !== 'undefined') { return global; } + throw new Error('unable to locate global object'); +} + +var globalObject = getGlobal(); + +module.exports = exports = globalObject.fetch; + +// Needed for TypeScript and Webpack. +if (globalObject.fetch) { + exports.default = globalObject.fetch.bind(globalObject); +} + +exports.Headers = globalObject.Headers; +exports.Request = globalObject.Request; +exports.Response = globalObject.Response; diff --git a/node_modules/node-fetch/lib/index.es.js b/node_modules/node-fetch/lib/index.es.js new file mode 100644 index 0000000..aae9799 --- /dev/null +++ b/node_modules/node-fetch/lib/index.es.js @@ -0,0 +1,1777 @@ +process.emitWarning("The .es.js file is deprecated. 
Use .mjs instead."); + +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import whatwgUrl from 'whatwg-url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + 
this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError, AbortError }; diff --git a/node_modules/node-fetch/lib/index.js b/node_modules/node-fetch/lib/index.js new file mode 100644 index 0000000..567ff5d --- /dev/null +++ b/node_modules/node-fetch/lib/index.js @@ -0,0 +1,1787 @@ +'use strict'; + 
+Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var Stream = _interopDefault(require('stream')); +var http = _interopDefault(require('http')); +var Url = _interopDefault(require('url')); +var whatwgUrl = _interopDefault(require('whatwg-url')); +var https = _interopDefault(require('https')); +var zlib = _interopDefault(require('zlib')); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError 
For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; +exports.AbortError = AbortError; diff --git a/node_modules/node-fetch/lib/index.mjs 
b/node_modules/node-fetch/lib/index.mjs new file mode 100644 index 0000000..2863dd9 --- /dev/null +++ b/node_modules/node-fetch/lib/index.mjs @@ -0,0 +1,1775 @@ +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import whatwgUrl from 'whatwg-url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + 
this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError, AbortError }; diff --git a/node_modules/node-fetch/package.json b/node_modules/node-fetch/package.json new file mode 100644 index 0000000..e0be176 --- /dev/null +++ b/node_modules/node-fetch/package.json @@ -0,0 +1,89 @@ +{ + "name": "node-fetch", + "version": "2.7.0", 
+ "description": "A light-weight module that brings window.fetch to node.js", + "main": "lib/index.js", + "browser": "./browser.js", + "module": "lib/index.mjs", + "files": [ + "lib/index.js", + "lib/index.mjs", + "lib/index.es.js", + "browser.js" + ], + "engines": { + "node": "4.x || >=6.0.0" + }, + "scripts": { + "build": "cross-env BABEL_ENV=rollup rollup -c", + "prepare": "npm run build", + "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js", + "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js", + "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json" + }, + "repository": { + "type": "git", + "url": "https://github.com/bitinn/node-fetch.git" + }, + "keywords": [ + "fetch", + "http", + "promise" + ], + "author": "David Frank", + "license": "MIT", + "bugs": { + "url": "https://github.com/bitinn/node-fetch/issues" + }, + "homepage": "https://github.com/bitinn/node-fetch", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + }, + "devDependencies": { + "@ungap/url-search-params": "^0.1.2", + "abort-controller": "^1.1.0", + "abortcontroller-polyfill": "^1.3.0", + "babel-core": "^6.26.3", + "babel-plugin-istanbul": "^4.1.6", + "babel-plugin-transform-async-generator-functions": "^6.24.1", + "babel-polyfill": "^6.26.0", + "babel-preset-env": "1.4.0", + "babel-register": "^6.16.3", + "chai": "^3.5.0", + "chai-as-promised": "^7.1.1", + "chai-iterator": "^1.1.1", + "chai-string": "~1.3.0", + "codecov": "3.3.0", + "cross-env": "^5.2.0", + "form-data": "^2.3.3", + "is-builtin-module": "^1.0.0", + "mocha": "^5.0.0", + "nyc": "11.9.0", + "parted": "^0.1.1", + "promise": "^8.0.3", + "resumer": "0.0.0", + "rollup": "^0.63.4", + "rollup-plugin-babel": "^3.0.7", + "string-to-arraybuffer": "^1.0.2", + "teeny-request": "3.7.0" + }, + "release": { + "branches": [ + "+([0-9]).x", + "main", + "next", + { + "name": "beta", + "prerelease": true + } + ] + } +} diff --git a/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000..f478d58 --- /dev/null +++ b/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. 
+ +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/readable-stream/GOVERNANCE.md b/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000..16ffb93 --- /dev/null +++ b/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). 
+ +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/readable-stream/LICENSE b/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..2873b3b --- /dev/null +++ b/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md new file mode 100644 index 0000000..19117c1 --- /dev/null +++ b/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html). 
+ +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. + https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. + +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes, if you are just using one of the main classes and +functions. + +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +```` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. We discourage using +whatever is exported directly, but rather use one of the properties as +shown in the example above. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/node_modules/readable-stream/errors-browser.js b/node_modules/readable-stream/errors-browser.js new file mode 100644 index 0000000..fb8e73e --- /dev/null +++ b/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/node_modules/readable-stream/errors.js b/node_modules/readable-stream/errors.js new file mode 100644 index 0000000..8471526 --- /dev/null +++ b/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 
1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/node_modules/readable-stream/experimentalWarning.js b/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 0000000..78e8414 --- /dev/null +++ b/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. 
This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? emitExperimentalWarning + : noop; diff --git a/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..19abfa6 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,126 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); +require('inherits')(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. 
+ var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..24a6bdd --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,37 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; +var Transform = require('./_stream_transform'); +require('inherits')(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..df1f608 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1027 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? 
window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = require('util'); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = require('./internal/streams/buffer_list'); +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +require('inherits')(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. 
We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..1ccb715 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,190 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = require('./_stream_duplex'); +require('inherits')(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..292415e --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,641 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +require('inherits')(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. 
+ this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. 
+ this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
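// (Editor's note, illustrative only and not part of the vendored file: the
// nextTick ordering described above is what makes the usual consumer-side
// backpressure pattern safe. A minimal sketch, assuming `writable` is any
// stream.Writable and `chunks` is an array of Buffers or strings:
//
//   function writeAll(writable, chunks, done) {
//     let i = 0;
//     (function writeNext() {
//       while (i < chunks.length) {
//         // write() returns false once the buffered length reaches highWaterMark
//         if (!writable.write(chunks[i++])) {
//           // Safe: 'drain' cannot be emitted before this listener is attached.
//           writable.once('drain', writeNext);
//           return;
//         }
//       }
//       writable.end(done);
//     })();
//   }
// )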
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. 
+ state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 0000000..742c5a4 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,180 @@ +'use strict'; + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var finished = require('./end-of-stream'); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
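// (Editor's note, illustrative only and not part of the vendored file: this
// async_iterator.js shim is what backs `for await...of` over a readable-stream
// Readable. A minimal consumer sketch, assuming `readable` emits Buffer chunks:
//
//   async function collect(readable) {
//     const chunks = [];
//     for await (const chunk of readable) {
//       chunks.push(chunk);
//     }
//     return Buffer.concat(chunks);
//   }
// )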
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/buffer_list.js b/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 
0000000..69bda49 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,183 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = require('buffer'), + Buffer = _require.Buffer; +var _require2 = require('util'), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000..31a17c4 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,96 @@ +'use strict'; + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. 
+ // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. + + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 0000000..59c671b --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + 
stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/from-browser.js b/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 0000000..a4ce56f --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/node_modules/readable-stream/lib/internal/streams/from.js b/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 0000000..0a34ee9 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,52 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; diff --git a/node_modules/readable-stream/lib/internal/streams/pipeline.js b/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 0000000..e6f3924 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + 
throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/state.js b/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 0000000..3fbf892 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,22 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000..9332a3f --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000..ce2ad5b --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json new file mode 100644 index 0000000..ade59e7 --- /dev/null +++ b/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.2", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + 
"report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/readable-stream/readable-browser.js b/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000..adbf60d --- /dev/null +++ b/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/node_modules/readable-stream/readable.js b/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..9e0ca12 --- /dev/null +++ b/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/node_modules/retry-request/CHANGELOG.md b/node_modules/retry-request/CHANGELOG.md new file mode 100644 index 0000000..894f765 --- /dev/null +++ b/node_modules/retry-request/CHANGELOG.md @@ -0,0 +1,91 @@ +# Changelog + +## [7.0.2](https://github.com/googleapis/retry-request/compare/v7.0.1...v7.0.2) (2024-01-19) + + +### Bug Fixes + +* Get rid of debug runtime dependency ([#118](https://github.com/googleapis/retry-request/issues/118)) ([85e1bd0](https://github.com/googleapis/retry-request/commit/85e1bd0a46630876757344405a6c8620d9de6451)) + +## [7.0.1](https://github.com/googleapis/retry-request/compare/v7.0.0...v7.0.1) (2023-10-12) + + +### Bug Fixes + +* Types ([#109](https://github.com/googleapis/retry-request/issues/109)) ([35c0260](https://github.com/googleapis/retry-request/commit/35c02608138a4b66cf488e4e1e90cbff05d42c2f)) + +## [7.0.0](https://github.com/googleapis/retry-request/compare/v6.0.0...v7.0.0) (2023-10-11) + + +### ⚠ BREAKING CHANGES + +* Remove `request` fallback, Update Docs, Update Types ([#107](https://github.com/googleapis/retry-request/issues/107)) + +### Features + +* Remove `request` fallback, Update Docs, Update Types 
([#107](https://github.com/googleapis/retry-request/issues/107)) ([ba8abb2](https://github.com/googleapis/retry-request/commit/ba8abb242ff21921a81e3178081806e95a902a68)) + +## [6.0.0](https://github.com/googleapis/retry-request/compare/v5.0.2...v6.0.0) (2023-07-20) + + +### ⚠ BREAKING CHANGES + +* Move to node 14 as the minimum version ([#97](https://github.com/googleapis/retry-request/issues/97)) +* move minimum version to node 14 ([#96](https://github.com/googleapis/retry-request/issues/96)) + +### Features + +* Move to node 14 as the minimum version ([#97](https://github.com/googleapis/retry-request/issues/97)) ([f42d074](https://github.com/googleapis/retry-request/commit/f42d0743de048fdf46c7f480f3dec59c2caefa5a)) + + +### Miscellaneous Chores + +* Move minimum version to node 14 ([#96](https://github.com/googleapis/retry-request/issues/96)) ([2167a91](https://github.com/googleapis/retry-request/commit/2167a91c36e95522ad7e7293b4744ddd81c6166e)) + +## [5.0.2](https://github.com/googleapis/retry-request/compare/v5.0.1...v5.0.2) (2022-09-14) + + +### Bug Fixes + +* Delete second LICENSE ([#82](https://github.com/googleapis/retry-request/issues/82)) ([9dfaf18](https://github.com/googleapis/retry-request/commit/9dfaf1819b18b6d660924951b9cf1c509b6e9870)) +* Remove pip install statements ([#1546](https://github.com/googleapis/retry-request/issues/1546)) ([#79](https://github.com/googleapis/retry-request/issues/79)) ([cb4e15d](https://github.com/googleapis/retry-request/commit/cb4e15d6e8ae9d72855ed1071957afe9d301b12e)) + +## [5.0.1](https://github.com/googleapis/retry-request/compare/v5.0.0...v5.0.1) (2022-06-09) + + +### Bug Fixes + +* respect totalTimeout and do not retry if nextRetryDelay is <= 0 ([#38](https://github.com/googleapis/retry-request/issues/38)) ([9501a42](https://github.com/googleapis/retry-request/commit/9501a42d06a620282dcd2ff9990fd0b5033a990b)) + +## [5.0.0](https://github.com/googleapis/retry-request/compare/v4.2.2...v5.0.0) (2022-05-06) + + +### ⚠ BREAKING CHANGES + +* drop node 10 (#68) + +### Build System + +* drop node 10 ([#68](https://github.com/googleapis/retry-request/issues/68)) ([00ec90c](https://github.com/googleapis/retry-request/commit/00ec90c4d3cb29245ca746e0e133fcddc22d2251)) + +### [4.2.1](https://github.com/googleapis/retry-request/compare/v4.2.0...v4.2.1) (2022-04-12) + + +### Bug Fixes + +* add new retry options to types ([#36](https://github.com/googleapis/retry-request/issues/36)) ([3f10798](https://github.com/googleapis/retry-request/commit/3f10798f47c03b50f1ba352b04d09ea3d0458b9c)) +* use extend instead of object.assign ([#37](https://github.com/googleapis/retry-request/issues/37)) ([8c8dcdd](https://github.com/googleapis/retry-request/commit/8c8dcdd7d6262ce305c93fa4a8a7b2630e984824)) + +## [4.2.0](https://github.com/googleapis/retry-request/compare/v4.1.0...v4.2.0) (2022-04-06) + + +### Features + +* support enhanced retry settings ([#35](https://github.com/googleapis/retry-request/issues/35)) ([0184676](https://github.com/googleapis/retry-request/commit/0184676dee36596fb939fb4559af11d0a14f64bd)) + + +### Bug Fixes + +* add new retry options to types ([#36](https://github.com/googleapis/retry-request/issues/36)) ([3f10798](https://github.com/googleapis/retry-request/commit/3f10798f47c03b50f1ba352b04d09ea3d0458b9c)) +* correctly calculate retry attempt ([#33](https://github.com/googleapis/retry-request/issues/33)) ([4c852e2](https://github.com/googleapis/retry-request/commit/4c852e2ba22a7f75edfb3c905bd37a7e9913e67d)) +* use extend instead of 
object.assign ([#37](https://github.com/googleapis/retry-request/issues/37)) ([8c8dcdd](https://github.com/googleapis/retry-request/commit/8c8dcdd7d6262ce305c93fa4a8a7b2630e984824)) diff --git a/node_modules/retry-request/index.d.ts b/node_modules/retry-request/index.d.ts new file mode 100644 index 0000000..3627f45 --- /dev/null +++ b/node_modules/retry-request/index.d.ts @@ -0,0 +1,42 @@ +declare module 'retry-request' { + import * as request from 'request'; + import * as teenyRequest from 'teeny-request'; + + type teenyRequestFunction = typeof teenyRequest extends Function + ? typeof teenyRequest + : never; + + namespace retryRequest { + /** + * Set the defaults for `retryRequest`. + */ + const defaults: retryRequest.Options; + /** + * Determines the next retry based on the provided configuration. + */ + function getNextRetryDelay(config: Options): number; + interface Options { + objectMode?: boolean; + request: typeof request | teenyRequestFunction; + retries?: number; + noResponseRetries?: number; + currentRetryAttempt?: number; + maxRetryDelay?: number; + retryDelayMultiplier?: number; + totalTimeout?: number; + shouldRetryFn?: (response: request.RequestResponse) => boolean; + } + } + + function retryRequest( + requestOpts: request.Options, + opts: retryRequest.Options, + callback?: request.RequestCallback + ): {abort: () => void}; + function retryRequest( + requestOpts: request.Options, + callback?: request.RequestCallback + ): {abort: () => void}; + + export = retryRequest; +} diff --git a/node_modules/retry-request/index.js b/node_modules/retry-request/index.js new file mode 100644 index 0000000..2fae107 --- /dev/null +++ b/node_modules/retry-request/index.js @@ -0,0 +1,293 @@ +'use strict'; + +const {PassThrough} = require('stream'); +const extend = require('extend'); + +let debug = () => {}; +if ( + typeof process !== 'undefined' && + 'env' in process && + typeof process.env === 'object' && + process.env.DEBUG === 'retry-request' +) { + debug = message => { + console.log('retry-request:', message); + }; +} + +const DEFAULTS = { + objectMode: false, + retries: 2, + + /* + The maximum time to delay in seconds. If retryDelayMultiplier results in a + delay greater than maxRetryDelay, retries should delay by maxRetryDelay + seconds instead. + */ + maxRetryDelay: 64, + + /* + The multiplier by which to increase the delay time between the completion of + failed requests, and the initiation of the subsequent retrying request. + */ + retryDelayMultiplier: 2, + + /* + The length of time to keep retrying in seconds. The last sleep period will + be shortened as necessary, so that the last retry runs at deadline (and not + considerably beyond it). The total time starting from when the initial + request is sent, after which an error will be returned, regardless of the + retrying attempts made meanwhile. 
+ */ + totalTimeout: 600, + + noResponseRetries: 2, + currentRetryAttempt: 0, + shouldRetryFn: function (response) { + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; + + const statusCode = response.statusCode; + debug(`Response status: ${statusCode}`); + + let range; + while ((range = retryRanges.shift())) { + if (statusCode >= range[0] && statusCode <= range[1]) { + // Not a successful status or redirect. + return true; + } + } + }, +}; + +function retryRequest(requestOpts, opts, callback) { + if (typeof requestOpts === 'string') { + requestOpts = {url: requestOpts}; + } + + const streamMode = typeof arguments[arguments.length - 1] !== 'function'; + + if (typeof opts === 'function') { + callback = opts; + } + + const manualCurrentRetryAttemptWasSet = + opts && typeof opts.currentRetryAttempt === 'number'; + opts = extend({}, DEFAULTS, opts); + + if (typeof opts.request === 'undefined') { + throw new Error('A request library must be provided to retry-request.'); + } + + let currentRetryAttempt = opts.currentRetryAttempt; + + let numNoResponseAttempts = 0; + let streamResponseHandled = false; + + let retryStream; + let requestStream; + let delayStream; + + let activeRequest; + const retryRequest = { + abort: function () { + if (activeRequest && activeRequest.abort) { + activeRequest.abort(); + } + }, + }; + + if (streamMode) { + retryStream = new PassThrough({objectMode: opts.objectMode}); + retryStream.abort = resetStreams; + } + + const timeOfFirstRequest = Date.now(); + if (currentRetryAttempt > 0) { + retryAfterDelay(currentRetryAttempt); + } else { + makeRequest(); + } + + if (streamMode) { + return retryStream; + } else { + return retryRequest; + } + + function resetStreams() { + delayStream = null; + + if (requestStream) { + requestStream.abort && requestStream.abort(); + requestStream.cancel && requestStream.cancel(); + + if (requestStream.destroy) { + requestStream.destroy(); + } else if (requestStream.end) { + requestStream.end(); + } + } + } + + function makeRequest() { + let finishHandled = false; + currentRetryAttempt++; + debug(`Current retry attempt: ${currentRetryAttempt}`); + + function handleFinish(args = []) { + if (!finishHandled) { + finishHandled = true; + retryStream.emit('complete', ...args); + } + } + + if (streamMode) { + streamResponseHandled = false; + + delayStream = new PassThrough({objectMode: opts.objectMode}); + requestStream = opts.request(requestOpts); + + setImmediate(() => { + retryStream.emit('request'); + }); + + requestStream + // gRPC via google-cloud-node can emit an `error` as well as a `response` + // Whichever it emits, we run with-- we can't run with both. That's what + // is up with the `streamResponseHandled` tracking. 
+ .on('error', err => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(err); + }) + .on('response', (resp, body) => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(null, resp, body); + }) + .on('complete', (...params) => handleFinish(params)) + .on('finish', (...params) => handleFinish(params)); + + requestStream.pipe(delayStream); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } + } + + function retryAfterDelay(currentRetryAttempt) { + if (streamMode) { + resetStreams(); + } + + const nextRetryDelay = getNextRetryDelay({ + maxRetryDelay: opts.maxRetryDelay, + retryDelayMultiplier: opts.retryDelayMultiplier, + retryNumber: currentRetryAttempt, + timeOfFirstRequest, + totalTimeout: opts.totalTimeout, + }); + debug(`Next retry delay: ${nextRetryDelay}`); + + if (nextRetryDelay <= 0) { + numNoResponseAttempts = opts.noResponseRetries + 1; + return; + } + + setTimeout(makeRequest, nextRetryDelay); + } + + function onResponse(err, response, body) { + // An error such as DNS resolution. + if (err) { + numNoResponseAttempts++; + + if (numNoResponseAttempts <= opts.noResponseRetries) { + retryAfterDelay(numNoResponseAttempts); + } else { + if (streamMode) { + retryStream.emit('error', err); + retryStream.end(); + } else { + callback(err, response, body); + } + } + + return; + } + + // Send the response to see if we should try again. + // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts + // the very first request sent as the first "retry". It is only accurate + // when a user provides their own "currentRetryAttempt" option at + // instantiation. + const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet + ? currentRetryAttempt + : currentRetryAttempt - 1; + if ( + adjustedCurrentRetryAttempt < opts.retries && + opts.shouldRetryFn(response) + ) { + retryAfterDelay(currentRetryAttempt); + return; + } + + // No more attempts need to be made, just continue on. 
+ if (streamMode) { + retryStream.emit('response', response); + delayStream.pipe(retryStream); + requestStream.on('error', err => { + retryStream.destroy(err); + }); + } else { + callback(err, response, body); + } + } +} + +module.exports = retryRequest; + +function getNextRetryDelay(config) { + const { + maxRetryDelay, + retryDelayMultiplier, + retryNumber, + timeOfFirstRequest, + totalTimeout, + } = config; + + const maxRetryDelayMs = maxRetryDelay * 1000; + const totalTimeoutMs = totalTimeout * 1000; + + const jitter = Math.floor(Math.random() * 1000); + const calculatedNextRetryDelay = + Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; + + const maxAllowableDelayMs = + totalTimeoutMs - (Date.now() - timeOfFirstRequest); + + return Math.min( + calculatedNextRetryDelay, + maxAllowableDelayMs, + maxRetryDelayMs + ); +} + +module.exports.defaults = DEFAULTS; +module.exports.getNextRetryDelay = getNextRetryDelay; diff --git a/node_modules/retry-request/license b/node_modules/retry-request/license new file mode 100644 index 0000000..df6eeb5 --- /dev/null +++ b/node_modules/retry-request/license @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2015 Stephen Sawchuk + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
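The `getNextRetryDelay` helper above combines three inputs: an exponential term (`retryDelayMultiplier` raised to the retry number, in seconds, plus up to one second of random jitter), a per-attempt cap (`maxRetryDelay`), and whatever remains of the overall `totalTimeout` budget. A minimal standalone sketch of that calculation using the module's defaults (multiplier 2, max delay 64 s, total timeout 600 s); the function name is just for illustration:

```js
// Sketch of the backoff computed above; option values are in seconds, the result in milliseconds.
function sketchNextRetryDelayMs({ retryNumber, timeOfFirstRequest,
  retryDelayMultiplier = 2, maxRetryDelay = 64, totalTimeout = 600 }) {
  const jitter = Math.floor(Math.random() * 1000);                      // up to 1 s of jitter
  const exponentialMs = Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter;
  const remainingBudgetMs = totalTimeout * 1000 - (Date.now() - timeOfFirstRequest);
  return Math.min(exponentialMs, remainingBudgetMs, maxRetryDelay * 1000);
}

// The first retry waits roughly 2 s, the second roughly 4 s, then 8 s, 16 s, ...,
// never more than 64 s and never past whatever is left of the 600 s total budget.
console.log(sketchNextRetryDelayMs({ retryNumber: 1, timeOfFirstRequest: Date.now() })); // ≈ 2000–3000
```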
diff --git a/node_modules/retry-request/package.json b/node_modules/retry-request/package.json new file mode 100644 index 0000000..7389612 --- /dev/null +++ b/node_modules/retry-request/package.json @@ -0,0 +1,48 @@ +{ + "name": "retry-request", + "version": "7.0.2", + "description": "Retry a request.", + "main": "index.js", + "repository": "stephenplusplus/retry-request", + "scripts": { + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "test": "mocha --timeout 30000", + "system-test": "" + }, + "files": [ + "index.js", + "index.d.ts", + "license" + ], + "types": "index.d.ts", + "keywords": [ + "request", + "retry", + "stream" + ], + "author": "Stephen Sawchuk ", + "license": "MIT", + "engines": { + "node": ">=14" + }, + "dependencies": { + "@types/request": "^2.48.8", + "extend": "^3.0.2", + "teeny-request": "^9.0.0" + }, + "devDependencies": { + "async": "^3.0.1", + "gts": "^5.0.0", + "jsdoc": "^4.0.0", + "jsdoc-fresh": "^3.0.0", + "jsdoc-region-tag": "^3.0.0", + "linkinator": "^4.0.0", + "lodash.range": "^3.2.0", + "mocha": "^10.2.0", + "typescript": "^5.1.6" + } +} diff --git a/node_modules/retry-request/readme.md b/node_modules/retry-request/readme.md new file mode 100644 index 0000000..0c3e0f0 --- /dev/null +++ b/node_modules/retry-request/readme.md @@ -0,0 +1,199 @@ +|![retry-request](logo.png) +|:-: +|Retry a [request][request] with built-in [exponential backoff](https://developers.google.com/analytics/devguides/reporting/core/v3/coreErrors#backoff). + +```sh +$ npm install --save teeny-request +$ npm install --save retry-request +``` + +```js +var request = require('retry-request', { + request: require('teeny-request'), +}); +``` + +It should work the same as `request` and `teeny-request` in both callback mode and stream mode. + +Note: This module only works when used as a readable stream, i.e. POST requests aren't supported ([#3](https://github.com/stephenplusplus/retry-request/issues/3)). + +## Do I need to install `request`? + +Yes! You must independently install `teeny-request` OR `request` (_deprecated_) and provide it to this library: + +```js +var request = require('retry-request', { + request: require('teeny-request'), +}); +``` + +#### Callback + +`urlThatReturns503` will be requested 3 total times before giving up and executing the callback. + +```js +request(urlThatReturns503, function (err, resp, body) {}); +``` + +#### Stream + +`urlThatReturns503` will be requested 3 total times before giving up and emitting the `response` and `complete` event as usual. + +```js +request(urlThatReturns503) + .on('error', function () {}) + .on('response', function () {}) + .on('complete', function () {}); +``` + +## Can I monitor what retry-request is doing internally? + +Yes! To enable the debug mode, set the environment variable `DEBUG` to _retry-request_. + +(Thanks for the implementation, @yihaozhadan!) + +## request(requestOptions, [opts], [cb]) + +### requestOptions + +Passed directly to `request` or `teeny-request`. See the list of options supported: + +- https://github.com/request/request/#requestoptions-callback +- https://github.com/googleapis/teeny-request#teenyrequestoptions-callback + +### opts _(optional)_ + +#### `opts.noResponseRetries` + +Type: `Number` + +Default: `2` + +The number of times to retry after a response fails to come through, such as a DNS resolution error or a socket hangup. 
+ +```js +var opts = { + noResponseRetries: 0, +}; + +request(url, opts, function (err, resp, body) { + // url was requested 1 time before giving up and + // executing this callback. +}); +``` + +#### `opts.objectMode` + +Type: `Boolean` + +Default: `false` + +Set to `true` if your custom `opts.request` function returns a stream in object mode. + +#### `opts.retries` + +Type: `Number` + +Default: `2` + +```js +var opts = { + retries: 4, +}; + +request(urlThatReturns503, opts, function (err, resp, body) { + // urlThatReturns503 was requested a total of 5 times + // before giving up and executing this callback. +}); +``` + +#### `opts.currentRetryAttempt` + +Type: `Number` + +Default: `0` + +```js +var opts = { + currentRetryAttempt: 1, +}; + +request(urlThatReturns503, opts, function (err, resp, body) { + // urlThatReturns503 was requested as if it already failed once. +}); +``` + +#### `opts.shouldRetryFn` + +Type: `Function` + +Default: Returns `true` if [http.incomingMessage](https://nodejs.org/api/http.html#http_http_incomingmessage).statusCode is < 200 or >= 400. + +```js +var opts = { + shouldRetryFn: function (incomingHttpMessage) { + return incomingHttpMessage.statusMessage !== 'OK'; + }, +}; + +request(urlThatReturnsNonOKStatusMessage, opts, function (err, resp, body) { + // urlThatReturnsNonOKStatusMessage was requested a + // total of 3 times, each time using `opts.shouldRetryFn` + // to decide if it should continue before giving up and + // executing this callback. +}); +``` + +#### `opts.request` + +Type: `Function` + +If we not provided we will throw an error advising you to provide it. + +_NOTE: If you override the request function, and it returns a stream in object mode, be sure to set `opts.objectMode` to `true`._ + +```js +var originalRequest = require('teeny-request').defaults({ + pool: { + maxSockets: Infinity, + }, +}); + +var opts = { + request: originalRequest, +}; + +request(urlThatReturns503, opts, function (err, resp, body) { + // Your provided `originalRequest` instance was used. +}); +``` + +#### `opts.maxRetryDelay` + +Type: `Number` + +Default: `64` + +The maximum time to delay in seconds. If retryDelayMultiplier results in a delay greater than maxRetryDelay, retries should delay by maxRetryDelay seconds instead. + +#### `opts.retryDelayMultiplier` + +Type: `Number` + +Default: `2` + +The multiplier by which to increase the delay time between the completion of failed requests, and the initiation of the subsequent retrying request. + +#### `opts.totalTimeout` + +Type: `Number` + +Default: `600` + +The length of time to keep retrying in seconds. The last sleep period will be shortened as necessary, so that the last retry runs at deadline (and not considerably beyond it). The total time starting from when the initial request is sent, after which an error will be returned, regardless of the retrying attempts made meanwhile. + +### cb _(optional)_ + +Passed directly to `request`. See the callback section: https://github.com/request/request/#requestoptions-callback. 
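Taken together, the options documented above compose like this. A hedged sketch: the URL is a placeholder, and the underlying request library is wired up the way this readme's own examples do (`teeny-request`):

```js
const retryRequest = require('retry-request');

const opts = {
  request: require('teeny-request'), // the underlying request library, as in the readme examples
  retries: 4,                        // up to 5 total attempts on retryable status codes
  noResponseRetries: 1,              // one extra attempt when no response arrives (DNS/socket errors)
  maxRetryDelay: 32,                 // never wait more than 32 s between attempts
  retryDelayMultiplier: 2,           // otherwise back off 2 s, 4 s, 8 s, ...
  totalTimeout: 120,                 // stop retrying about two minutes after the first request
  shouldRetryFn: (resp) => resp.statusCode === 429 || resp.statusCode >= 500,
};

// 'https://example.com/flaky' is a placeholder endpoint.
retryRequest('https://example.com/flaky', opts, (err, resp, body) => {
  if (err) {
    console.error('all attempts failed:', err.message);
    return;
  }
  console.log('succeeded with status', resp.statusCode);
});
```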
+ +[request]: https://github.com/request/request diff --git a/node_modules/retry/License b/node_modules/retry/License new file mode 100644 index 0000000..0b58de3 --- /dev/null +++ b/node_modules/retry/License @@ -0,0 +1,21 @@ +Copyright (c) 2011: +Tim Koschützki (tim@debuggable.com) +Felix Geisendörfer (felix@debuggable.com) + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff --git a/node_modules/retry/README.md b/node_modules/retry/README.md new file mode 100644 index 0000000..6d54153 --- /dev/null +++ b/node_modules/retry/README.md @@ -0,0 +1,227 @@ + +[![Build Status](https://secure.travis-ci.org/tim-kos/node-retry.svg?branch=master)](http://travis-ci.org/tim-kos/node-retry "Check this project's build status on TravisCI") +[![codecov](https://codecov.io/gh/tim-kos/node-retry/branch/master/graph/badge.svg)](https://codecov.io/gh/tim-kos/node-retry) + + +# retry + +Abstraction for exponential and custom retry strategies for failed operations. + +## Installation + + npm install retry + +## Current Status + +This module has been tested and is ready to be used. + +## Tutorial + +The example below will retry a potentially failing `dns.resolve` operation +`10` times using an exponential backoff strategy. With the default settings, this +means the last attempt is made after `17 minutes and 3 seconds`. + +``` javascript +var dns = require('dns'); +var retry = require('retry'); + +function faultTolerantResolve(address, cb) { + var operation = retry.operation(); + + operation.attempt(function(currentAttempt) { + dns.resolve(address, function(err, addresses) { + if (operation.retry(err)) { + return; + } + + cb(err ? operation.mainError() : null, addresses); + }); + }); +} + +faultTolerantResolve('nodejs.org', function(err, addresses) { + console.log(err, addresses); +}); +``` + +Of course you can also configure the factors that go into the exponential +backoff. See the API documentation below for all available settings. +currentAttempt is an int representing the number of attempts so far. + +``` javascript +var operation = retry.operation({ + retries: 5, + factor: 3, + minTimeout: 1 * 1000, + maxTimeout: 60 * 1000, + randomize: true, +}); +``` + +## API + +### retry.operation([options]) + +Creates a new `RetryOperation` object. `options` is the same as `retry.timeouts()`'s `options`, with three additions: + +* `forever`: Whether to retry forever, defaults to `false`. +* `unref`: Whether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. 
+* `maxRetryTime`: The maximum time (in milliseconds) that the retried operation is allowed to run. Default is `Infinity`. + +### retry.timeouts([options]) + +Returns an array of timeouts. All time `options` and return values are in +milliseconds. If `options` is an array, a copy of that array is returned. + +`options` is a JS object that can contain any of the following keys: + +* `retries`: The maximum amount of times to retry the operation. Default is `10`. Seting this to `1` means `do it once, then retry it once`. +* `factor`: The exponential factor to use. Default is `2`. +* `minTimeout`: The number of milliseconds before starting the first retry. Default is `1000`. +* `maxTimeout`: The maximum number of milliseconds between two retries. Default is `Infinity`. +* `randomize`: Randomizes the timeouts by multiplying with a factor between `1` to `2`. Default is `false`. + +The formula used to calculate the individual timeouts is: + +``` +Math.min(random * minTimeout * Math.pow(factor, attempt), maxTimeout) +``` + +Have a look at [this article][article] for a better explanation of approach. + +If you want to tune your `factor` / `times` settings to attempt the last retry +after a certain amount of time, you can use wolfram alpha. For example in order +to tune for `10` attempts in `5 minutes`, you can use this equation: + +![screenshot](https://github.com/tim-kos/node-retry/raw/master/equation.gif) + +Explaining the various values from left to right: + +* `k = 0 ... 9`: The `retries` value (10) +* `1000`: The `minTimeout` value in ms (1000) +* `x^k`: No need to change this, `x` will be your resulting factor +* `5 * 60 * 1000`: The desired total amount of time for retrying in ms (5 minutes) + +To make this a little easier for you, use wolfram alpha to do the calculations: + + + +[article]: http://dthain.blogspot.com/2009/02/exponential-backoff-in-distributed.html + +### retry.createTimeout(attempt, opts) + +Returns a new `timeout` (integer in milliseconds) based on the given parameters. + +`attempt` is an integer representing for which retry the timeout should be calculated. If your retry operation was executed 4 times you had one attempt and 3 retries. If you then want to calculate a new timeout, you should set `attempt` to 4 (attempts are zero-indexed). + +`opts` can include `factor`, `minTimeout`, `randomize` (boolean) and `maxTimeout`. They are documented above. + +`retry.createTimeout()` is used internally by `retry.timeouts()` and is public for you to be able to create your own timeouts for reinserting an item, see [issue #13](https://github.com/tim-kos/node-retry/issues/13). + +### retry.wrap(obj, [options], [methodNames]) + +Wrap all functions of the `obj` with retry. Optionally you can pass operation options and +an array of method names which need to be wrapped. + +``` +retry.wrap(obj) + +retry.wrap(obj, ['method1', 'method2']) + +retry.wrap(obj, {retries: 3}) + +retry.wrap(obj, {retries: 3}, ['method1', 'method2']) +``` +The `options` object can take any options that the usual call to `retry.operation` can take. + +### new RetryOperation(timeouts, [options]) + +Creates a new `RetryOperation` where `timeouts` is an array where each value is +a timeout given in milliseconds. + +Available options: +* `forever`: Whether to retry forever, defaults to `false`. +* `unref`: Wether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. 
+ +If `forever` is true, the following changes happen: +* `RetryOperation.errors()` will only output an array of one item: the last error. +* `RetryOperation` will repeatedly use the `timeouts` array. Once all of its timeouts have been used up, it restarts with the first timeout, then uses the second and so on. + +#### retryOperation.errors() + +Returns an array of all errors that have been passed to `retryOperation.retry()` so far. The +returning array has the errors ordered chronologically based on when they were passed to +`retryOperation.retry()`, which means the first passed error is at index zero and the last is +at the last index. + +#### retryOperation.mainError() + +A reference to the error object that occured most frequently. Errors are +compared using the `error.message` property. + +If multiple error messages occured the same amount of time, the last error +object with that message is returned. + +If no errors occured so far, the value is `null`. + +#### retryOperation.attempt(fn, timeoutOps) + +Defines the function `fn` that is to be retried and executes it for the first +time right away. The `fn` function can receive an optional `currentAttempt` callback that represents the number of attempts to execute `fn` so far. + +Optionally defines `timeoutOps` which is an object having a property `timeout` in miliseconds and a property `cb` callback function. +Whenever your retry operation takes longer than `timeout` to execute, the timeout callback function `cb` is called. + + +#### retryOperation.try(fn) + +This is an alias for `retryOperation.attempt(fn)`. This is deprecated. Please use `retryOperation.attempt(fn)` instead. + +#### retryOperation.start(fn) + +This is an alias for `retryOperation.attempt(fn)`. This is deprecated. Please use `retryOperation.attempt(fn)` instead. + +#### retryOperation.retry(error) + +Returns `false` when no `error` value is given, or the maximum amount of retries +has been reached. + +Otherwise it returns `true`, and retries the operation after the timeout for +the current attempt number. + +#### retryOperation.stop() + +Allows you to stop the operation being retried. Useful for aborting the operation on a fatal error etc. + +#### retryOperation.reset() + +Resets the internal state of the operation object, so that you can call `attempt()` again as if this was a new operation object. + +#### retryOperation.attempts() + +Returns an int representing the number of attempts it took to call `fn` before it was successful. + +## License + +retry is licensed under the MIT license. + + +# Changelog + +0.10.0 Adding `stop` functionality, thanks to @maxnachlinger. + +0.9.0 Adding `unref` functionality, thanks to @satazor. + +0.8.0 Implementing retry.wrap. + +0.7.0 Some bug fixes and made retry.createTimeout() public. Fixed issues [#10](https://github.com/tim-kos/node-retry/issues/10), [#12](https://github.com/tim-kos/node-retry/issues/12), and [#13](https://github.com/tim-kos/node-retry/issues/13). + +0.6.0 Introduced optional timeOps parameter for the attempt() function which is an object having a property timeout in milliseconds and a property cb callback function. Whenever your retry operation takes longer than timeout to execute, the timeout callback function cb is called. + +0.5.0 Some minor refactoring. + +0.4.0 Changed retryOperation.try() to retryOperation.attempt(). Deprecated the aliases start() and try() for it. + +0.3.0 Added retryOperation.start() which is an alias for retryOperation.try(). 
+ +0.2.0 Added attempts() function and parameter to retryOperation.try() representing the number of attempts it took to call fn(). diff --git a/node_modules/retry/example/dns.js b/node_modules/retry/example/dns.js new file mode 100644 index 0000000..446729b --- /dev/null +++ b/node_modules/retry/example/dns.js @@ -0,0 +1,31 @@ +var dns = require('dns'); +var retry = require('../lib/retry'); + +function faultTolerantResolve(address, cb) { + var opts = { + retries: 2, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: 2 * 1000, + randomize: true + }; + var operation = retry.operation(opts); + + operation.attempt(function(currentAttempt) { + dns.resolve(address, function(err, addresses) { + if (operation.retry(err)) { + return; + } + + cb(operation.mainError(), operation.errors(), addresses); + }); + }); +} + +faultTolerantResolve('nodejs.org', function(err, errors, addresses) { + console.warn('err:'); + console.log(err); + + console.warn('addresses:'); + console.log(addresses); +}); \ No newline at end of file diff --git a/node_modules/retry/example/stop.js b/node_modules/retry/example/stop.js new file mode 100644 index 0000000..e1ceafe --- /dev/null +++ b/node_modules/retry/example/stop.js @@ -0,0 +1,40 @@ +var retry = require('../lib/retry'); + +function attemptAsyncOperation(someInput, cb) { + var opts = { + retries: 2, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: 2 * 1000, + randomize: true + }; + var operation = retry.operation(opts); + + operation.attempt(function(currentAttempt) { + failingAsyncOperation(someInput, function(err, result) { + + if (err && err.message === 'A fatal error') { + operation.stop(); + return cb(err); + } + + if (operation.retry(err)) { + return; + } + + cb(operation.mainError(), operation.errors(), result); + }); + }); +} + +attemptAsyncOperation('test input', function(err, errors, result) { + console.warn('err:'); + console.log(err); + + console.warn('result:'); + console.log(result); +}); + +function failingAsyncOperation(input, cb) { + return setImmediate(cb.bind(null, new Error('A fatal error'))); +} diff --git a/node_modules/retry/index.js b/node_modules/retry/index.js new file mode 100644 index 0000000..ee62f3a --- /dev/null +++ b/node_modules/retry/index.js @@ -0,0 +1 @@ +module.exports = require('./lib/retry'); \ No newline at end of file diff --git a/node_modules/retry/lib/retry.js b/node_modules/retry/lib/retry.js new file mode 100644 index 0000000..5e85e79 --- /dev/null +++ b/node_modules/retry/lib/retry.js @@ -0,0 +1,100 @@ +var RetryOperation = require('./retry_operation'); + +exports.operation = function(options) { + var timeouts = exports.timeouts(options); + return new RetryOperation(timeouts, { + forever: options && (options.forever || options.retries === Infinity), + unref: options && options.unref, + maxRetryTime: options && options.maxRetryTime + }); +}; + +exports.timeouts = function(options) { + if (options instanceof Array) { + return [].concat(options); + } + + var opts = { + retries: 10, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: Infinity, + randomize: false + }; + for (var key in options) { + opts[key] = options[key]; + } + + if (opts.minTimeout > opts.maxTimeout) { + throw new Error('minTimeout is greater than maxTimeout'); + } + + var timeouts = []; + for (var i = 0; i < opts.retries; i++) { + timeouts.push(this.createTimeout(i, opts)); + } + + if (options && options.forever && !timeouts.length) { + timeouts.push(this.createTimeout(i, opts)); + } + + // sort the array numerically ascending + timeouts.sort(function(a,b) 
{ + return a - b; + }); + + return timeouts; +}; + +exports.createTimeout = function(attempt, opts) { + var random = (opts.randomize) + ? (Math.random() + 1) + : 1; + + var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); + timeout = Math.min(timeout, opts.maxTimeout); + + return timeout; +}; + +exports.wrap = function(obj, options, methods) { + if (options instanceof Array) { + methods = options; + options = null; + } + + if (!methods) { + methods = []; + for (var key in obj) { + if (typeof obj[key] === 'function') { + methods.push(key); + } + } + } + + for (var i = 0; i < methods.length; i++) { + var method = methods[i]; + var original = obj[method]; + + obj[method] = function retryWrapper(original) { + var op = exports.operation(options); + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); + + args.push(function(err) { + if (op.retry(err)) { + return; + } + if (err) { + arguments[0] = op.mainError(); + } + callback.apply(this, arguments); + }); + + op.attempt(function() { + original.apply(obj, args); + }); + }.bind(obj, original); + obj[method].options = options; + } +}; diff --git a/node_modules/retry/lib/retry_operation.js b/node_modules/retry/lib/retry_operation.js new file mode 100644 index 0000000..105ce72 --- /dev/null +++ b/node_modules/retry/lib/retry_operation.js @@ -0,0 +1,162 @@ +function RetryOperation(timeouts, options) { + // Compatibility for the old (timeouts, retryForever) signature + if (typeof options === 'boolean') { + options = { forever: options }; + } + + this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); + this._timeouts = timeouts; + this._options = options || {}; + this._maxRetryTime = options && options.maxRetryTime || Infinity; + this._fn = null; + this._errors = []; + this._attempts = 1; + this._operationTimeout = null; + this._operationTimeoutCb = null; + this._timeout = null; + this._operationStart = null; + this._timer = null; + + if (this._options.forever) { + this._cachedTimeouts = this._timeouts.slice(0); + } +} +module.exports = RetryOperation; + +RetryOperation.prototype.reset = function() { + this._attempts = 1; + this._timeouts = this._originalTimeouts.slice(0); +} + +RetryOperation.prototype.stop = function() { + if (this._timeout) { + clearTimeout(this._timeout); + } + if (this._timer) { + clearTimeout(this._timer); + } + + this._timeouts = []; + this._cachedTimeouts = null; +}; + +RetryOperation.prototype.retry = function(err) { + if (this._timeout) { + clearTimeout(this._timeout); + } + + if (!err) { + return false; + } + var currentTime = new Date().getTime(); + if (err && currentTime - this._operationStart >= this._maxRetryTime) { + this._errors.push(err); + this._errors.unshift(new Error('RetryOperation timeout occurred')); + return false; + } + + this._errors.push(err); + + var timeout = this._timeouts.shift(); + if (timeout === undefined) { + if (this._cachedTimeouts) { + // retry forever, only keep last error + this._errors.splice(0, this._errors.length - 1); + timeout = this._cachedTimeouts.slice(-1); + } else { + return false; + } + } + + var self = this; + this._timer = setTimeout(function() { + self._attempts++; + + if (self._operationTimeoutCb) { + self._timeout = setTimeout(function() { + self._operationTimeoutCb(self._attempts); + }, self._operationTimeout); + + if (self._options.unref) { + self._timeout.unref(); + } + } + + self._fn(self._attempts); + }, timeout); + + if (this._options.unref) { + this._timer.unref(); + } + + return true; +}; + 
+RetryOperation.prototype.attempt = function(fn, timeoutOps) { + this._fn = fn; + + if (timeoutOps) { + if (timeoutOps.timeout) { + this._operationTimeout = timeoutOps.timeout; + } + if (timeoutOps.cb) { + this._operationTimeoutCb = timeoutOps.cb; + } + } + + var self = this; + if (this._operationTimeoutCb) { + this._timeout = setTimeout(function() { + self._operationTimeoutCb(); + }, self._operationTimeout); + } + + this._operationStart = new Date().getTime(); + + this._fn(this._attempts); +}; + +RetryOperation.prototype.try = function(fn) { + console.log('Using RetryOperation.try() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = function(fn) { + console.log('Using RetryOperation.start() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = RetryOperation.prototype.try; + +RetryOperation.prototype.errors = function() { + return this._errors; +}; + +RetryOperation.prototype.attempts = function() { + return this._attempts; +}; + +RetryOperation.prototype.mainError = function() { + if (this._errors.length === 0) { + return null; + } + + var counts = {}; + var mainError = null; + var mainErrorCount = 0; + + for (var i = 0; i < this._errors.length; i++) { + var error = this._errors[i]; + var message = error.message; + var count = (counts[message] || 0) + 1; + + counts[message] = count; + + if (count >= mainErrorCount) { + mainError = error; + mainErrorCount = count; + } + } + + return mainError; +}; diff --git a/node_modules/retry/package.json b/node_modules/retry/package.json new file mode 100644 index 0000000..48f35e8 --- /dev/null +++ b/node_modules/retry/package.json @@ -0,0 +1,36 @@ +{ + "author": "Tim Koschützki (http://debuggable.com/)", + "name": "retry", + "description": "Abstraction for exponential and custom retry strategies for failed operations.", + "license": "MIT", + "version": "0.13.1", + "homepage": "https://github.com/tim-kos/node-retry", + "repository": { + "type": "git", + "url": "git://github.com/tim-kos/node-retry.git" + }, + "files": [ + "lib", + "example" + ], + "directories": { + "lib": "./lib" + }, + "main": "index.js", + "engines": { + "node": ">= 4" + }, + "dependencies": {}, + "devDependencies": { + "fake": "0.2.0", + "istanbul": "^0.4.5", + "tape": "^4.8.0" + }, + "scripts": { + "test": "./node_modules/.bin/istanbul cover ./node_modules/tape/bin/tape ./test/integration/*.js", + "release:major": "env SEMANTIC=major npm run release", + "release:minor": "env SEMANTIC=minor npm run release", + "release:patch": "env SEMANTIC=patch npm run release", + "release": "npm version ${SEMANTIC:-patch} -m \"Release %s\" && git push && git push --tags && npm publish" + } +} diff --git a/node_modules/stream-events/index.d.ts b/node_modules/stream-events/index.d.ts new file mode 100644 index 0000000..56ed748 --- /dev/null +++ b/node_modules/stream-events/index.d.ts @@ -0,0 +1,5 @@ +import { Stream } from "stream"; +declare function StreamEvents(stream: StreamType) + : StreamType; +declare namespace StreamEvents { } +export = StreamEvents diff --git a/node_modules/stream-events/index.js b/node_modules/stream-events/index.js new file mode 100644 index 0000000..28d50a6 --- /dev/null +++ b/node_modules/stream-events/index.js @@ -0,0 +1,30 @@ +'use strict'; + +var stubs = require('stubs') + +/* + * StreamEvents can be used 2 ways: + * + * 1: + * function MyStream() { + * require('stream-events').call(this) + * } + * + * 2: + * require('stream-events')(myStream) + */ +function StreamEvents(stream) { + stream = stream || this + + var cfg 
= { + callthrough: true, + calls: 1 + } + + stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) + stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) + + return stream +} + +module.exports = StreamEvents diff --git a/node_modules/stream-events/package.json b/node_modules/stream-events/package.json new file mode 100644 index 0000000..bd6c778 --- /dev/null +++ b/node_modules/stream-events/package.json @@ -0,0 +1,38 @@ +{ + "name": "stream-events", + "version": "1.0.5", + "description": "Get an event when you're being sent data or asked for it.", + "main": "index.js", + "types": "index.d.ts", + "files": [ + "index.js", + "index.d.ts" + ], + "scripts": { + "test": "node ./test" + }, + "repository": { + "type": "git", + "url": "https://github.com/stephenplusplus/stream-events" + }, + "keywords": [ + "stream", + "events", + "read", + "write", + "duplexify", + "lazy-stream" + ], + "author": "Stephen Sawchuk", + "license": "MIT", + "bugs": { + "url": "https://github.com/stephenplusplus/stream-events/issues" + }, + "homepage": "https://github.com/stephenplusplus/stream-events", + "devDependencies": { + "duplexify": "^3.2.0" + }, + "dependencies": { + "stubs": "^3.0.0" + } +} diff --git a/node_modules/stream-events/readme.md b/node_modules/stream-events/readme.md new file mode 100644 index 0000000..695a7ce --- /dev/null +++ b/node_modules/stream-events/readme.md @@ -0,0 +1,64 @@ +# stream-events + +> Get an event when you're being sent data or asked for it. + +## About + +This is just a simple thing that tells you when `_read` and `_write` have been called, saving you the trouble of writing this yourself. You receive two events `reading` and `writing`-- no magic is performed. + +This works well with [duplexify](https://github.com/mafintosh/duplexify) or lazy streams, so you can wait until you know you're being used as a stream to do something asynchronous, such as fetching an API token. 
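The "do something asynchronous" part of that sentence is the usual reason to reach for this module. A small hedged sketch of that lazy-setup pattern; `fetchApiToken` is a hypothetical helper, not part of this package:

```js
const { PassThrough } = require('stream');
const streamEvents = require('stream-events');

// Hypothetical stand-in for whatever expensive setup you want to defer.
function fetchApiToken() {
  return Promise.resolve('dummy-token');
}

const upload = streamEvents(new PassThrough());

upload.once('writing', function () {
  // Fires the first time _write is called, i.e. only once someone actually sends data.
  fetchApiToken().then((token) => console.log('lazily authenticated with', token));
});

upload.write('first chunk'); // triggers the one-time 'writing' event
```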
+ + +## Use +```sh +$ npm install --save stream-events +``` +```js +var stream = require('stream') +var streamEvents = require('stream-events') +var util = require('util') + +function MyStream() { + stream.Duplex.call(this) + streamEvents.call(this) +} +util.inherits(MyStream, stream.Duplex) + +MyStream.prototype._read = function(chunk) { + console.log('_read called as usual') + this.push(new Buffer(chunk)) + this.push(null) +} + +MyStream.prototype._write = function() { + console.log('_write called as usual') +} + +var stream = new MyStream + +stream.on('reading', function() { + console.log('stream is being asked for data') +}) + +stream.on('writing', function() { + console.log('stream is being sent data') +}) + +stream.pipe(stream) +``` + +### Using with Duplexify +```js +var duplexify = require('duplexify') +var streamEvents = require('stream-events') +var fs = require('fs') + +var dup = streamEvents(duplexify()) + +dup.on('writing', function() { + // do something async + dup.setWritable(/*writable stream*/) +}) + +fs.createReadStream('file').pipe(dup) +``` \ No newline at end of file diff --git a/node_modules/stream-shift/.github/workflows/test.yml b/node_modules/stream-shift/.github/workflows/test.yml new file mode 100644 index 0000000..9b94272 --- /dev/null +++ b/node_modules/stream-shift/.github/workflows/test.yml @@ -0,0 +1,23 @@ +name: Build Status +on: + push: + branches: + - master + pull_request: + branches: + - master +jobs: + build: + strategy: + matrix: + node-version: [lts/*] + os: [ubuntu-latest, macos-latest, windows-latest] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v3 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + - run: npm install + - run: npm test diff --git a/node_modules/stream-shift/LICENSE b/node_modules/stream-shift/LICENSE new file mode 100644 index 0000000..bae9da7 --- /dev/null +++ b/node_modules/stream-shift/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/stream-shift/README.md b/node_modules/stream-shift/README.md new file mode 100644 index 0000000..d9cc2d9 --- /dev/null +++ b/node_modules/stream-shift/README.md @@ -0,0 +1,25 @@ +# stream-shift + +Returns the next buffer/object in a stream's readable queue + +``` +npm install stream-shift +``` + +[![build status](http://img.shields.io/travis/mafintosh/stream-shift.svg?style=flat)](http://travis-ci.org/mafintosh/stream-shift) + +## Usage + +``` js +var shift = require('stream-shift') + +console.log(shift(someStream)) // first item in its buffer +``` + +## Credit + +Thanks [@dignifiedquire](https://github.com/dignifiedquire) for making this work on node 6 + +## License + +MIT diff --git a/node_modules/stream-shift/index.js b/node_modules/stream-shift/index.js new file mode 100644 index 0000000..566df88 --- /dev/null +++ b/node_modules/stream-shift/index.js @@ -0,0 +1,21 @@ +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + var idx = state.bufferIndex || 0 + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } else if (state.buffer.length - idx > 0 && state.buffer[idx]) { + return state.buffer[idx].length + } + } + + return state.length +} diff --git a/node_modules/stream-shift/package.json b/node_modules/stream-shift/package.json new file mode 100644 index 0000000..8fe3301 --- /dev/null +++ b/node_modules/stream-shift/package.json @@ -0,0 +1,25 @@ +{ + "name": "stream-shift", + "version": "1.0.3", + "description": "Returns the next buffer/object in a stream's readable queue", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "standard": "^7.1.2", + "tape": "^4.6.0", + "through2": "^2.0.1" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/stream-shift.git" + }, + "author": "Mathias Buus (@mafintosh)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/stream-shift/issues" + }, + "homepage": "https://github.com/mafintosh/stream-shift" +} diff --git a/node_modules/stream-shift/test.js b/node_modules/stream-shift/test.js new file mode 100644 index 0000000..bf4ed66 --- /dev/null +++ b/node_modules/stream-shift/test.js @@ -0,0 +1,48 @@ +var tape = require('tape') +var through = require('through2') +var stream = require('stream') +var shift = require('./') + +tape('shifts next', function (t) { + var passthrough = through() + + passthrough.write('hello') + passthrough.write('world') + + t.same(shift(passthrough), Buffer.from('hello')) + t.same(shift(passthrough), Buffer.from('world')) + t.end() +}) + +tape('shifts next with core', function (t) { + var passthrough = stream.PassThrough() + + passthrough.write('hello') + passthrough.write('world') + + t.same(shift(passthrough), Buffer.from('hello')) + t.same(shift(passthrough), Buffer.from('world')) + t.end() +}) + +tape('shifts next with object mode', function (t) { + var passthrough = through({objectMode: true}) + + passthrough.write({hello: 1}) + passthrough.write({world: 1}) + + t.same(shift(passthrough), {hello: 1}) + t.same(shift(passthrough), {world: 1}) + t.end() +}) + +tape('shifts next with object mode with core', function (t) { + var passthrough = 
stream.PassThrough({objectMode: true}) + + passthrough.write({hello: 1}) + passthrough.write({world: 1}) + + t.same(shift(passthrough), {hello: 1}) + t.same(shift(passthrough), {world: 1}) + t.end() +}) diff --git a/node_modules/string_decoder/LICENSE b/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000..778edb2 --- /dev/null +++ b/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/string_decoder/README.md b/node_modules/string_decoder/README.md new file mode 100644 index 0000000..5fd5831 --- /dev/null +++ b/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. 
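In practice that means the familiar `StringDecoder` API: you feed it Buffers and get back only complete characters, with any trailing bytes of a multi-byte sequence held until the next chunk. A minimal sketch (the three bytes are simply the UTF-8 encoding of '€'):

```js
const { StringDecoder } = require('string_decoder');

const decoder = new StringDecoder('utf8');

// '€' (U+20AC) is the three UTF-8 bytes E2 82 AC; split them across two chunks.
console.log(decoder.write(Buffer.from([0xe2, 0x82]))); // '' – incomplete character is buffered
console.log(decoder.write(Buffer.from([0xac])));       // '€' – completed by the next chunk
console.log(decoder.end());                            // '' – nothing left over
```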
+ +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/string_decoder/lib/string_decoder.js b/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000..2e89e63 --- /dev/null +++ b/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. 
+function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/string_decoder/package.json b/node_modules/string_decoder/package.json new file mode 100644 index 0000000..b2bb141 --- /dev/null +++ b/node_modules/string_decoder/package.json @@ -0,0 +1,34 @@ +{ + "name": "string_decoder", + "version": "1.3.0", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "files": [ + "lib" + ], + "dependencies": { + "safe-buffer": "~5.2.0" + }, + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/node_modules/strnum/.vscode/launch.json b/node_modules/strnum/.vscode/launch.json new file mode 100644 index 0000000..b87b349 --- /dev/null +++ b/node_modules/strnum/.vscode/launch.json @@ -0,0 +1,25 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "Jasmine Tests", + "program": "${workspaceFolder}/node_modules/jasmine/bin/jasmine.js", + "args": [ + "${workspaceFolder}/spec/attr_spec.js" + ], + "internalConsoleOptions": "openOnSessionStart" + },{ + "type": "node", + "request": "launch", + "name": "Jasmine Tests current test file", + "program": "${workspaceFolder}/node_modules/jasmine/bin/jasmine.js", + "args": [ + "${file}" + ], + "internalConsoleOptions": "openOnSessionStart" + } + ] + +} \ No newline at end of file diff --git a/node_modules/strnum/LICENSE 
b/node_modules/strnum/LICENSE new file mode 100644 index 0000000..6450554 --- /dev/null +++ b/node_modules/strnum/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Natural Intelligence + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/strnum/README.md b/node_modules/strnum/README.md new file mode 100644 index 0000000..b698f60 --- /dev/null +++ b/node_modules/strnum/README.md @@ -0,0 +1,86 @@ +# strnum +Parse string into Number based on configuration + +```bash +npm install strnum +``` +```js +const toNumber = require("strnum"); + +toNumber(undefined) // undefined +toNumber(null)) //null +toNumber("")) // "" +toNumber("string"); //"string") +toNumber("12,12"); //"12,12") +toNumber("12 12"); //"12 12") +toNumber("12-12"); //"12-12") +toNumber("12.12.12"); //"12.12.12") +toNumber("0x2f"); //47) +toNumber("-0x2f"); //-47) +toNumber("0x2f", { hex : true}); //47) +toNumber("-0x2f", { hex : true}); //-47) +toNumber("0x2f", { hex : false}); //"0x2f") +toNumber("-0x2f", { hex : false}); //"-0x2f") +toNumber("06"); //6) +toNumber("06", { leadingZeros : true}); //6) +toNumber("06", { leadingZeros : false}); //"06") + +toNumber("006"); //6) +toNumber("006", { leadingZeros : true}); //6) +toNumber("006", { leadingZeros : false}); //"006") +toNumber("0.0"); //0) +toNumber("00.00"); //0) +toNumber("0.06"); //0.06) +toNumber("00.6"); //0.6) +toNumber(".006"); //0.006) +toNumber("6.0"); //6) +toNumber("06.0"); //6) + +toNumber("0.0", { leadingZeros : false}); //0) +toNumber("00.00", { leadingZeros : false}); //"00.00") +toNumber("0.06", { leadingZeros : false}); //0.06) +toNumber("00.6", { leadingZeros : false}); //"00.6") +toNumber(".006", { leadingZeros : false}); //0.006) +toNumber("6.0" , { leadingZeros : false}); //6) +toNumber("06.0" , { leadingZeros : false}); //"06.0") +toNumber("-06"); //-6) +toNumber("-06", { leadingZeros : true}); //-6) +toNumber("-06", { leadingZeros : false}); //"-06") + +toNumber("-0.0"); //-0) +toNumber("-00.00"); //-0) +toNumber("-0.06"); //-0.06) +toNumber("-00.6"); //-0.6) +toNumber("-.006"); //-0.006) +toNumber("-6.0"); //-6) +toNumber("-06.0"); //-6) + +toNumber("-0.0" , { leadingZeros : false}); //-0) +toNumber("-00.00", { leadingZeros : false}); //"-00.00") +toNumber("-0.06", { leadingZeros : false}); //-0.06) +toNumber("-00.6", { leadingZeros : false}); //"-00.6") +toNumber("-.006", {leadingZeros : false}); //-0.006) +toNumber("-6.0" , { leadingZeros : false}); //-6) +toNumber("-06.0" , { leadingZeros : false}); //"-06.0") 
+toNumber("420926189200190257681175017717") ; //4.209261892001902e+29) +toNumber("000000000000000000000000017717" , { leadingZeros : false}); //"000000000000000000000000017717") +toNumber("000000000000000000000000017717" , { leadingZeros : true}); //17717) +toNumber("01.0e2" , { leadingZeros : false}); //"01.0e2") +toNumber("-01.0e2" , { leadingZeros : false}); //"-01.0e2") +toNumber("01.0e2") ; //100) +toNumber("-01.0e2") ; //-100) +toNumber("1.0e2") ; //100) + +toNumber("-1.0e2") ; //-100) +toNumber("1.0e-2"); //0.01) + +toNumber("+1212121212"); // 1212121212 +toNumber("+1212121212", { skipLike: /\+[0-9]{10}/} )); //"+1212121212" +``` + +Supported Options +```js +hex : true, //when hexadecimal string should be parsed +leadingZeros: true, //when number with leading zeros like 08 should be parsed. 0.0 is not impacted +eNotation: true //when number with eNotation or number parsed in eNotation should be considered +``` \ No newline at end of file diff --git a/node_modules/strnum/package.json b/node_modules/strnum/package.json new file mode 100644 index 0000000..c9da3ca --- /dev/null +++ b/node_modules/strnum/package.json @@ -0,0 +1,24 @@ +{ + "name": "strnum", + "version": "1.0.5", + "description": "Parse String to Number based on configuration", + "main": "strnum.js", + "scripts": { + "test": "jasmine strnum.test.js" + }, + "keywords": [ + "string", + "number", + "parse", + "convert" + ], + "repository": { + "type": "git", + "url": "https://github.com/NaturalIntelligence/strnum" + }, + "author": "Amit Gupta (https://amitkumargupta.work/)", + "license": "MIT", + "devDependencies": { + "jasmine": "^3.10.0" + } +} diff --git a/node_modules/strnum/strnum.js b/node_modules/strnum/strnum.js new file mode 100644 index 0000000..723c08b --- /dev/null +++ b/node_modules/strnum/strnum.js @@ -0,0 +1,124 @@ +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)(\.[0-9]+([eE]\-?[0-9]+)?|[0-9]+(\.[0-9]+([eE]\-?[0-9]+)?)?)$/; +// const octRegex = /0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +//polyfill +if (!Number.parseInt && window.parseInt) { + Number.parseInt = window.parseInt; +} +if (!Number.parseFloat && window.parseFloat) { + Number.parseFloat = window.parseFloat; +} + + +const consider = { + hex : true, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + // const options = Object.assign({}, consider); + // if(opt.leadingZeros === false){ + // options.leadingZeros = false; + // }else if(opt.hex === false){ + // options.hex = false; + // } + + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + // if(trimmedStr === "0.0") return 0; + // else if(trimmedStr === "+0.0") return 0; + // else if(trimmedStr === "-0.0") return -0; + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if (options.hex && hexRegex.test(trimmedStr)) { + return Number.parseInt(trimmedStr, 16); + // } else if (options.parseOct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + const 
eNotation = match[4] || match[6]; + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(eNotation){ //given number has enotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + // const decimalPart = match[5].substr(1); + // const intPart = trimmedStr.substr(0,trimmedStr.indexOf(".")); + + + // const p = numStr.indexOf("."); + // const givenIntPart = numStr.substr(0,p); + // const givenDecPart = numStr.substr(p+1); + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + // if(numTrimmedByZeros === numStr){ + // if(options.leadingZeros) return num; + // else return str; + // }else return str; + if(numTrimmedByZeros === numStr) return num; + else if(sign+numTrimmedByZeros === numStr) return num; + else return str; + } + + if(trimmedStr === numStr) return num; + else if(trimmedStr === sign+numStr) return num; + // else{ + // //number with +/- sign + // trimmedStr.test(/[-+][0-9]); + + // } + return str; + } + // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str; + + }else{ //non-numeric string + return str; + } + } +} + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; +} +module.exports = toNumber diff --git a/node_modules/strnum/strnum.test.js b/node_modules/strnum/strnum.test.js new file mode 100644 index 0000000..d0b099f --- /dev/null +++ b/node_modules/strnum/strnum.test.js @@ -0,0 +1,150 @@ +const toNumber = require("./strnum"); + +describe("Should convert all the valid numeric strings to number", () => { + it("should return undefined, null, empty string, or non-numeric as it is", () => { + expect(toNumber(undefined)).not.toBeDefined(); + expect(toNumber(null)).toEqual(null); + expect(toNumber("")).toEqual(""); + expect(toNumber("string")).toEqual("string"); + }); + it("should not parse number with spaces or comma", () => { + expect(toNumber("12,12")).toEqual("12,12"); + expect(toNumber("12 12")).toEqual("12 12"); + expect(toNumber("12-12")).toEqual("12-12"); + expect(toNumber("12.12.12")).toEqual("12.12.12"); + }) + it("should consider + sign", () => { + expect(toNumber("+12")).toEqual(12); + expect(toNumber("+ 12")).toEqual("+ 12"); + expect(toNumber("12+12")).toEqual("12+12"); + expect(toNumber("1212+")).toEqual("1212+"); + }) + it("should parse hexadecimal values", () => { + expect(toNumber("0x2f")).toEqual(47); + expect(toNumber("-0x2f")).toEqual(-47); + expect(toNumber("0x2f", { hex : true})).toEqual(47); + expect(toNumber("-0x2f", { hex : true})).toEqual(-47); + expect(toNumber("0x2f", { hex : 
false})).toEqual("0x2f"); + expect(toNumber("-0x2f", { hex : false})).toEqual("-0x2f"); + }) + it("should not parse strings with 0x embedded", () => { + expect(toNumber("0xzz")).toEqual("0xzz"); + expect(toNumber("iweraf0x123qwerqwer")).toEqual("iweraf0x123qwerqwer"); + expect(toNumber("1230x55")).toEqual("1230x55"); + expect(toNumber("JVBERi0xLjMNCiXi48")).toEqual("JVBERi0xLjMNCiXi48"); + }) + it("leading zeros", () => { + expect(toNumber("06")).toEqual(6); + expect(toNumber("06", { leadingZeros : true})).toEqual(6); + expect(toNumber("06", { leadingZeros : false})).toEqual("06"); + + expect(toNumber("006")).toEqual(6); + expect(toNumber("006", { leadingZeros : true})).toEqual(6); + expect(toNumber("006", { leadingZeros : false})).toEqual("006"); + + expect(toNumber("000000000000000000000000017717" , { leadingZeros : false})).toEqual("000000000000000000000000017717"); + expect(toNumber("000000000000000000000000017717" , { leadingZeros : true})).toEqual(17717); + expect(toNumber("020211201030005811824") ).toEqual("020211201030005811824"); + expect(toNumber("0420926189200190257681175017717") ).toEqual(4.209261892001902e+29); + }) + it("invalid floating number", () => { + expect(toNumber("20.21.030") ).toEqual("20.21.030"); + expect(toNumber("0.21.030") ).toEqual("0.21.030"); + expect(toNumber("0.21.") ).toEqual("0.21."); + expect(toNumber("0.") ).toEqual("0."); + expect(toNumber("1.") ).toEqual("1."); + }); + it("floating point and leading zeros", () => { + expect(toNumber("0.0")).toEqual(0); + expect(toNumber("00.00")).toEqual(0); + expect(toNumber("0.06")).toEqual(0.06); + expect(toNumber("00.6")).toEqual(0.6); + expect(toNumber(".006")).toEqual(0.006); + expect(toNumber("6.0")).toEqual(6); + expect(toNumber("06.0")).toEqual(6); + + expect(toNumber("0.0", { leadingZeros : false})).toEqual(0); + expect(toNumber("00.00", { leadingZeros : false})).toEqual("00.00"); + expect(toNumber("0.06", { leadingZeros : false})).toEqual(0.06); + expect(toNumber("00.6", { leadingZeros : false})).toEqual("00.6"); + expect(toNumber(".006", { leadingZeros : false})).toEqual(0.006); + expect(toNumber("6.0" , { leadingZeros : false})).toEqual(6); + expect(toNumber("06.0" , { leadingZeros : false})).toEqual("06.0"); + }) + it("negative number leading zeros", () => { + expect(toNumber("+06")).toEqual(6); + expect(toNumber("-06")).toEqual(-6); + expect(toNumber("-06", { leadingZeros : true})).toEqual(-6); + expect(toNumber("-06", { leadingZeros : false})).toEqual("-06"); + + expect(toNumber("-0.0")).toEqual(-0); + expect(toNumber("-00.00")).toEqual(-0); + expect(toNumber("-0.06")).toEqual(-0.06); + expect(toNumber("-00.6")).toEqual(-0.6); + expect(toNumber("-.006")).toEqual(-0.006); + expect(toNumber("-6.0")).toEqual(-6); + expect(toNumber("-06.0")).toEqual(-6); + + expect(toNumber("-0.0" , { leadingZeros : false})).toEqual(-0); + expect(toNumber("-00.00", { leadingZeros : false})).toEqual("-00.00"); + expect(toNumber("-0.06", { leadingZeros : false})).toEqual(-0.06); + expect(toNumber("-00.6", { leadingZeros : false})).toEqual("-00.6"); + expect(toNumber("-.006", {leadingZeros : false})).toEqual(-0.006); + expect(toNumber("-6.0" , { leadingZeros : false})).toEqual(-6); + expect(toNumber("-06.0" , { leadingZeros : false})).toEqual("-06.0"); + }) + it("long number", () => { + expect(toNumber("020211201030005811824") ).toEqual("020211201030005811824"); + expect(toNumber("20211201030005811824") ).toEqual("20211201030005811824"); + expect(toNumber("20.211201030005811824") ).toEqual("20.211201030005811824"); + 
expect(toNumber("0.211201030005811824") ).toEqual("0.211201030005811824"); + }); + it("scientific notation", () => { + expect(toNumber("01.0e2" , { leadingZeros : false})).toEqual("01.0e2"); + expect(toNumber("-01.0e2" , { leadingZeros : false})).toEqual("-01.0e2"); + expect(toNumber("01.0e2") ).toEqual(100); + expect(toNumber("-01.0e2") ).toEqual(-100); + expect(toNumber("1.0e2") ).toEqual(100); + + expect(toNumber("-1.0e2") ).toEqual(-100); + expect(toNumber("1.0e-2")).toEqual(0.01); + + expect(toNumber("420926189200190257681175017717") ).toEqual(4.209261892001902e+29); + expect(toNumber("420926189200190257681175017717" , { eNotation: false} )).toEqual("420926189200190257681175017717"); + + }); + + it("scientific notation with upper E", () => { + expect(toNumber("01.0E2" , { leadingZeros : false})).toEqual("01.0E2"); + expect(toNumber("-01.0E2" , { leadingZeros : false})).toEqual("-01.0E2"); + expect(toNumber("01.0E2") ).toEqual(100); + expect(toNumber("-01.0E2") ).toEqual(-100); + expect(toNumber("1.0E2") ).toEqual(100); + + expect(toNumber("-1.0E2") ).toEqual(-100); + expect(toNumber("1.0E-2")).toEqual(0.01); + }); + + it("should skip matching pattern", () => { + expect(toNumber("+12", { skipLike: /\+[0-9]{10}/} )).toEqual(12); + expect(toNumber("12+12", { skipLike: /\+[0-9]{10}/} )).toEqual("12+12"); + expect(toNumber("12+1212121212", { skipLike: /\+[0-9]{10}/} )).toEqual("12+1212121212"); + expect(toNumber("+1212121212") ).toEqual(1212121212); + expect(toNumber("+1212121212", { skipLike: /\+[0-9]{10}/} )).toEqual("+1212121212"); + }) + it("should not change string if not number", () => { + expect(toNumber("+12 12")).toEqual("+12 12"); + expect(toNumber(" +12 12 ")).toEqual(" +12 12 "); + }) + it("should ignore sorrounded spaces ", () => { + expect(toNumber(" +1212 ")).toEqual(1212); + }) + + it("negative numbers", () => { + expect(toNumber("+1212")).toEqual(1212); + expect(toNumber("+12.12")).toEqual(12.12); + expect(toNumber("-12.12")).toEqual(-12.12); + expect(toNumber("-012.12")).toEqual(-12.12); + expect(toNumber("-012.12")).toEqual(-12.12); + }) +}); diff --git a/node_modules/stubs/index.js b/node_modules/stubs/index.js new file mode 100644 index 0000000..252f34f --- /dev/null +++ b/node_modules/stubs/index.js @@ -0,0 +1,35 @@ +'use strict' + +module.exports = function stubs(obj, method, cfg, stub) { + if (!obj || !method || !obj[method]) + throw new Error('You must provide an object and a key for an existing method') + + if (!stub) { + stub = cfg + cfg = {} + } + + stub = stub || function() {} + + cfg.callthrough = cfg.callthrough || false + cfg.calls = cfg.calls || 0 + + var norevert = cfg.calls === 0 + + var cached = obj[method].bind(obj) + + obj[method] = function() { + var args = [].slice.call(arguments) + var returnVal + + if (cfg.callthrough) + returnVal = cached.apply(obj, args) + + returnVal = stub.apply(obj, args) || returnVal + + if (!norevert && --cfg.calls === 0) + obj[method] = cached + + return returnVal + } +} diff --git a/node_modules/stubs/package.json b/node_modules/stubs/package.json new file mode 100644 index 0000000..7ced3ab --- /dev/null +++ b/node_modules/stubs/package.json @@ -0,0 +1,25 @@ +{ + "name": "stubs", + "version": "3.0.0", + "description": "Easy method stubber.", + "main": "index.js", + "scripts": { + "test": "node ./test" + }, + "repository": { + "type": "git", + "url": "https://github.com/stephenplusplus/stubs" + }, + "keywords": [ + "stubs" + ], + "author": "Stephen Sawchuk", + "license": "MIT", + "bugs": { + "url": 
"https://github.com/stephenplusplus/stubs/issues" + }, + "homepage": "https://github.com/stephenplusplus/stubs", + "devDependencies": { + "tess": "^1.0.0" + } +} diff --git a/node_modules/stubs/readme.md b/node_modules/stubs/readme.md new file mode 100644 index 0000000..ed07628 --- /dev/null +++ b/node_modules/stubs/readme.md @@ -0,0 +1,73 @@ +# stubs + +> It's a simple stubber. + +## About + +For when you don't want to write the same thing over and over to cache a method and call an override, then revert it, and blah blah. + + +## Use +```sh +$ npm install --save-dev stubs +``` +```js +var mylib = require('./lib/index.js') +var stubs = require('stubs') + +// make it a noop +stubs(mylib, 'create') + +// stub it out +stubs(mylib, 'create', function() { + // calls this instead +}) + +// stub it out, but call the original first +stubs(mylib, 'create', { callthrough: true }, function() { + // call original method, then call this +}) + +// use the stub for a while, then revert +stubs(mylib, 'create', { calls: 3 }, function() { + // call this 3 times, then use the original method +}) +``` + + +## API + +### stubs(object, method[[, opts], stub]) + +#### object +- Type: Object + +#### method +- Type: String + +Name of the method to stub. + +#### opts +- (optional) +- Type: Object + +##### opts.callthrough +- (optional) +- Type: Boolean +- Default: `false` + +Call the original method as well as the stub (if a stub is provided). + +##### opts.calls +- (optional) +- Type: Number +- Default: `0` (never revert) + +Number of calls to allow the stub to receive until reverting to the original. + +#### stub +- (optional) +- Type: Function +- Default: `function() {}` + +This method is called in place of the original method. If `opts.callthrough` is `true`, this method is called *after* the original method is called as well. 
\ No newline at end of file diff --git a/node_modules/stubs/test.js b/node_modules/stubs/test.js new file mode 100644 index 0000000..cae408e --- /dev/null +++ b/node_modules/stubs/test.js @@ -0,0 +1,172 @@ +'use strict' + +var assert = require('assert') +var stubs = require('./') +var tess = require('tess') + +tess('init', function(it) { + it('is a function', function() { + assert.equal(typeof stubs, 'function') + }) + + it('throws without obj || method', function() { + assert.throws(function() { + stubs() + }, /must provide/) + + assert.throws(function() { + stubs({}) + }, /must provide/) + }) +}) + +tess('stubs', function(it) { + it('stubs a method with a noop', function() { + var originalCalled = false + + var obj = {} + obj.method = function() { + originalCalled = true + } + + stubs(obj, 'method') + + obj.method() + + assert(!originalCalled) + }) + + it('accepts an override', function() { + var originalCalled = false + + var obj = {} + obj.method = function() { + originalCalled = true + } + + var replacementCalled = false + stubs(obj, 'method', function() { + replacementCalled = true + }) + + obj.method() + assert(!originalCalled) + assert(replacementCalled) + }) + + it('returns value of overridden method call', function() { + var overriddenUniqueVal = {} + + var obj = {} + obj.method = function() {} + + stubs(obj, 'method', { callthrough: true }, function() { + return overriddenUniqueVal + }) + + assert.strictEqual(obj.method(), overriddenUniqueVal) + }) + + it('calls through to original method', function() { + var originalCalled = false + + var obj = {} + obj.method = function() { + originalCalled = true + } + + var replacementCalled = false + stubs(obj, 'method', { callthrough: true }, function() { + replacementCalled = true + }) + + obj.method() + assert(originalCalled) + assert(replacementCalled) + }) + + it('returns value of original method call', function() { + var uniqueVal = Date.now() + + var obj = {} + obj.method = function() { + return uniqueVal + } + + stubs(obj, 'method', { callthrough: true }, function() {}) + + assert.equal(obj.method(), uniqueVal) + }) + + it('returns calls override and returns original value', function() { + var uniqueVal = {} + var overrideWasCalled = false + + var obj = {} + obj.method = function() { + return uniqueVal + } + + stubs(obj, 'method', { callthrough: true }, function() { + // does not return anything + overrideWasCalled = true + }) + + assert.strictEqual(obj.method(), uniqueVal) + assert.strictEqual(overrideWasCalled, true) + }) + + it('returns value of overridden method call', function() { + var uniqueVal = {} + var overriddenUniqueVal = {} + + var obj = {} + obj.method = function() { + return uniqueVal + } + + stubs(obj, 'method', { callthrough: true }, function() { + return overriddenUniqueVal + }) + + assert.strictEqual(obj.method(), overriddenUniqueVal) + }) + + it('stops calling stub after n calls', function() { + var timesToCall = 5 + var timesCalled = 0 + + var obj = {} + obj.method = function() { + assert.equal(timesCalled, timesToCall) + } + + stubs(obj, 'method', { calls: timesToCall }, function() { + timesCalled++ + }) + + obj.method() // 1 (stub) + obj.method() // 2 (stub) + obj.method() // 3 (stub) + obj.method() // 4 (stub) + obj.method() // 5 (stub) + obj.method() // 6 (original) + }) + + it('calls stub in original context of obj', function() { + var secret = 'brownies' + + function Class() { + this.method = function() {} + this.secret = secret + } + + var cl = new Class() + + stubs(cl, 'method', function() { + 
assert.equal(this.secret, secret) + }) + + cl.method() + }) +}) diff --git a/node_modules/teeny-request/CHANGELOG.md b/node_modules/teeny-request/CHANGELOG.md new file mode 100644 index 0000000..dcf3642 --- /dev/null +++ b/node_modules/teeny-request/CHANGELOG.md @@ -0,0 +1,253 @@ +# Changelog + +## [9.0.0](https://github.com/googleapis/teeny-request/compare/v8.0.3...v9.0.0) (2023-07-20) + + +### ⚠ BREAKING CHANGES + +* move minimum version to node 14 ([#312](https://github.com/googleapis/teeny-request/issues/312)) + +### Miscellaneous Chores + +* Move minimum version to node 14 ([#312](https://github.com/googleapis/teeny-request/issues/312)) ([e30cf76](https://github.com/googleapis/teeny-request/commit/e30cf765fb7997e36cf7cc8144dfe8480d12c58b)) + +## [8.0.3](https://github.com/googleapis/teeny-request/compare/v8.0.2...v8.0.3) (2023-02-17) + + +### Bug Fixes + +* Use pipeline in place of pipe to better handle cleanup / errors ([#299](https://github.com/googleapis/teeny-request/issues/299)) ([31d9002](https://github.com/googleapis/teeny-request/commit/31d900227d2ea7cb205fd4dc8db0aed5abd0dfa9)) + +## [8.0.2](https://github.com/googleapis/teeny-request/compare/v8.0.1...v8.0.2) (2022-09-09) + + +### Bug Fixes + +* **deps:** Update dependency uuid to v9 ([#292](https://github.com/googleapis/teeny-request/issues/292)) ([f9fa756](https://github.com/googleapis/teeny-request/commit/f9fa75647913e2be10a48635168b11520d70982f)) + +## [8.0.1](https://github.com/googleapis/teeny-request/compare/v8.0.0...v8.0.1) (2022-08-23) + + +### Bug Fixes + +* remove pip install statements ([#1546](https://github.com/googleapis/teeny-request/issues/1546)) ([#290](https://github.com/googleapis/teeny-request/issues/290)) ([94e7a86](https://github.com/googleapis/teeny-request/commit/94e7a86f13ab380399049e110c648cd381fb1e00)) + +## [8.0.0](https://github.com/googleapis/teeny-request/compare/v7.2.0...v8.0.0) (2022-05-09) + + +### ⚠ BREAKING CHANGES + +* drop node 10, update typescript to 4.6.3 (#279) + +### Build System + +* drop node 10, update typescript to 4.6.3 ([#279](https://github.com/googleapis/teeny-request/issues/279)) ([6f12739](https://github.com/googleapis/teeny-request/commit/6f1273993ae667078abae41a8029833749a25e6a)) + +## [7.2.0](https://github.com/googleapis/teeny-request/compare/v7.1.3...v7.2.0) (2022-03-31) + + +### Features + +* Add support for `NO_PROXY` ([#272](https://github.com/googleapis/teeny-request/issues/272)) ([b02b6e5](https://github.com/googleapis/teeny-request/commit/b02b6e515575814bbef7360586d07030b445705d)) + +### [7.1.3](https://www.github.com/googleapis/teeny-request/compare/v7.1.2...v7.1.3) (2021-09-24) + + +### Bug Fixes + +* **deps:** update dependency http-proxy-agent to v5 ([#253](https://www.github.com/googleapis/teeny-request/issues/253)) ([c84dff4](https://www.github.com/googleapis/teeny-request/commit/c84dff45e4a0e71fc5205838dae51f57f3139c06)) + +### [7.1.2](https://www.github.com/googleapis/teeny-request/compare/v7.1.1...v7.1.2) (2021-09-10) + + +### Bug Fixes + +* **build:** set default branch to main ([#247](https://www.github.com/googleapis/teeny-request/issues/247)) ([db3126d](https://www.github.com/googleapis/teeny-request/commit/db3126df77bc7e6cebb7d092001bbd3a444207d6)) + +### [7.1.1](https://www.github.com/googleapis/teeny-request/compare/v7.1.0...v7.1.1) (2021-06-30) + + +### Bug Fixes + +* throw error if missing uri or url ([#239](https://www.github.com/googleapis/teeny-request/issues/239)) 
([4d770e3](https://www.github.com/googleapis/teeny-request/commit/4d770e3b89254c4cec30c422cdcdad257500c9cc)) + +## [7.1.0](https://www.github.com/googleapis/teeny-request/compare/v7.0.1...v7.1.0) (2021-05-19) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#224](https://www.github.com/googleapis/teeny-request/issues/224)) ([3e7424f](https://www.github.com/googleapis/teeny-request/commit/3e7424fb63f0c0dca3606d6e1fc2f294f9d86ba5)) + + +### Bug Fixes + +* Buffer is allow as body without encoding to string ([#223](https://www.github.com/googleapis/teeny-request/issues/223)) ([d9bcdc3](https://www.github.com/googleapis/teeny-request/commit/d9bcdc36a60074fd78718ea8f7c4745d640e3b4f)) + +### [7.0.1](https://www.github.com/googleapis/teeny-request/compare/v7.0.0...v7.0.1) (2020-09-29) + + +### Bug Fixes + +* **deps:** update node-fetch to 2.6.1 ([#200](https://www.github.com/googleapis/teeny-request/issues/200)) ([8958a78](https://www.github.com/googleapis/teeny-request/commit/8958a78b117e5610f3ccf121ba1d650dbe9739f8)) + +## [7.0.0](https://www.github.com/googleapis/teeny-request/compare/v6.0.3...v7.0.0) (2020-06-01) + + +### ⚠ BREAKING CHANGES + +* dropping support for Node.js 8.x + +### Features + +* pass agent options when using agent pool config ([#149](https://www.github.com/googleapis/teeny-request/issues/149)) ([38ece79](https://www.github.com/googleapis/teeny-request/commit/38ece79151b667ec1a72ec50b1c7a58258924794)) +* warn on too many concurrent requests ([#165](https://www.github.com/googleapis/teeny-request/issues/165)) ([88ff2d0](https://www.github.com/googleapis/teeny-request/commit/88ff2d0d8e0fc25a4219ef5625b8de353ed4aa29)) + + +### Bug Fixes + +* apache license URL ([#468](https://www.github.com/googleapis/teeny-request/issues/468)) ([#156](https://www.github.com/googleapis/teeny-request/issues/156)) ([01ac7bd](https://www.github.com/googleapis/teeny-request/commit/01ac7bd01e870796fd15355e079649633d5d5983)) +* update template files for Node.js libraries ([#152](https://www.github.com/googleapis/teeny-request/issues/152)) ([89833c3](https://www.github.com/googleapis/teeny-request/commit/89833c3c3e8afea04c85a60811f122c5a6d37e48)) +* **deps:** update dependency uuid to v8 ([#164](https://www.github.com/googleapis/teeny-request/issues/164)) ([2ab8155](https://www.github.com/googleapis/teeny-request/commit/2ab81550aeb8ca914516ff4ac20ebbb7b3d73fa5)) + + +### Build System + +* drop support for node.js 8.x ([#159](https://www.github.com/googleapis/teeny-request/issues/159)) ([d87aa73](https://www.github.com/googleapis/teeny-request/commit/d87aa73d3fafbdc013b03b7629a41decda6da98a)) + +### [6.0.3](https://www.github.com/googleapis/teeny-request/compare/v6.0.2...v6.0.3) (2020-03-06) + + +### Bug Fixes + +* **deps:** update dependency uuid to v7 ([#134](https://www.github.com/googleapis/teeny-request/issues/134)) ([97817bf](https://www.github.com/googleapis/teeny-request/commit/97817bfb12396f620b2e280dcdc8965c4815abb5)) + +### [6.0.2](https://www.github.com/googleapis/teeny-request/compare/v6.0.1...v6.0.2) (2020-02-10) + + +### Bug Fixes + +* **deps:** update dependency https-proxy-agent to v5 ([#128](https://www.github.com/googleapis/teeny-request/issues/128)) ([5dcef3f](https://www.github.com/googleapis/teeny-request/commit/5dcef3f5883b24a1092def38004074d04e37e241)) + +### [6.0.1](https://www.github.com/googleapis/teeny-request/compare/v6.0.0...v6.0.1) (2020-01-24) + + +### Bug Fixes + +* **deps:** update dependency http-proxy-agent to v4 
([#121](https://www.github.com/googleapis/teeny-request/issues/121)) ([7caabcf](https://www.github.com/googleapis/teeny-request/commit/7caabcf154d8cf0848e443ce2cd4fbfae913ca41)) + +## [6.0.0](https://www.github.com/googleapis/teeny-request/compare/v5.3.3...v6.0.0) (2020-01-11) + + +### ⚠ BREAKING CHANGES + +* remove console log and throw instead (#107) + +### Bug Fixes + +* remove console log and throw instead ([#107](https://www.github.com/googleapis/teeny-request/issues/107)) ([965beaa](https://www.github.com/googleapis/teeny-request/commit/965beaae17f0273992c9856ebf79b6f1befc59fe)) + +### [5.3.3](https://www.github.com/googleapis/teeny-request/compare/v5.3.2...v5.3.3) (2019-12-15) + + +### Bug Fixes + +* **deps:** update dependency http-proxy-agent to v3 ([#104](https://www.github.com/googleapis/teeny-request/issues/104)) ([35a47d8](https://www.github.com/googleapis/teeny-request/commit/35a47d83adf92b16eab3fce52deae0e3c1353aa6)) +* **deps:** update dependency https-proxy-agent to v4 ([#105](https://www.github.com/googleapis/teeny-request/issues/105)) ([26b67af](https://www.github.com/googleapis/teeny-request/commit/26b67afcb084ce1b99a62ecc55050d6f8f8aaee4)) +* **docs:** add jsdoc-region-tag plugin ([#98](https://www.github.com/googleapis/teeny-request/issues/98)) ([8f3c35a](https://www.github.com/googleapis/teeny-request/commit/8f3c35aee711a1262ffa7c058eb1b9f18204b80e)) + +### [5.3.2](https://www.github.com/googleapis/teeny-request/compare/v5.3.1...v5.3.2) (2019-12-05) + + +### Bug Fixes + +* **deps:** pin TypeScript below 3.7.0 ([#102](https://www.github.com/googleapis/teeny-request/issues/102)) ([c0b81e6](https://www.github.com/googleapis/teeny-request/commit/c0b81e6e7c1bb7e4a3e823c2e41692bc8ede0218)) + +### [5.3.1](https://www.github.com/googleapis/teeny-request/compare/v5.3.0...v5.3.1) (2019-10-29) + + +### Bug Fixes + +* correctly set compress/gzip option when false ([#95](https://www.github.com/googleapis/teeny-request/issues/95)) ([72ef307](https://www.github.com/googleapis/teeny-request/commit/72ef307364de542af3ef8581572b1897fca2bcf4)) + +## [5.3.0](https://www.github.com/googleapis/teeny-request/compare/v5.2.1...v5.3.0) (2019-10-09) + + +### Bug Fixes + +* **deps:** update dependency https-proxy-agent to v3 ([#89](https://www.github.com/googleapis/teeny-request/issues/89)) ([dfd52cc](https://www.github.com/googleapis/teeny-request/commit/dfd52cc)) + + +### Features + +* agent pooling ([#86](https://www.github.com/googleapis/teeny-request/issues/86)) ([b182f51](https://www.github.com/googleapis/teeny-request/commit/b182f51)) + +### [5.2.1](https://www.github.com/googleapis/teeny-request/compare/v5.2.0...v5.2.1) (2019-08-14) + + +### Bug Fixes + +* **types:** make types less strict for method ([#76](https://www.github.com/googleapis/teeny-request/issues/76)) ([9f07e98](https://www.github.com/googleapis/teeny-request/commit/9f07e98)) + +## [5.2.0](https://www.github.com/googleapis/teeny-request/compare/v5.1.3...v5.2.0) (2019-08-13) + + +### Bug Fixes + +* if scheme is http:// use an HTTP agent ([#75](https://www.github.com/googleapis/teeny-request/issues/75)) ([abdf846](https://www.github.com/googleapis/teeny-request/commit/abdf846)) +* remove unused logging ([#71](https://www.github.com/googleapis/teeny-request/issues/71)) ([4cb4967](https://www.github.com/googleapis/teeny-request/commit/4cb4967)) +* undefined headers breaks compatibility with auth ([#66](https://www.github.com/googleapis/teeny-request/issues/66)) 
([12901a0](https://www.github.com/googleapis/teeny-request/commit/12901a0)) + + +### Features + +* support lazy-reading from response stream ([#74](https://www.github.com/googleapis/teeny-request/issues/74)) ([f6db420](https://www.github.com/googleapis/teeny-request/commit/f6db420)) +* support reading from the request stream ([#67](https://www.github.com/googleapis/teeny-request/issues/67)) ([ae23054](https://www.github.com/googleapis/teeny-request/commit/ae23054)) + + +### Reverts + +* do not pipe fetch response into user stream ([#72](https://www.github.com/googleapis/teeny-request/issues/72)) ([6ec812e](https://www.github.com/googleapis/teeny-request/commit/6ec812e)) + +### [5.1.3](https://www.github.com/googleapis/teeny-request/compare/v5.1.2...v5.1.3) (2019-08-06) + + +### Bug Fixes + +* duplex stream does not implement methods like _read ([#64](https://www.github.com/googleapis/teeny-request/issues/64)) ([22ee26c](https://www.github.com/googleapis/teeny-request/commit/22ee26c)) + +### [5.1.2](https://www.github.com/googleapis/teeny-request/compare/v5.1.1...v5.1.2) (2019-08-06) + + +### Bug Fixes + +* **types:** expand method and header types ([#61](https://www.github.com/googleapis/teeny-request/issues/61)) ([c04d2f1](https://www.github.com/googleapis/teeny-request/commit/c04d2f1)) + +### [5.1.1](https://www.github.com/googleapis/teeny-request/compare/v5.1.0...v5.1.1) (2019-07-23) + + +### Bug Fixes + +* support lowercase proxy env vars ([#56](https://www.github.com/googleapis/teeny-request/issues/56)) ([0b3e433](https://www.github.com/googleapis/teeny-request/commit/0b3e433)) + +## [5.1.0](https://www.github.com/googleapis/teeny-request/compare/v5.0.0...v5.1.0) (2019-07-19) + + +### Features + +* support forever option ([#54](https://www.github.com/googleapis/teeny-request/issues/54)) ([746d70e](https://www.github.com/googleapis/teeny-request/commit/746d70e)) + +## [5.0.0](https://www.github.com/googleapis/teeny-request/compare/v4.0.0...v5.0.0) (2019-07-15) + + +### ⚠ BREAKING CHANGES + +* this is our first release since moving into googleapis org; in the theme of "better safe than sorry" we're releasing as 5.0.0. + +### Bug Fixes + +* export types independent of @types/request ([#44](https://www.github.com/googleapis/teeny-request/issues/44)) ([fbe2b77](https://www.github.com/googleapis/teeny-request/commit/fbe2b77)) + + +### Reverts + +* revert 4.0.0 release in favor of 5.0.0 release ([#52](https://www.github.com/googleapis/teeny-request/issues/52)) ([f24499e](https://www.github.com/googleapis/teeny-request/commit/f24499e)) diff --git a/node_modules/teeny-request/LICENSE b/node_modules/teeny-request/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/node_modules/teeny-request/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/teeny-request/README.md b/node_modules/teeny-request/README.md new file mode 100644 index 0000000..14e2f94 --- /dev/null +++ b/node_modules/teeny-request/README.md @@ -0,0 +1,95 @@ +[![Build Status](https://travis-ci.org/googleapis/teeny-request.svg?branch=master)](https://travis-ci.org/googleapis/teeny-request) + +# teeny-request + +Like `request`, but much smaller - and with less options. Uses `node-fetch` under the hood. +Pop it in where you would use `request`. Improves load and parse time of modules. + +```js +const request = require('teeny-request').teenyRequest; + +request({uri: 'http://ip.jsontest.com/'}, function (error, response, body) { + console.log('error:', error); // Print the error if one occurred + console.log('statusCode:', response && response.statusCode); // Print the response status code if a response was received + console.log('body:', body); // Print the JSON. +}); +``` + +For TypeScript, you can use `@types/request`. + +```ts +import {teenyRequest as request} from 'teeny-request'; +import * as r from 'request'; // Only for type declarations + +request({uri: 'http://ip.jsontest.com/'}, (error: any, response: r.Response, body: any) => { + console.log('error:', error); // Print the error if one occurred + console.log('statusCode:', response && response.statusCode); // Print the response status code if a response was received + console.log('body:', body); // Print the JSON. +}); +``` + + + +## teenyRequest(options, callback) + +Options are limited to the following + +* uri +* method, default GET +* headers +* json +* qs +* useQuerystring +* timeout in ms +* gzip +* proxy + +```ts +request({uri:'http://service.com/upload', method:'POST', json: {key:'value'}}, function(err,httpResponse,body){ /* ... */ }) +``` + +The callback argument gets 3 arguments: + + * An error when applicable (usually from http.ClientRequest object) + * A response object with statusCode, a statusMessage, and a body + * The third is the response body (JSON object) + +## defaults(options) + +Set default options for every `teenyRequest` call. + +```ts +let defaultRequest = teenyRequest.defaults({timeout: 60000}); + defaultRequest({uri: 'http://ip.jsontest.com/'}, function (error, response, body) { + assert.ifError(error); + assert.strictEqual(response.statusCode, 200); + console.log(body.ip); + assert.notEqual(body.ip, null); + + done(); + }); +``` + +## Proxy environment variables +If environment variables `HTTP_PROXY`, `HTTPS_PROXY`, or `NO_PROXY` are set, they are respected. + +## Building with Webpack 4+ +Since 4.0.0, Webpack uses `javascript/esm` for `.mjs` files which handles ESM more strictly compared to `javascript/auto`. If you get the error `Can't import the named export 'PassThrough' from non EcmaScript module`, please add the following to your Webpack config: + +```js +{ + test: /\.mjs$/, + type: 'javascript/auto', +}, +``` + +## Motivation +`request` has a ton of options and features and is accordingly large. Requiring a module incurs load and parse time. For +`request`, that is around 600ms. + +![Load time of request measured with require-so-slow](https://user-images.githubusercontent.com/101553/44694187-20357700-aa3a-11e8-9116-b8ae794cbc27.png) + +`teeny-request` doesn't have any of the bells and whistles that `request` has, but is so much faster to load. If startup time is an issue and you don't need much beyond a basic GET and POST, you can use `teeny-request`. 
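Tying the option list and the proxy behaviour above together, here is a minimal sketch (the endpoint and proxy address are placeholders; the precedence note is inferred from the bundled `build/src/agents.js`, where an explicitly passed `proxy` skips the `NO_PROXY` check that only applies to proxies picked up from the environment):

```js
const {teenyRequest} = require('teeny-request');

const options = {
  uri: 'https://example.com/upload', // placeholder endpoint
  method: 'POST',
  json: {key: 'value'},
  proxy: 'http://localhost:3128', // explicit proxy; used even if NO_PROXY would exclude the host
};

teenyRequest(options, (error, response, body) => {
  if (error) {
    console.error('error:', error); // usually from the underlying http.ClientRequest
    return;
  }
  console.log('statusCode:', response.statusCode);
  console.log('body:', body); // parsed JSON body when the `json` option is set
});
```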
+ +## Thanks +Special thanks to [billyjacobson](https://github.com/billyjacobson) for suggesting the name. Please report all bugs to them. Just kidding. Please open issues. diff --git a/node_modules/teeny-request/build/src/TeenyStatistics.d.ts b/node_modules/teeny-request/build/src/TeenyStatistics.d.ts new file mode 100644 index 0000000..669cf6e --- /dev/null +++ b/node_modules/teeny-request/build/src/TeenyStatistics.d.ts @@ -0,0 +1,127 @@ +/** + * @license + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export interface TeenyStatisticsOptions { + /** + * A positive number representing when to issue a warning about the number + * of concurrent requests using teeny-request. + * Set to 0 to disable this warning. + * Corresponds to the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment + * variable. + */ + concurrentRequests?: number; +} +type TeenyStatisticsConfig = Required; +/** + * TeenyStatisticsCounters is distinct from TeenyStatisticsOptions: + * Used when dumping current counters and other internal metrics. + */ +export interface TeenyStatisticsCounters { + concurrentRequests: number; +} +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. + * @see process.emitWarning + */ +export declare class TeenyStatisticsWarning extends Error { + static readonly CONCURRENT_REQUESTS = "ConcurrentRequestsExceededWarning"; + threshold: number; + type: string; + value: number; + /** + * @param {string} message + */ + constructor(message: string); +} +/** + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. + */ +export declare class TeenyStatistics { + /** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. + * @type {number} + * @static + * @readonly + * @default 5000 + */ + static readonly DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; + /** + * @type {TeenyStatisticsConfig} + * @private + */ + private _options; + /** + * @type {number} + * @private + * @default 0 + */ + private _concurrentRequests; + /** + * @type {boolean} + * @private + * @default false + */ + private _didConcurrentRequestWarn; + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts?: TeenyStatisticsOptions); + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions(): TeenyStatisticsOptions; + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. 
+ * @see _prepareOptions + */ + setOptions(opts?: TeenyStatisticsOptions): TeenyStatisticsConfig; + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters(): TeenyStatisticsCounters; + /** + * @description Should call this right before making a request. + */ + requestStarting(): void; + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished(): void; + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + private static _prepareOptions; +} +export {}; diff --git a/node_modules/teeny-request/build/src/TeenyStatistics.js b/node_modules/teeny-request/build/src/TeenyStatistics.js new file mode 100644 index 0000000..67d0b31 --- /dev/null +++ b/node_modules/teeny-request/build/src/TeenyStatistics.js @@ -0,0 +1,156 @@ +"use strict"; +/** + * @license + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. + * @see process.emitWarning + */ +class TeenyStatisticsWarning extends Error { + /** + * @param {string} message + */ + constructor(message) { + super(message); + this.threshold = 0; + this.type = ''; + this.value = 0; + this.name = this.constructor.name; + Error.captureStackTrace(this, this.constructor); + } +} +exports.TeenyStatisticsWarning = TeenyStatisticsWarning; +TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; +/** + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. + */ +class TeenyStatistics { + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts) { + /** + * @type {number} + * @private + * @default 0 + */ + this._concurrentRequests = 0; + /** + * @type {boolean} + * @private + * @default false + */ + this._didConcurrentRequestWarn = false; + this._options = TeenyStatistics._prepareOptions(opts); + } + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions() { + return Object.assign({}, this._options); + } + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. 
+ * @see _prepareOptions + */ + setOptions(opts) { + const oldOpts = this._options; + this._options = TeenyStatistics._prepareOptions(opts); + return oldOpts; + } + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters() { + return { + concurrentRequests: this._concurrentRequests, + }; + } + /** + * @description Should call this right before making a request. + */ + requestStarting() { + this._concurrentRequests++; + if (this._options.concurrentRequests > 0 && + this._concurrentRequests >= this._options.concurrentRequests && + !this._didConcurrentRequestWarn) { + this._didConcurrentRequestWarn = true; + const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + + this._concurrentRequests + + ' requests in-flight, which exceeds the configured threshold of ' + + this._options.concurrentRequests + + '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + + 'variable or the concurrentRequests option of teeny-request to ' + + 'increase or disable (0) this warning.'); + warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; + warning.value = this._concurrentRequests; + warning.threshold = this._options.concurrentRequests; + process.emitWarning(warning); + } + } + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished() { + // TODO negative? + this._concurrentRequests--; + } + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { + let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; + const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); + if (diConcurrentRequests !== undefined) { + concurrentRequests = diConcurrentRequests; + } + else if (!Number.isNaN(envConcurrentRequests)) { + concurrentRequests = envConcurrentRequests; + } + return { concurrentRequests }; + } +} +exports.TeenyStatistics = TeenyStatistics; +/** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. 
+ * @type {number} + * @static + * @readonly + * @default 5000 + */ +TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; +//# sourceMappingURL=TeenyStatistics.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/build/src/TeenyStatistics.js.map b/node_modules/teeny-request/build/src/TeenyStatistics.js.map new file mode 100644 index 0000000..e16f12f --- /dev/null +++ b/node_modules/teeny-request/build/src/TeenyStatistics.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TeenyStatistics.js","sourceRoot":"","sources":["../../src/TeenyStatistics.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;GAeG;;;AAuBH;;;;;;GAMG;AACH,MAAa,sBAAuB,SAAQ,KAAK;IAO/C;;OAEG;IACH,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QARV,cAAS,GAAG,CAAC,CAAC;QACd,SAAI,GAAG,EAAE,CAAC;QACV,UAAK,GAAG,CAAC,CAAC;QAOf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;QAClC,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;IAClD,CAAC;;AAdH,wDAeC;AAdiB,0CAAmB,GAAG,mCAAmC,AAAtC,CAAuC;AAgB5E;;;;GAIG;AACH,MAAa,eAAe;IA+B1B;;OAEG;IACH,YAAY,IAA6B;QAjBzC;;;;WAIG;QACK,wBAAmB,GAAG,CAAC,CAAC;QAEhC;;;;WAIG;QACK,8BAAyB,GAAG,KAAK,CAAC;QAMxC,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;IACxD,CAAC;IAED;;;OAGG;IACH,UAAU;QACR,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,UAAU,CAAC,IAA6B;QACtC,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC;QAC9B,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;QACtD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;OAGG;IACH,IAAI,QAAQ;QACV,OAAO;YACL,kBAAkB,EAAE,IAAI,CAAC,mBAAmB;SAC7C,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,eAAe;QACb,IAAI,CAAC,mBAAmB,EAAE,CAAC;QAE3B,IACE,IAAI,CAAC,QAAQ,CAAC,kBAAkB,GAAG,CAAC;YACpC,IAAI,CAAC,mBAAmB,IAAI,IAAI,CAAC,QAAQ,CAAC,kBAAkB;YAC5D,CAAC,IAAI,CAAC,yBAAyB,EAC/B;YACA,IAAI,CAAC,yBAAyB,GAAG,IAAI,CAAC;YACtC,MAAM,OAAO,GAAG,IAAI,sBAAsB,CACxC,mDAAmD;gBACjD,IAAI,CAAC,mBAAmB;gBACxB,iEAAiE;gBACjE,IAAI,CAAC,QAAQ,CAAC,kBAAkB;gBAChC,+DAA+D;gBAC/D,gEAAgE;gBAChE,uCAAuC,CAC1C,CAAC;YACF,OAAO,CAAC,IAAI,GAAG,sBAAsB,CAAC,mBAAmB,CAAC;YAC1D,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC,mBAAmB,CAAC;YACzC,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,kBAAkB,CAAC;YACrD,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9B;IACH,CAAC;IAED;;;OAGG;IACH,eAAe;QACb,iBAAiB;QACjB,IAAI,CAAC,mBAAmB,EAAE,CAAC;IAC7B,CAAC;IAED;;;;;;;;;OASG;IACK,MAAM,CAAC,eAAe,CAAC,EAC7B,kBAAkB,EAAE,oBAAoB,MACd,EAAE;QAC5B,IAAI,kBAAkB,GAAG,IAAI,CAAC,gCAAgC,CAAC;QAE/D,MAAM,qBAAqB,GAAG,MAAM,CAClC,OAAO,CAAC,GAAG,CAAC,sCAAsC,CACnD,CAAC;QACF,IAAI,oBAAoB,KAAK,SAAS,EAAE;YACtC,kBAAkB,GAAG,oBAAoB,CAAC;SAC3C;aAAM,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;YAC/C,kBAAkB,GAAG,qBAAqB,CAAC;SAC5C;QAED,OAAO,EAAC,kBAAkB,EAAC,CAAC;IAC9B,CAAC;;AAnIH,0CAoIC;AAnIC;;;;;;;GAOG;AACa,gDAAgC,GAAG,IAAI,AAAP,CAAQ"} \ No newline at end of file diff --git a/node_modules/teeny-request/build/src/agents.d.ts b/node_modules/teeny-request/build/src/agents.d.ts new file mode 100644 index 0000000..afa86ba --- /dev/null +++ b/node_modules/teeny-request/build/src/agents.d.ts @@ -0,0 +1,32 @@ +/** + * @license + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +/// +/// +import { Agent as HTTPAgent } from 'http'; +import { Agent as HTTPSAgent } from 'https'; +import { Options } from './'; +export declare const pool: Map; +export type HttpAnyAgent = HTTPAgent | HTTPSAgent; +/** + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. + * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} + */ +export declare function getAgent(uri: string, reqOpts: Options): HttpAnyAgent | undefined; diff --git a/node_modules/teeny-request/build/src/agents.js b/node_modules/teeny-request/build/src/agents.js new file mode 100644 index 0000000..07c0e44 --- /dev/null +++ b/node_modules/teeny-request/build/src/agents.js @@ -0,0 +1,89 @@ +"use strict"; +/** + * @license + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getAgent = exports.pool = void 0; +const http_1 = require("http"); +const https_1 = require("https"); +// eslint-disable-next-line node/no-deprecated-api +const url_1 = require("url"); +exports.pool = new Map(); +/** + * Determines if a proxy should be considered based on the environment. + * + * @param uri The request uri + * @returns {boolean} + */ +function shouldUseProxyForURI(uri) { + const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; + if (!noProxyEnv) { + return true; + } + const givenURI = new URL(uri); + for (const noProxyRaw of noProxyEnv.split(',')) { + const noProxy = noProxyRaw.trim(); + if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { + return false; + } + else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { + const noProxyWildcard = noProxy.replace(/^\*\./, '.'); + if (givenURI.hostname.endsWith(noProxyWildcard)) { + return false; + } + } + } + return true; +} +/** + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. + * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} + */ +function getAgent(uri, reqOpts) { + const isHttp = uri.startsWith('http://'); + const proxy = reqOpts.proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || + process.env.HTTPS_PROXY || + process.env.https_proxy; + const poolOptions = Object.assign({}, reqOpts.pool); + const manuallyProvidedProxy = !!reqOpts.proxy; + const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); + if (proxy && shouldUseProxy) { + // tslint:disable-next-line variable-name + const Agent = isHttp + ? 
require('http-proxy-agent') + : require('https-proxy-agent'); + const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; + return new Agent(proxyOpts); + } + let key = isHttp ? 'http' : 'https'; + if (reqOpts.forever) { + key += ':forever'; + if (!exports.pool.has(key)) { + // tslint:disable-next-line variable-name + const Agent = isHttp ? http_1.Agent : https_1.Agent; + exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + } + } + return exports.pool.get(key); +} +exports.getAgent = getAgent; +//# sourceMappingURL=agents.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/build/src/agents.js.map b/node_modules/teeny-request/build/src/agents.js.map new file mode 100644 index 0000000..a3e1403 --- /dev/null +++ b/node_modules/teeny-request/build/src/agents.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agents.js","sourceRoot":"","sources":["../../src/agents.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;GAeG;;;AAEH,+BAAwC;AACxC,iCAA0C;AAC1C,kDAAkD;AAClD,6BAA0B;AAGb,QAAA,IAAI,GAAG,IAAI,GAAG,EAAqB,CAAC;AAIjD;;;;;GAKG;AACH,SAAS,oBAAoB,CAAC,GAAW;IACvC,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,IAAI,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC;IAChE,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,IAAI,CAAC;KACb;IAED,MAAM,QAAQ,GAAG,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC;IAE9B,KAAK,MAAM,UAAU,IAAI,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE;QAC9C,MAAM,OAAO,GAAG,UAAU,CAAC,IAAI,EAAE,CAAC;QAElC,IAAI,OAAO,KAAK,QAAQ,CAAC,MAAM,IAAI,OAAO,KAAK,QAAQ,CAAC,QAAQ,EAAE;YAChE,OAAO,KAAK,CAAC;SACd;aAAM,IAAI,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YAC9D,MAAM,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAEtD,IAAI,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE;gBAC/C,OAAO,KAAK,CAAC;aACd;SACF;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;;;;GAOG;AACH,SAAgB,QAAQ,CACtB,GAAW,EACX,OAAgB;IAEhB,MAAM,MAAM,GAAG,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IACzC,MAAM,KAAK,GACT,OAAO,CAAC,KAAK;QACb,OAAO,CAAC,GAAG,CAAC,UAAU;QACtB,OAAO,CAAC,GAAG,CAAC,UAAU;QACtB,OAAO,CAAC,GAAG,CAAC,WAAW;QACvB,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC;IAE1B,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC;IAEpD,MAAM,qBAAqB,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;IAC9C,MAAM,cAAc,GAAG,qBAAqB,IAAI,oBAAoB,CAAC,GAAG,CAAC,CAAC;IAE1E,IAAI,KAAK,IAAI,cAAc,EAAE;QAC3B,yCAAyC;QACzC,MAAM,KAAK,GAAG,MAAM;YAClB,CAAC,CAAC,OAAO,CAAC,kBAAkB,CAAC;YAC7B,CAAC,CAAC,OAAO,CAAC,mBAAmB,CAAC,CAAC;QAEjC,MAAM,SAAS,GAAG,EAAC,GAAG,IAAA,WAAK,EAAC,KAAK,CAAC,EAAE,GAAG,WAAW,EAAC,CAAC;QACpD,OAAO,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC;KAC7B;IAED,IAAI,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC;IAEpC,IAAI,OAAO,CAAC,OAAO,EAAE;QACnB,GAAG,IAAI,UAAU,CAAC;QAElB,IAAI,CAAC,YAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;YAClB,yCAAyC;YACzC,MAAM,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC,YAAS,CAAC,CAAC,CAAC,aAAU,CAAC;YAC9C,YAAI,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,KAAK,CAAC,EAAC,GAAG,WAAW,EAAE,SAAS,EAAE,IAAI,EAAC,CAAC,CAAC,CAAC;SAC7D;KACF;IAED,OAAO,YAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;AACvB,CAAC;AAxCD,4BAwCC"} \ No newline at end of file diff --git a/node_modules/teeny-request/build/src/index.d.ts b/node_modules/teeny-request/build/src/index.d.ts new file mode 100644 index 0000000..f5585b9 --- /dev/null +++ b/node_modules/teeny-request/build/src/index.d.ts @@ -0,0 +1,76 @@ +/** + * @license + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/// +/// +/// +import { Agent, AgentOptions as HttpsAgentOptions } from 'https'; +import { AgentOptions as HttpAgentOptions } from 'http'; +import { PassThrough, Readable } from 'stream'; +import { TeenyStatistics } from './TeenyStatistics'; +export interface CoreOptions { + method?: string; + timeout?: number; + gzip?: boolean; + json?: any; + headers?: Headers; + body?: string | {}; + useQuerystring?: boolean; + qs?: any; + proxy?: string; + multipart?: RequestPart[]; + forever?: boolean; + pool?: HttpsAgentOptions | HttpAgentOptions; +} +export interface OptionsWithUri extends CoreOptions { + uri: string; +} +export interface OptionsWithUrl extends CoreOptions { + url: string; +} +export type Options = OptionsWithUri | OptionsWithUrl; +export interface Request extends PassThrough { + agent: Agent | false; + headers: Headers; + href?: string; +} +export interface Response { + statusCode: number; + headers: Headers; + body: T; + request: Request; + statusMessage?: string; +} +export interface RequestPart { + body: string | Readable; +} +export interface RequestCallback { + (err: Error | null, response: Response, body?: T): void; +} +export declare class RequestError extends Error { + code?: number; +} +interface Headers { + [index: string]: any; +} +declare function teenyRequest(reqOpts: Options): Request; +declare function teenyRequest(reqOpts: Options, callback: RequestCallback): void; +declare namespace teenyRequest { + var defaults: (defaults: CoreOptions) => (reqOpts: Options, callback?: RequestCallback | undefined) => void | Request; + var stats: TeenyStatistics; + var resetStats: () => void; +} +export { teenyRequest }; diff --git a/node_modules/teeny-request/build/src/index.js b/node_modules/teeny-request/build/src/index.js new file mode 100644 index 0000000..af5d15e --- /dev/null +++ b/node_modules/teeny-request/build/src/index.js @@ -0,0 +1,253 @@ +"use strict"; +/** + * @license + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.teenyRequest = exports.RequestError = void 0; +const node_fetch_1 = require("node-fetch"); +const stream_1 = require("stream"); +const uuid = require("uuid"); +const agents_1 = require("./agents"); +const TeenyStatistics_1 = require("./TeenyStatistics"); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const streamEvents = require('stream-events'); +class RequestError extends Error { +} +exports.RequestError = RequestError; +/** + * Convert options from Request to Fetch format + * @private + * @param reqOpts Request options + */ +function requestToFetchOptions(reqOpts) { + const options = { + method: reqOpts.method || 'GET', + ...(reqOpts.timeout && { timeout: reqOpts.timeout }), + ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), + }; + if (typeof reqOpts.json === 'object') { + // Add Content-type: application/json header + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['Content-Type'] = 'application/json'; + // Set body to JSON representation of value + options.body = JSON.stringify(reqOpts.json); + } + else { + if (Buffer.isBuffer(reqOpts.body)) { + options.body = reqOpts.body; + } + else if (typeof reqOpts.body !== 'string') { + options.body = JSON.stringify(reqOpts.body); + } + else { + options.body = reqOpts.body; + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options.headers = reqOpts.headers; + let uri = (reqOpts.uri || + reqOpts.url); + if (!uri) { + throw new Error('Missing uri or url in reqOpts.'); + } + if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const qs = require('querystring'); + const params = qs.stringify(reqOpts.qs); + uri = uri + '?' + params; + } + options.agent = (0, agents_1.getAgent)(uri, reqOpts); + return { uri, options }; +} +/** + * Convert a response from `fetch` to `request` format. + * @private + * @param opts The `request` options used to create the request. 
+ * @param res The Fetch response + * @returns A `request` response object + */ +function fetchToRequestResponse(opts, res) { + const request = {}; + request.agent = opts.agent || false; + request.headers = (opts.headers || {}); + request.href = res.url; + // headers need to be converted from a map to an obj + const resHeaders = {}; + res.headers.forEach((value, key) => (resHeaders[key] = value)); + const response = Object.assign(res.body, { + statusCode: res.status, + statusMessage: res.statusText, + request, + body: res.body, + headers: resHeaders, + toJSON: () => ({ headers: resHeaders }), + }); + return response; +} +/** + * Create POST body from two parts as multipart/related content-type + * @private + * @param boundary + * @param multipart + */ +function createMultipartStream(boundary, multipart) { + const finale = `--${boundary}--`; + const stream = new stream_1.PassThrough(); + for (const part of multipart) { + const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; + stream.write(preamble); + if (typeof part.body === 'string') { + stream.write(part.body); + stream.write('\r\n'); + } + else { + part.body.pipe(stream, { end: false }); + part.body.on('end', () => { + stream.write('\r\n'); + stream.write(finale); + stream.end(); + }); + } + } + return stream; +} +function teenyRequest(reqOpts, callback) { + const { uri, options } = requestToFetchOptions(reqOpts); + const multipart = reqOpts.multipart; + if (reqOpts.multipart && multipart.length === 2) { + if (!callback) { + // TODO: add support for multipart uploads through streaming + throw new Error('Multipart without callback is not implemented.'); + } + const boundary = uuid.v4(); + options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + options.body = createMultipartStream(boundary, multipart); + // Multipart upload + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, (err) => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; + } + if (callback === undefined) { + // Stream mode + const requestStream = streamEvents(new stream_1.PassThrough()); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let responseStream; + requestStream.once('reading', () => { + if (responseStream) { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + } + else { + requestStream.once('response', () => { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + }); + } + }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + responseStream = res.body; + responseStream.on('error', (err) => { + requestStream.emit('error', err); + }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { + teenyRequest.stats.requestFinished(); + requestStream.emit('error', err); 
+ }); + // fetch doesn't supply the raw HTTP stream, instead it + // returns a PassThrough piped from the HTTP response + // stream. + return requestStream; + } + // GET or POST with callback + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + if (response.statusCode === 204) { + // Probably a DELETE + callback(null, response, body); + return; + } + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, err => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + const response = fetchToRequestResponse(options, res); + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; +} +exports.teenyRequest = teenyRequest; +teenyRequest.defaults = (defaults) => { + return (reqOpts, callback) => { + const opts = { ...defaults, ...reqOpts }; + if (callback === undefined) { + return teenyRequest(opts); + } + teenyRequest(opts, callback); + }; +}; +/** + * Single instance of an interface for keeping track of things. + */ +teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); +teenyRequest.resetStats = () => { + teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/build/src/index.js.map b/node_modules/teeny-request/build/src/index.js.map new file mode 100644 index 0000000..1513c49 --- /dev/null +++ b/node_modules/teeny-request/build/src/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;GAeG;;;AAIH,2CAAuC;AACvC,mCAAuD;AACvD,6BAA6B;AAC7B,qCAAkC;AAClC,uDAAkD;AAClD,8DAA8D;AAC9D,MAAM,YAAY,GAAG,OAAO,CAAC,eAAe,CAAC,CAAC;AAqD9C,MAAa,YAAa,SAAQ,KAAK;CAEtC;AAFD,oCAEC;AAOD;;;;GAIG;AACH,SAAS,qBAAqB,CAAC,OAAgB;IAC7C,MAAM,OAAO,GAAkB;QAC7B,MAAM,EAAE,OAAO,CAAC,MAAM,IAAI,KAAK;QAC/B,GAAG,CAAC,OAAO,CAAC,OAAO,IAAI,EAAC,OAAO,EAAE,OAAO,CAAC,OAAO,EAAC,CAAC;QAClD,GAAG,CAAC,OAAO,OAAO,CAAC,IAAI,KAAK,SAAS,IAAI,EAAC,QAAQ,EAAE,OAAO,CAAC,IAAI,EAAC,CAAC;KACnE,CAAC;IAEF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE;QACpC,4CAA4C;QAC5C,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,EAAE,CAAC;QACxC,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC;QAErD,2CAA2C;QAC3C,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;KAC7C;SAAM;QACL,IAAI,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;YACjC,OAAO,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;SAC7B;aAAM,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE;YAC3C,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;SAC7C;aAAM;YACL,OAAO,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;SAC7B;KACF;IAED,8DAA8D;IAC9D,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC,OAAc,CAAC;IAEzC,IAAI,GAAG,GAAG,CAAE,OAA0B,CAAC,GAAG;QACvC,OAA0B,CAAC,GAAG,CAAW,CAAC;IAE7C,IAAI,CAAC,GAAG,EAAE;QACR,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;KACnD;IAED,IAAI,OAAO,CAAC,cAAc,KAAK,IAAI,IAAI,OAAO,OAAO,CAAC,EAAE,KAAK,QAAQ,EAAE;QACrE,8DAA8D;QAC9D,MAAM,EAAE,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC;QAClC,MAAM,MAAM,GAAG,EAAE,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;QACxC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,MAAM,CAAC;KAC1B;IAED,OAAO,CAAC,KAAK,GAAG,IAAA,iBAAQ,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC;IAEvC,OAAO,EAAC,GAAG,EAAE,OAAO,EAAC,CAAC;AACxB,CAAC;AAED;;;;;;GAMG;AACH,SAAS,sBAAsB,CAAC,IAAmB,EAAE,GAAe;IAClE,MAAM,OAAO,GAAG,EAAa,CAAC;IAC9B,OAAO,CAAC,KAAK,GAAI,IAAI,CAAC,KAAe,IAAI,KAAK,CAAC;IAC/C,OAAO,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,OAAO,IAAI,EAAE,CAAY,CAAC;IAClD,OAAO,CAAC,IAAI,GAAG,GAAG,CAAC,GAAG,CAAC;IACvB,oDAAoD;IACpD,MAAM,UAAU,GAAG,EAAa,CAAC;IACjC,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC;IAE/D,MAAM,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE;QACvC,UAAU,EAAE,GAAG,CAAC,MAAM;QACtB,aAAa,EAAE,GAAG,CAAC,UAAU;QAC7B,OAAO;QACP,IAAI,EAAE,GAAG,CAAC,IAAI;QACd,OAAO,EAAE,UAAU;QACnB,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,EAAC,OAAO,EAAE,UAAU,EAAC,CAAC;KACtC,CAAC,CAAC;IAEH,OAAO,QAAoB,CAAC;AAC9B,CAAC;AAED;;;;;GAKG;AACH,SAAS,qBAAqB,CAAC,QAAgB,EAAE,SAAwB;IACvE,MAAM,MAAM,GAAG,KAAK,QAAQ,IAAI,CAAC;IACjC,MAAM,MAAM,GAAgB,IAAI,oBAAW,EAAE,CAAC;IAE9C,KAAK,MAAM,IAAI,IAAI,SAAS,EAAE;QAC5B,MAAM,QAAQ,GAAG,KAAK,QAAQ,qBAC3B,IAAoC,CAAC,cAAc,CACtD,UAAU,CAAC;QACX,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QACvB,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxB,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;SACtB;aAAM;YACL,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAC,GAAG,EAAE,KAAK,EAAC,CAAC,CAAC;YACrC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;gBACvB,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBACrB,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBACrB,MAAM,CAAC,GAAG,EAAE,CAAC;YACf,CAAC,CAAC,CAAC;SACJ;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAID,SAAS,YAAY,CACnB,OAAgB,EAChB,QAA0B;IAE1B,MAAM,EAAC,GAAG,EAAE,OAAO,EAAC,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC;IAEtD,MAAM,SAAS,GAAG,OAAO,CAAC,SAA0B,CAAC;IACrD,IAAI,OAAO,CAAC,SAAS,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;QAC/C,IAAI,CAAC,QAAQ,EAAE;YACb,4DAA4D;YAC5D,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;SACnE;QACD,MAAM,QAAQ,GAAW,IAAI,CAAC,EAAE,EAAE,CAAC;QAClC,OAAO,CAAC,OAAmB,CAC1B,cAAc,CACf,GAAG,+B
AA+B,QAAQ,EAAE,CAAC;QAC9C,OAAO,CAAC,IAAI,GAAG,qBAAqB,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;QAE1D,mBAAmB;QACnB,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;QACrC,IAAA,oBAAK,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC,IAAI,CACtB,GAAG,CAAC,EAAE;YACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;YACrC,MAAM,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAC/C,MAAM,QAAQ,GAAG,sBAAsB,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACtD,MAAM,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC;YAC3B,IACE,MAAM,KAAK,kBAAkB;gBAC7B,MAAM,KAAK,iCAAiC,EAC5C;gBACA,GAAG,CAAC,IAAI,EAAE,CAAC,IAAI,CACb,IAAI,CAAC,EAAE;oBACL,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;oBACrB,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;gBACjC,CAAC,EACD,CAAC,GAAU,EAAE,EAAE;oBACb,QAAQ,CAAC,GAAG,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;gBAChC,CAAC,CACF,CAAC;gBACF,OAAO;aACR;YAED,GAAG,CAAC,IAAI,EAAE,CAAC,IAAI,CACb,IAAI,CAAC,EAAE;gBACL,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;gBACrB,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;YACjC,CAAC,EACD,GAAG,CAAC,EAAE;gBACJ,QAAQ,CAAC,GAAG,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;YAChC,CAAC,CACF,CAAC;QACJ,CAAC,EACD,GAAG,CAAC,EAAE;YACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;YACrC,QAAQ,CAAC,GAAG,EAAE,IAAK,EAAE,IAAI,CAAC,CAAC;QAC7B,CAAC,CACF,CAAC;QACF,OAAO;KACR;IAED,IAAI,QAAQ,KAAK,SAAS,EAAE;QAC1B,cAAc;QACd,MAAM,aAAa,GAAG,YAAY,CAAC,IAAI,oBAAW,EAAE,CAAC,CAAC;QACtD,8DAA8D;QAC9D,IAAI,cAAmB,CAAC;QACxB,aAAa,CAAC,IAAI,CAAC,SAAS,EAAE,GAAG,EAAE;YACjC,IAAI,cAAc,EAAE;gBAClB,IAAA,iBAAQ,EAAC,cAAc,EAAE,aAAa,EAAE,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;aACnD;iBAAM;gBACL,aAAa,CAAC,IAAI,CAAC,UAAU,EAAE,GAAG,EAAE;oBAClC,IAAA,iBAAQ,EAAC,cAAc,EAAE,aAAa,EAAE,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;gBACpD,CAAC,CAAC,CAAC;aACJ;QACH,CAAC,CAAC,CAAC;QACH,OAAO,CAAC,QAAQ,GAAG,KAAK,CAAC;QAEzB,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;QACrC,IAAA,oBAAK,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC,IAAI,CACtB,GAAG,CAAC,EAAE;YACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;YACrC,cAAc,GAAG,GAAG,CAAC,IAAI,CAAC;YAE1B,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;gBACxC,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACnC,CAAC,CAAC,CAAC;YAEH,MAAM,QAAQ,GAAG,sBAAsB,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACtD,aAAa,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,CAAC,EACD,GAAG,CAAC,EAAE;YACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;YACrC,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QACnC,CAAC,CACF,CAAC;QAEF,uDAAuD;QACvD,qDAAqD;QACrD,UAAU;QACV,OAAO,aAAwB,CAAC;KACjC;IAED,4BAA4B;IAC5B,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;IACrC,IAAA,oBAAK,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC,IAAI,CACtB,GAAG,CAAC,EAAE;QACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;QACrC,MAAM,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;QAC/C,MAAM,QAAQ,GAAG,sBAAsB,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QACtD,MAAM,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC;QAC3B,IACE,MAAM,KAAK,kBAAkB;YAC7B,MAAM,KAAK,iCAAiC,EAC5C;YACA,IAAI,QAAQ,CAAC,UAAU,KAAK,GAAG,EAAE;gBAC/B,oBAAoB;gBACpB,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;gBAC/B,OAAO;aACR;YACD,GAAG,CAAC,IAAI,EAAE,CAAC,IAAI,CACb,IAAI,CAAC,EAAE;gBACL,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;gBACrB,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;YACjC,CAAC,EACD,GAAG,CAAC,EAAE;gBACJ,QAAQ,CAAC,GAAG,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;YAChC,CAAC,CACF,CAAC;YACF,OAAO;SACR;QAED,GAAG,CAAC,IAAI,EAAE,CAAC,IAAI,CACb,IAAI,CAAC,EAAE;YACL,MAAM,QAAQ,GAAG,sBAAsB,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACtD,QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC;YACrB,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;QACjC,CAAC,EACD,GAAG,CAAC,EAAE;YACJ,QAAQ,CAAC,GAAG,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;QAChC,CAAC,CACF,CAAC;IACJ,CAAC,EACD,GAAG,CAAC,EAAE;QACJ,YAAY,CAAC,KAAK,CAAC,eAAe,EAAE,CAAC;QACrC,QAAQ,CAAC,GAAG,EAAE,IAAK,EAAE,IAAI,CAAC,CAAC;IAC7B,CAAC,CACF,CAAC;IACF,OAAO;AACT,CAAC;AAqBO,oCAAY;AAnBpB,YAAY,CAAC,QAAQ,GAAG,CAAC,QAAqB,EAAE,E
AAE;IAChD,OAAO,CAAC,OAAgB,EAAE,QAA0B,EAAkB,EAAE;QACtE,MAAM,IAAI,GAAG,EAAC,GAAG,QAAQ,EAAE,GAAG,OAAO,EAAC,CAAC;QACvC,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,OAAO,YAAY,CAAC,IAAI,CAAC,CAAC;SAC3B;QACD,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IAC/B,CAAC,CAAC;AACJ,CAAC,CAAC;AAEF;;GAEG;AACH,YAAY,CAAC,KAAK,GAAG,IAAI,iCAAe,EAAE,CAAC;AAE3C,YAAY,CAAC,UAAU,GAAG,GAAS,EAAE;IACnC,YAAY,CAAC,KAAK,GAAG,IAAI,iCAAe,CAAC,YAAY,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC,CAAC;AAC5E,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/.bin/uuid b/node_modules/teeny-request/node_modules/.bin/uuid new file mode 100644 index 0000000..c3ec003 --- /dev/null +++ b/node_modules/teeny-request/node_modules/.bin/uuid @@ -0,0 +1,12 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../uuid/dist/bin/uuid" "$@" +else + exec node "$basedir/../uuid/dist/bin/uuid" "$@" +fi diff --git a/node_modules/teeny-request/node_modules/.bin/uuid.cmd b/node_modules/teeny-request/node_modules/.bin/uuid.cmd new file mode 100644 index 0000000..0f2376e --- /dev/null +++ b/node_modules/teeny-request/node_modules/.bin/uuid.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\uuid\dist\bin\uuid" %* diff --git a/node_modules/teeny-request/node_modules/.bin/uuid.ps1 b/node_modules/teeny-request/node_modules/.bin/uuid.ps1 new file mode 100644 index 0000000..7804628 --- /dev/null +++ b/node_modules/teeny-request/node_modules/.bin/uuid.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } else { + & "$basedir/node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } else { + & "node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/teeny-request/node_modules/agent-base/README.md b/node_modules/teeny-request/node_modules/agent-base/README.md new file mode 100644 index 0000000..256f1f3 --- /dev/null +++ b/node_modules/teeny-request/node_modules/agent-base/README.md @@ -0,0 +1,145 @@ +agent-base +========== +### Turn a function into an [`http.Agent`][http.Agent] instance +[![Build Status](https://github.com/TooTallNate/node-agent-base/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-agent-base/actions?workflow=Node+CI) + +This module provides an `http.Agent` generator. That is, you pass it an async +callback function, and it returns a new `http.Agent` instance that will invoke the +given callback function when sending outbound HTTP requests. + +#### Some subclasses: + +Here's some more interesting uses of `agent-base`. +Send a pull request to list yours! 
+ + * [`http-proxy-agent`][http-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTP endpoints + * [`https-proxy-agent`][https-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTPS endpoints + * [`pac-proxy-agent`][pac-proxy-agent]: A PAC file proxy `http.Agent` implementation for HTTP and HTTPS + * [`socks-proxy-agent`][socks-proxy-agent]: A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS + + +Installation +------------ + +Install with `npm`: + +``` bash +$ npm install agent-base +``` + + +Example +------- + +Here's a minimal example that creates a new `net.Socket` connection to the server +for every HTTP request (i.e. the equivalent of `agent: false` option): + +```js +var net = require('net'); +var tls = require('tls'); +var url = require('url'); +var http = require('http'); +var agent = require('agent-base'); + +var endpoint = 'http://nodejs.org/api/'; +var parsed = url.parse(endpoint); + +// This is the important part! +parsed.agent = agent(function (req, opts) { + var socket; + // `secureEndpoint` is true when using the https module + if (opts.secureEndpoint) { + socket = tls.connect(opts); + } else { + socket = net.connect(opts); + } + return socket; +}); + +// Everything else works just like normal... +http.get(parsed, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + +Returning a Promise or using an `async` function is also supported: + +```js +agent(async function (req, opts) { + await sleep(1000); + // etc… +}); +``` + +Return another `http.Agent` instance to "pass through" the responsibility +for that HTTP request to that agent: + +```js +agent(function (req, opts) { + return opts.secureEndpoint ? https.globalAgent : http.globalAgent; +}); +``` + + +API +--- + +## Agent(Function callback[, Object options]) → [http.Agent][] + +Creates a base `http.Agent` that will execute the callback function `callback` +for every HTTP request that it is used as the `agent` for. The callback function +is responsible for creating a `stream.Duplex` instance of some kind that will be +used as the underlying socket in the HTTP request. + +The `options` object accepts the following properties: + + * `timeout` - Number - Timeout for the `callback()` function in milliseconds. Defaults to Infinity (optional). + +The callback function should have the following signature: + +### callback(http.ClientRequest req, Object options, Function cb) → undefined + +The ClientRequest `req` can be accessed to read request headers and +and the path, etc. The `options` object contains the options passed +to the `http.request()`/`https.request()` function call, and is formatted +to be directly passed to `net.connect()`/`tls.connect()`, or however +else you want a Socket to be created. Pass the created socket to +the callback function `cb` once created, and the HTTP request will +continue to proceed. + +If the `https` module is used to invoke the HTTP request, then the +`secureEndpoint` property on `options` _will be set to `true`_. 
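A minimal sketch of the legacy three-argument callback form described in the API section (the `legacyAgent` variable name is illustrative only, not part of this module): the socket is passed to `cb` rather than returned or resolved.

```js
var net = require('net');
var tls = require('tls');
var agent = require('agent-base');

// Legacy form: agent-base detects the three-parameter signature and
// waits for cb(err, socket) instead of a return value or Promise.
var legacyAgent = agent(function (req, opts, cb) {
  var socket;
  // `secureEndpoint` is true when the request came through the https module
  if (opts.secureEndpoint) {
    socket = tls.connect(opts);
  } else {
    socket = net.connect(opts);
  }
  cb(null, socket);
});
```

As in the earlier example, `legacyAgent` can then be supplied as the `agent` option of an `http.get()` or `https.get()` call.
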
+ + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +[http-proxy-agent]: https://github.com/TooTallNate/node-http-proxy-agent +[https-proxy-agent]: https://github.com/TooTallNate/node-https-proxy-agent +[pac-proxy-agent]: https://github.com/TooTallNate/node-pac-proxy-agent +[socks-proxy-agent]: https://github.com/TooTallNate/node-socks-proxy-agent +[http.Agent]: https://nodejs.org/api/http.html#http_class_http_agent diff --git a/node_modules/teeny-request/node_modules/agent-base/dist/src/index.d.ts b/node_modules/teeny-request/node_modules/agent-base/dist/src/index.d.ts new file mode 100644 index 0000000..bc4ab74 --- /dev/null +++ b/node_modules/teeny-request/node_modules/agent-base/dist/src/index.d.ts @@ -0,0 +1,78 @@ +/// +import net from 'net'; +import http from 'http'; +import https from 'https'; +import { Duplex } from 'stream'; +import { EventEmitter } from 'events'; +declare function createAgent(opts?: createAgent.AgentOptions): createAgent.Agent; +declare function createAgent(callback: createAgent.AgentCallback, opts?: createAgent.AgentOptions): createAgent.Agent; +declare namespace createAgent { + interface ClientRequest extends http.ClientRequest { + _last?: boolean; + _hadError?: boolean; + method: string; + } + interface AgentRequestOptions { + host?: string; + path?: string; + port: number; + } + interface HttpRequestOptions extends AgentRequestOptions, Omit { + secureEndpoint: false; + } + interface HttpsRequestOptions extends AgentRequestOptions, Omit { + secureEndpoint: true; + } + type RequestOptions = HttpRequestOptions | HttpsRequestOptions; + type AgentLike = Pick | http.Agent; + type AgentCallbackReturn = Duplex | AgentLike; + type AgentCallbackCallback = (err?: Error | null, socket?: createAgent.AgentCallbackReturn) => void; + type AgentCallbackPromise = (req: createAgent.ClientRequest, opts: createAgent.RequestOptions) => createAgent.AgentCallbackReturn | Promise; + type AgentCallback = typeof Agent.prototype.callback; + type AgentOptions = { + timeout?: number; + }; + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends EventEmitter { + timeout: number | null; + maxFreeSockets: number; + maxTotalSockets: number; + maxSockets: number; + sockets: { + [key: string]: net.Socket[]; + }; + freeSockets: { + [key: string]: net.Socket[]; + }; + requests: { + [key: string]: http.IncomingMessage[]; + }; + options: https.AgentOptions; + private promisifiedCallback?; + private explicitDefaultPort?; + private explicitProtocol?; + constructor(callback?: createAgent.AgentCallback | createAgent.AgentOptions, _opts?: createAgent.AgentOptions); + get defaultPort(): number; + set defaultPort(v: number); + get protocol(): string; + set protocol(v: string); + callback(req: createAgent.ClientRequest, opts: createAgent.RequestOptions, fn: createAgent.AgentCallbackCallback): void; + callback(req: createAgent.ClientRequest, opts: createAgent.RequestOptions): createAgent.AgentCallbackReturn | Promise; + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req: ClientRequest, _opts: RequestOptions): void; + freeSocket(socket: net.Socket, opts: AgentOptions): void; + destroy(): void; + } +} +export = createAgent; diff --git a/node_modules/teeny-request/node_modules/agent-base/dist/src/index.js b/node_modules/teeny-request/node_modules/agent-base/dist/src/index.js new file mode 100644 index 0000000..bfd9e22 --- /dev/null +++ b/node_modules/teeny-request/node_modules/agent-base/dist/src/index.js @@ -0,0 +1,203 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = require("events"); +const debug_1 = __importDefault(require("debug")); +const promisify_1 = __importDefault(require("./promisify")); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. + * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 
'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. + req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) 
{ + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/agent-base/dist/src/index.js.map b/node_modules/teeny-request/node_modules/agent-base/dist/src/index.js.map new file mode 100644 index 0000000..bd118ab --- /dev/null +++ b/node_modules/teeny-request/node_modules/agent-base/dist/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAIA,mCAAsC;AACtC,kDAAgC;AAChC,4DAAoC;AAEpC,MAAM,KAAK,GAAG,eAAW,CAAC,YAAY,CAAC,CAAC;AAExC,SAAS,OAAO,CAAC,CAAM;IACtB,OAAO,OAAO,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,UAAU,KAAK,UAAU,CAAC;AACzD,CAAC;AAED,SAAS,gBAAgB;IACxB,MAAM,EAAE,KAAK,EAAE,GAAG,IAAI,KAAK,EAAE,CAAC;IAC9B,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,KAAK,CAAC;IAC5C,OAAO,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,IAAK,CAAC,CAAC,OAAO,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACxG,CAAC;AAOD,SAAS,WAAW,CACnB,QAA+D,EAC/D,IAA+B;IAE/B,OAAO,IAAI,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC9C,CAAC;AAED,WAAU,WAAW;IAmDpB;;;;;;OAMG;IACH,MAAa,KAAM,SAAQ,qBAAY;QAmBtC,YACC,QAA+D,EAC/D,KAAgC;YAEhC,KAAK,EAAE,CAAC;YAER,IAAI,IAAI,GAAG,KAAK,CAAC;YACjB,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;gBACnC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;aACzB;iBAAM,IAAI,QAAQ,EAAE;gBACpB,IAAI,GAAG,QAAQ,CAAC;aAChB;YAED,0DAA0D;YAC1D,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;aAC5B;YAED,+DAA+D;YAC/D,0DAA0D;YAC1D,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;YACxB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;YACpB,IAAI,CAAC,eAAe,GAAG,QAAQ,CAAC;YAChC,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;YAClB,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;YACtB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAC;YACnB,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;QACnB,CAAC;QAED,IAAI,WAAW;YACd,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;gBACjD,OAAO,IAAI,CAAC,mBAAmB,CAAC;aAChC;YACD,OAAO,gBAAgB,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;QACtC,CAAC;QAED,IAAI,WAAW,CAAC,CAAS;YACxB,IAAI,CAAC,mBAAmB,GAAG,CAAC,CAAC;QAC9B,CAAC;QAED,IAAI,QAAQ;YACX,IAAI,OAAO,IAAI,CAAC,gBAAgB,KAAK,QAAQ,EAAE;gBAC9C,OAAO,IAAI,CAAC,gBAAgB,CAAC;aAC7B;YACD,OAAO,gBAAgB,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;QAChD,CAAC;QAED,IAAI,QAAQ,CAAC,CAAS;YACrB,IAAI,CAAC,gBAAgB,GAAG,CAAC,CAAC;QAC3B,CAAC;QAaD,QAAQ,CACP,GAA8B,EAC9B,IAA8B,EAC9B,EAAsC;YAKtC,MAAM,IAAI,KAAK,CACd,yFAAyF,CACzF,CAAC;QACH,CAAC;QAED;;;;;WAKG;QACH,UAAU,CAAC,GAAkB,EAAE,KAAqB;YACnD,MAAM,IAAI,qBAAwB,KAAK,CAAE,CAAC;YAE1C,IAAI,OAAO,IAAI,CAAC,cAAc,KAAK,SAAS,EAAE;gBAC7C,IAAI,CAAC,cAAc,GAAG,gBAAgB,EAAE,CAAC;aACzC;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;gBACtB,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;aACxB;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;gBACtB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;aAC3C;YAED,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,EAAE;gBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;aACzD;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE;gBAC3B,2DAA2D;gBAC3D,0DAA0D;gBAC1D,4DAA4D;gBAC5D,8CAA8C;gBAC9C,OAAO,IAAI,CAAC,IAAI,CAAC;aACjB;YAED,OAAO,IAAI,CAAC,KAAK,CAAC;YAClB,OAAO,IAAI,CAAC,QAAQ,CAAC;YACrB,OAAO,IAAI,CAAC,aAAa,CAAC;YAC1B
,OAAO,IAAI,CAAC,WAAW,CAAC;YACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC;YAE7B,kCAAkC;YAClC,2CAA2C;YAC3C,GAAG,CAAC,KAAK,GAAG,IAAI,CAAC;YACjB,GAAG,CAAC,eAAe,GAAG,KAAK,CAAC;YAE5B,IAAI,QAAQ,GAAG,KAAK,CAAC;YACrB,IAAI,SAAS,GAAyC,IAAI,CAAC;YAC3D,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAE/C,MAAM,OAAO,GAAG,CAAC,GAA0B,EAAE,EAAE;gBAC9C,IAAI,GAAG,CAAC,SAAS;oBAAE,OAAO;gBAC1B,GAAG,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;gBACvB,yDAAyD;gBACzD,iEAAiE;gBACjE,GAAG,CAAC,SAAS,GAAG,IAAI,CAAC;YACtB,CAAC,CAAC;YAEF,MAAM,SAAS,GAAG,GAAG,EAAE;gBACtB,SAAS,GAAG,IAAI,CAAC;gBACjB,QAAQ,GAAG,IAAI,CAAC;gBAChB,MAAM,GAAG,GAA0B,IAAI,KAAK,CAC3C,sDAAsD,SAAS,IAAI,CACnE,CAAC;gBACF,GAAG,CAAC,IAAI,GAAG,UAAU,CAAC;gBACtB,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,MAAM,aAAa,GAAG,CAAC,GAA0B,EAAE,EAAE;gBACpD,IAAI,QAAQ;oBAAE,OAAO;gBACrB,IAAI,SAAS,KAAK,IAAI,EAAE;oBACvB,YAAY,CAAC,SAAS,CAAC,CAAC;oBACxB,SAAS,GAAG,IAAI,CAAC;iBACjB;gBACD,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,MAAM,QAAQ,GAAG,CAAC,MAA2B,EAAE,EAAE;gBAChD,IAAI,QAAQ;oBAAE,OAAO;gBACrB,IAAI,SAAS,IAAI,IAAI,EAAE;oBACtB,YAAY,CAAC,SAAS,CAAC,CAAC;oBACxB,SAAS,GAAG,IAAI,CAAC;iBACjB;gBAED,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE;oBACpB,oDAAoD;oBACpD,wDAAwD;oBACxD,eAAe;oBACf,KAAK,CACJ,6CAA6C,EAC7C,MAAM,CAAC,WAAW,CAAC,IAAI,CACvB,CAAC;oBACD,MAA4B,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;oBACpD,OAAO;iBACP;gBAED,IAAI,MAAM,EAAE;oBACX,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE;wBACxB,IAAI,CAAC,UAAU,CAAC,MAAoB,EAAE,IAAI,CAAC,CAAC;oBAC7C,CAAC,CAAC,CAAC;oBACH,GAAG,CAAC,QAAQ,CAAC,MAAoB,CAAC,CAAC;oBACnC,OAAO;iBACP;gBAED,MAAM,GAAG,GAAG,IAAI,KAAK,CACpB,qDAAqD,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,IAAI,IAAI,CAC/E,CAAC;gBACF,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACxC,OAAO,CAAC,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC,CAAC;gBAChD,OAAO;aACP;YAED,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE;gBAC9B,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,IAAI,CAAC,EAAE;oBAC9B,KAAK,CAAC,gDAAgD,CAAC,CAAC;oBACxD,IAAI,CAAC,mBAAmB,GAAG,mBAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;iBACpD;qBAAM;oBACN,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,QAAQ,CAAC;iBACzC;aACD;YAED,IAAI,OAAO,SAAS,KAAK,QAAQ,IAAI,SAAS,GAAG,CAAC,EAAE;gBACnD,SAAS,GAAG,UAAU,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;aAC7C;YAED,IAAI,MAAM,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACpD,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAC9B;YAED,IAAI;gBACH,KAAK,CACJ,qCAAqC,EACrC,IAAI,CAAC,QAAQ,EACb,GAAG,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,IAAI,EAAE,CAC3B,CAAC;gBACF,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,mBAAmB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CACxD,QAAQ,EACR,aAAa,CACb,CAAC;aACF;YAAC,OAAO,GAAG,EAAE;gBACb,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;aACzC;QACF,CAAC;QAED,UAAU,CAAC,MAAkB,EAAE,IAAkB;YAChD,KAAK,CAAC,sBAAsB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;YAC7D,MAAM,CAAC,OAAO,EAAE,CAAC;QAClB,CAAC;QAED,OAAO;YACN,KAAK,CAAC,qBAAqB,EAAE,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QACrD,CAAC;KACD;IAxPY,iBAAK,QAwPjB,CAAA;IAED,uCAAuC;IACvC,WAAW,CAAC,SAAS,GAAG,WAAW,CAAC,KAAK,CAAC,SAAS,CAAC;AACrD,CAAC,EAtTS,WAAW,KAAX,WAAW,QAsTpB;AAED,iBAAS,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/agent-base/dist/src/promisify.d.ts b/node_modules/teeny-request/node_modules/agent-base/dist/src/promisify.d.ts new file mode 100644 index 0000000..0268869 --- /dev/null +++ b/node_modules/teeny-request/node_modules/agent-base/dist/src/promisify.d.ts @@ -0,0 +1,4 @@ +import { ClientRequest, RequestOptions, AgentCallbackCallback, AgentCallbackPromise } from './index'; +declare type LegacyCallback = (req: ClientRequest, opts: RequestOptions, fn: 
AgentCallbackCallback) => void; +export default function promisify(fn: LegacyCallback): AgentCallbackPromise; +export {}; diff --git a/node_modules/teeny-request/node_modules/agent-base/dist/src/promisify.js b/node_modules/teeny-request/node_modules/agent-base/dist/src/promisify.js new file mode 100644 index 0000000..b2f6132 --- /dev/null +++ b/node_modules/teeny-request/node_modules/agent-base/dist/src/promisify.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports.default = promisify; +//# sourceMappingURL=promisify.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/agent-base/dist/src/promisify.js.map b/node_modules/teeny-request/node_modules/agent-base/dist/src/promisify.js.map new file mode 100644 index 0000000..4bff9bf --- /dev/null +++ b/node_modules/teeny-request/node_modules/agent-base/dist/src/promisify.js.map @@ -0,0 +1 @@ +{"version":3,"file":"promisify.js","sourceRoot":"","sources":["../../src/promisify.ts"],"names":[],"mappings":";;AAeA,SAAwB,SAAS,CAAC,EAAkB;IACnD,OAAO,UAAsB,GAAkB,EAAE,IAAoB;QACpE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACtC,EAAE,CAAC,IAAI,CACN,IAAI,EACJ,GAAG,EACH,IAAI,EACJ,CAAC,GAA6B,EAAE,GAAyB,EAAE,EAAE;gBAC5D,IAAI,GAAG,EAAE;oBACR,MAAM,CAAC,GAAG,CAAC,CAAC;iBACZ;qBAAM;oBACN,OAAO,CAAC,GAAG,CAAC,CAAC;iBACb;YACF,CAAC,CACD,CAAC;QACH,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC;AACH,CAAC;AAjBD,4BAiBC"} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/agent-base/package.json b/node_modules/teeny-request/node_modules/agent-base/package.json new file mode 100644 index 0000000..fadce3a --- /dev/null +++ b/node_modules/teeny-request/node_modules/agent-base/package.json @@ -0,0 +1,64 @@ +{ + "name": "agent-base", + "version": "6.0.2", + "description": "Turn a function into an `http.Agent` instance", + "main": "dist/src/index", + "typings": "dist/src/index", + "files": [ + "dist/src", + "src" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "postbuild": "cpy --parents src test '!**/*.ts' dist", + "test": "mocha --reporter spec dist/test/*.js", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-agent-base.git" + }, + "keywords": [ + "http", + "agent", + "base", + "barebones", + "https" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-agent-base/issues" + }, + "dependencies": { + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/mocha": "^5.2.7", + "@types/node": "^14.0.20", + "@types/semver": "^7.1.0", + "@types/ws": "^6.0.3", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "async-listen": "^1.2.0", + "cpy-cli": "^2.0.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.0", + "rimraf": "^3.0.0", + "semver": "^7.1.2", + "typescript": "^3.5.3", + "ws": "^3.0.0" + }, + "engines": { + "node": ">= 6.0.0" + } +} diff --git 
a/node_modules/teeny-request/node_modules/agent-base/src/index.ts b/node_modules/teeny-request/node_modules/agent-base/src/index.ts new file mode 100644 index 0000000..a47ccd4 --- /dev/null +++ b/node_modules/teeny-request/node_modules/agent-base/src/index.ts @@ -0,0 +1,345 @@ +import net from 'net'; +import http from 'http'; +import https from 'https'; +import { Duplex } from 'stream'; +import { EventEmitter } from 'events'; +import createDebug from 'debug'; +import promisify from './promisify'; + +const debug = createDebug('agent-base'); + +function isAgent(v: any): v is createAgent.AgentLike { + return Boolean(v) && typeof v.addRequest === 'function'; +} + +function isSecureEndpoint(): boolean { + const { stack } = new Error(); + if (typeof stack !== 'string') return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} + +function createAgent(opts?: createAgent.AgentOptions): createAgent.Agent; +function createAgent( + callback: createAgent.AgentCallback, + opts?: createAgent.AgentOptions +): createAgent.Agent; +function createAgent( + callback?: createAgent.AgentCallback | createAgent.AgentOptions, + opts?: createAgent.AgentOptions +) { + return new createAgent.Agent(callback, opts); +} + +namespace createAgent { + export interface ClientRequest extends http.ClientRequest { + _last?: boolean; + _hadError?: boolean; + method: string; + } + + export interface AgentRequestOptions { + host?: string; + path?: string; + // `port` on `http.RequestOptions` can be a string or undefined, + // but `net.TcpNetConnectOpts` expects only a number + port: number; + } + + export interface HttpRequestOptions + extends AgentRequestOptions, + Omit { + secureEndpoint: false; + } + + export interface HttpsRequestOptions + extends AgentRequestOptions, + Omit { + secureEndpoint: true; + } + + export type RequestOptions = HttpRequestOptions | HttpsRequestOptions; + + export type AgentLike = Pick | http.Agent; + + export type AgentCallbackReturn = Duplex | AgentLike; + + export type AgentCallbackCallback = ( + err?: Error | null, + socket?: createAgent.AgentCallbackReturn + ) => void; + + export type AgentCallbackPromise = ( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions + ) => + | createAgent.AgentCallbackReturn + | Promise; + + export type AgentCallback = typeof Agent.prototype.callback; + + export type AgentOptions = { + timeout?: number; + }; + + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + export class Agent extends EventEmitter { + public timeout: number | null; + public maxFreeSockets: number; + public maxTotalSockets: number; + public maxSockets: number; + public sockets: { + [key: string]: net.Socket[]; + }; + public freeSockets: { + [key: string]: net.Socket[]; + }; + public requests: { + [key: string]: http.IncomingMessage[]; + }; + public options: https.AgentOptions; + private promisifiedCallback?: createAgent.AgentCallbackPromise; + private explicitDefaultPort?: number; + private explicitProtocol?: string; + + constructor( + callback?: createAgent.AgentCallback | createAgent.AgentOptions, + _opts?: createAgent.AgentOptions + ) { + super(); + + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } else if (callback) { + opts = callback; + } + + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + + get defaultPort(): number { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + + set defaultPort(v: number) { + this.explicitDefaultPort = v; + } + + get protocol(): string { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + + set protocol(v: string) { + this.explicitProtocol = v; + } + + callback( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions, + fn: createAgent.AgentCallbackCallback + ): void; + callback( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions + ): + | createAgent.AgentCallbackReturn + | Promise; + callback( + req: createAgent.ClientRequest, + opts: createAgent.AgentOptions, + fn?: createAgent.AgentCallbackCallback + ): + | createAgent.AgentCallbackReturn + | Promise + | void { + throw new Error( + '"agent-base" has no default implementation, you must subclass and override `callback()`' + ); + } + + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req: ClientRequest, _opts: RequestOptions): void { + const opts: RequestOptions = { ..._opts }; + + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + + if (opts.host == null) { + opts.host = 'localhost'; + } + + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. 
+ delete opts.path; + } + + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + + let timedOut = false; + let timeoutId: ReturnType | null = null; + const timeoutMs = opts.timeout || this.timeout; + + const onerror = (err: NodeJS.ErrnoException) => { + if (req._hadError) return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. + req._hadError = true; + }; + + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err: NodeJS.ErrnoException = new Error( + `A "socket" was not created for HTTP request before ${timeoutMs}ms` + ); + err.code = 'ETIMEOUT'; + onerror(err); + }; + + const callbackError = (err: NodeJS.ErrnoException) => { + if (timedOut) return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + + const onsocket = (socket: AgentCallbackReturn) => { + if (timedOut) return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug( + 'Callback returned another Agent instance %o', + socket.constructor.name + ); + (socket as createAgent.Agent).addRequest(req, opts); + return; + } + + if (socket) { + socket.once('free', () => { + this.freeSocket(socket as net.Socket, opts); + }); + req.onSocket(socket as net.Socket); + return; + } + + const err = new Error( + `no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\`` + ); + onerror(err); + }; + + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify(this.callback); + } else { + this.promisifiedCallback = this.callback; + } + } + + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + + try { + debug( + 'Resolving socket for %o request: %o', + opts.protocol, + `${req.method} ${req.path}` + ); + Promise.resolve(this.promisifiedCallback(req, opts)).then( + onsocket, + callbackError + ); + } catch (err) { + Promise.reject(err).catch(callbackError); + } + } + + freeSocket(socket: net.Socket, opts: AgentOptions) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +} + +export = createAgent; diff --git a/node_modules/teeny-request/node_modules/agent-base/src/promisify.ts b/node_modules/teeny-request/node_modules/agent-base/src/promisify.ts new file mode 100644 index 0000000..60cc662 --- /dev/null +++ b/node_modules/teeny-request/node_modules/agent-base/src/promisify.ts @@ -0,0 +1,33 @@ +import { + Agent, + ClientRequest, + RequestOptions, + AgentCallbackCallback, + AgentCallbackPromise, + AgentCallbackReturn +} from './index'; + +type LegacyCallback = ( + req: ClientRequest, + 
opts: RequestOptions, + fn: AgentCallbackCallback +) => void; + +export default function promisify(fn: LegacyCallback): AgentCallbackPromise { + return function(this: Agent, req: ClientRequest, opts: RequestOptions) { + return new Promise((resolve, reject) => { + fn.call( + this, + req, + opts, + (err: Error | null | undefined, rtn?: AgentCallbackReturn) => { + if (err) { + reject(err); + } else { + resolve(rtn); + } + } + ); + }); + }; +} diff --git a/node_modules/teeny-request/node_modules/debug/LICENSE b/node_modules/teeny-request/node_modules/debug/LICENSE new file mode 100644 index 0000000..1a9820e --- /dev/null +++ b/node_modules/teeny-request/node_modules/debug/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/teeny-request/node_modules/debug/README.md b/node_modules/teeny-request/node_modules/debug/README.md new file mode 100644 index 0000000..e9c3e04 --- /dev/null +++ b/node_modules/teeny-request/node_modules/debug/README.md @@ -0,0 +1,481 @@ +# debug +[![Build Status](https://travis-ci.org/debug-js/debug.svg?branch=master)](https://travis-ci.org/debug-js/debug) [![Coverage Status](https://coveralls.io/repos/github/debug-js/debug/badge.svg?branch=master)](https://coveralls.io/github/debug-js/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. 
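+
+For context within this diff: the vendored copies of `agent-base` (above) and `https-proxy-agent` (below) use this module via `createDebug('agent-base')` and `createDebug('https-proxy-agent:agent')`. A minimal sketch of the per-namespace toggling this paragraph describes (the namespace names here are illustrative, not part of the diff):
+
+```js
+// Sketch only: a namespaced logger stays silent until its namespace is enabled.
+const createDebug = require('debug');
+
+const log = createDebug('example:download');
+log('not printed yet');                      // disabled by default
+
+createDebug.enable('example:*');             // same effect as DEBUG=example:*
+log('now printed with a %o payload', { ok: true });
+```
+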
+ +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. + +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see the time spent between one `debug()` call and the next. If, for example, you invoke `debug()` before requesting a resource and again afterwards, the "+NNNms" suffix shows how much time was spent between calls. + + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information. + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debugger you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard.
Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. +See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. + +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. +For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. + +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +In Chromium-based web browsers (e.g. Brave, Chrome, and Electron), the JavaScript console will—by default—only show messages logged by `debug` if the "Verbose" log level is _enabled_. 
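+
+The namespaces registered by the vendored modules in this diff are `agent-base`, `https-proxy-agent:agent`, and `https-proxy-agent:parse-proxy-response` (see their sources further down). A sketch of the wildcard and exclusion rules described above, applied to those names:
+
+```js
+// Sketch only: equivalent to
+//   DEBUG='agent-base,https-proxy-agent:*,-https-proxy-agent:parse-proxy-response'
+const createDebug = require('debug');
+createDebug.enable('agent-base,https-proxy-agent:*,-https-proxy-agent:parse-proxy-response');
+
+console.log(createDebug.enabled('https-proxy-agent:agent'));                // true
+console.log(createDebug.enabled('https-proxy-agent:parse-proxy-response')); // false (excluded)
+```
+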
+ + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. + +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +`disable()` + +Will disable all namespaces. The functions returns the namespaces currently +enabled (and skipped). This can be useful if you want to disable debugging +temporarily without knowing what was enabled to begin with. + +For example: + +```js +let debug = require('debug'); +debug.enable('foo:*,-foo:bar'); +let namespaces = debug.disable(); +debug.enable(namespaces); +``` + +Note: There is no guarantee that the string will be identical to the initial +enable string, but semantically they will be identical. + +## Checking whether a debug target is enabled + +After you've created a debug instance, you can determine whether or not it is +enabled by checking the `enabled` property: + +```javascript +const debug = require('debug')('http'); + +if (debug.enabled) { + // do stuff... +} +``` + +You can also manually toggle this property to force the debug instance to be +enabled or disabled. + +## Usage in child processes + +Due to the way `debug` detects if the output is a TTY or not, colors are not shown in child processes when `stderr` is piped. A solution is to pass the `DEBUG_COLORS=1` environment variable to the child process. +For example: + +```javascript +worker = fork(WORKER_WRAP_PATH, [workerPath], { + stdio: [ + /* stdin: */ 0, + /* stdout: */ 'pipe', + /* stderr: */ 'pipe', + 'ipc', + ], + env: Object.assign({}, process.env, { + DEBUG_COLORS: 1 // without this settings, colors won't be shown + }), +}); + +worker.stderr.pipe(process.stderr, { end: false }); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + - Josh Junon + +## Backers + +Support us with a monthly donation and help us continue our activities. 
[[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/teeny-request/node_modules/debug/package.json b/node_modules/teeny-request/node_modules/debug/package.json new file mode 100644 index 0000000..3bcdc24 --- /dev/null +++ b/node_modules/teeny-request/node_modules/debug/package.json @@ -0,0 +1,59 @@ +{ + "name": "debug", + "version": "4.3.4", + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "description": "Lightweight debugging utility for Node.js and the browser", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "LICENSE", + "README.md" + ], + "author": "Josh Junon ", + "contributors": [ + "TJ Holowaychuk ", + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:node": "istanbul cover _mocha -- test.js", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "ms": "2.1.2" + }, + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "istanbul": "^0.4.5", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "xo": "^0.23.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "engines": { + "node": ">=6.0" + } +} diff --git a/node_modules/teeny-request/node_modules/debug/src/browser.js b/node_modules/teeny-request/node_modules/debug/src/browser.js new file mode 100644 index 0000000..cd0fc35 --- /dev/null +++ b/node_modules/teeny-request/node_modules/debug/src/browser.js @@ -0,0 +1,269 @@ +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. 
+ */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; diff --git a/node_modules/teeny-request/node_modules/debug/src/common.js b/node_modules/teeny-request/node_modules/debug/src/common.js new file mode 100644 index 0000000..e3291b2 --- /dev/null +++ b/node_modules/teeny-request/node_modules/debug/src/common.js @@ -0,0 +1,274 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. 
+ */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. 
+ * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; diff --git a/node_modules/teeny-request/node_modules/debug/src/index.js b/node_modules/teeny-request/node_modules/debug/src/index.js new file mode 100644 index 0000000..bf4c57f --- /dev/null +++ b/node_modules/teeny-request/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/node_modules/teeny-request/node_modules/debug/src/node.js b/node_modules/teeny-request/node_modules/debug/src/node.js new file mode 100644 index 0000000..79bc085 --- /dev/null +++ b/node_modules/teeny-request/node_modules/debug/src/node.js @@ -0,0 +1,263 @@ +/** + * Module dependencies. + */ + +const tty = require('tty'); +const util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. 
+ */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. 
+ * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/README.md b/node_modules/teeny-request/node_modules/https-proxy-agent/README.md new file mode 100644 index 0000000..328656a --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/README.md @@ -0,0 +1,137 @@ +https-proxy-agent +================ +### An HTTP(s) proxy `http.Agent` implementation for HTTPS +[![Build Status](https://github.com/TooTallNate/node-https-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-https-proxy-agent/actions?workflow=Node+CI) + +This module provides an `http.Agent` implementation that connects to a specified +HTTP or HTTPS proxy server, and can be used with the built-in `https` module. + +Specifically, this `Agent` implementation connects to an intermediary "proxy" +server and issues the [CONNECT HTTP method][CONNECT], which tells the proxy to +open a direct TCP connection to the destination server. + +Since this agent implements the CONNECT HTTP method, it also works with other +protocols that use this method when connecting over proxies (i.e. WebSockets). +See the "Examples" section below for more. 
+ + +Installation +------------ + +Install with `npm`: + +``` bash +$ npm install https-proxy-agent +``` + + +Examples +-------- + +#### `https` module example + +``` js +var url = require('url'); +var https = require('https'); +var HttpsProxyAgent = require('https-proxy-agent'); + +// HTTP/HTTPS proxy to connect to +var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; +console.log('using proxy server %j', proxy); + +// HTTPS endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'https://graph.facebook.com/tootallnate'; +console.log('attempting to GET %j', endpoint); +var options = url.parse(endpoint); + +// create an instance of the `HttpsProxyAgent` class with the proxy server information +var agent = new HttpsProxyAgent(proxy); +options.agent = agent; + +https.get(options, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + +#### `ws` WebSocket connection example + +``` js +var url = require('url'); +var WebSocket = require('ws'); +var HttpsProxyAgent = require('https-proxy-agent'); + +// HTTP/HTTPS proxy to connect to +var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; +console.log('using proxy server %j', proxy); + +// WebSocket endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'ws://echo.websocket.org'; +var parsed = url.parse(endpoint); +console.log('attempting to connect to WebSocket %j', endpoint); + +// create an instance of the `HttpsProxyAgent` class with the proxy server information +var options = url.parse(proxy); + +var agent = new HttpsProxyAgent(options); + +// finally, initiate the WebSocket connection +var socket = new WebSocket(endpoint, { agent: agent }); + +socket.on('open', function () { + console.log('"open" event!'); + socket.send('hello world'); +}); + +socket.on('message', function (data, flags) { + console.log('"message" event! %j %j', data, flags); + socket.close(); +}); +``` + +API +--- + +### new HttpsProxyAgent(Object options) + +The `HttpsProxyAgent` class implements an `http.Agent` subclass that connects +to the specified "HTTP(s) proxy server" in order to proxy HTTPS and/or WebSocket +requests. This is achieved by using the [HTTP `CONNECT` method][CONNECT]. + +The `options` argument may either be a string URI of the proxy server to use, or an +"options" object with more specific properties: + + * `host` - String - Proxy host to connect to (may use `hostname` as well). Required. + * `port` - Number - Proxy port to connect to. Required. + * `protocol` - String - If `https:`, then use TLS to connect to the proxy. + * `headers` - Object - Additional HTTP headers to be sent on the HTTP CONNECT method. + * Any other options given are passed to the `net.connect()`/`tls.connect()` functions. + + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +[CONNECT]: http://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_Tunneling diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/dist/agent.d.ts b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/agent.d.ts new file mode 100644 index 0000000..4f1c636 --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/agent.d.ts @@ -0,0 +1,30 @@ +/// +import net from 'net'; +import { Agent, ClientRequest, RequestOptions } from 'agent-base'; +import { HttpsProxyAgentOptions } from '.'; +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +export default class HttpsProxyAgent extends Agent { + private secureProxy; + private proxy; + constructor(_opts: string | HttpsProxyAgentOptions); + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req: ClientRequest, opts: RequestOptions): Promise; +} diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/dist/agent.js b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/agent.js new file mode 100644 index 0000000..75d1136 --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/agent.js @@ -0,0 +1,177 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const net_1 = __importDefault(require("net")); +const tls_1 = __importDefault(require("tls")); +const url_1 = __importDefault(require("url")); +const assert_1 = __importDefault(require("assert")); +const debug_1 = __importDefault(require("debug")); +const agent_base_1 = require("agent-base"); +const parse_proxy_response_1 = __importDefault(require("./parse-proxy-response")); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. 
+ let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + }); + } +} +exports.default = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=agent.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/dist/agent.js.map b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/agent.js.map new file mode 100644 index 0000000..0af6c17 --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/agent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,oDAA4B;AAC5B,kDAAgC;AAEhC,2CAAkE;AAElE,kFAAwD;AAExD,MAAM,KAAK,GAAG,eAAW,CAAC,yBAAyB,CAAC,CAAC;AAErD;;;;;;;;;;;;;GAaG;AACH,MAAqB,eAAgB,SAAQ,kBAAK;IAIjD,YAAY,KAAsC;QACjD,IAAI,IAA4B,CAAC;QACjC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,2CAA2C,EAAE,IAAI,CAAC,CAAC;QACzD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAAgC,IAAI,CAAE,CAAC;QAElD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,sCAAsC;QACtC,sEAAsE;QACtE,IAAI,IAAI,CAAC,WAAW,IAAI,CAAC,CAAC,eAAe,IAAI,KAAK,CAAC,EAAE;YACpD,KAAK,CAAC,aAAa,GAAG,CAAC,UAAU,CAAC,CAAC;SACnC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAkB,EAClB,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YAEpC,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,MAAM,OAAO,qBAA6B,KAAK,CAAC,OAAO,CAAE,CAAC;YAC1D,MAAM,QAAQ,GAAG,GAAG,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;YAC7C,IAAI,OAAO,GAAG,WAAW,QAAQ,eAAe,CAAC;YAEjD,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,OAAO,CAAC,qBAAqB,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACpD,KAAK,CAAC,IAAI,CACV,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;aACvB;YAED,iDAAiD;YACjD,0CAA0C;YAC1C,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,IAAI,CAAC;YAC1C,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,cAAc,CAAC,EAAE;gBACzC,IAAI,IAAI,IAAI,IAAI,EAAE,CAAC;aACnB;YACD,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;YAEpB,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC;YAC7B,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE;gBACxC,OAAO,IAAI,GAAG,IAAI,KAAK,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC;aAC3C;YAED,MAAM,oBAAoB,GAAG,8BAAkB,CAAC,MAAM,CAAC,CAAC;YAExD,MAAM,CAAC,KAAK,CAAC,GAAG,OAAO,MAAM,CAAC,CAAC;YAE/B,MAAM,EACL,UAAU,EACV,QAAQ,EACR,GAAG,MAAM,oBAAoB,CAAC;YAE/B,IAAI,UAAU,KAAK,GAAG,EAAE;gBACvB,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;gBAE3B,IAAI,IAAI,CAAC,cAAc,EAAE;oBACxB,sDAAsD;oBACtD,8CAA8C;oBAC9C,KAAK,CAAC,oCAAoC,CAAC,CAAC;oBAC5C,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,IAAI,CAAC;oBAChD,OAAO,aAAG,CAAC,OAAO,iCACd,IAAI,
CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,CAAC,KACjD,MAAM;wBACN,UAAU,IACT,CAAC;iBACH;gBAED,OAAO,MAAM,CAAC;aACd;YAED,oEAAoE;YACpE,kEAAkE;YAClE,iEAAiE;YACjE,qBAAqB;YAErB,iEAAiE;YACjE,0DAA0D;YAC1D,oEAAoE;YACpE,mBAAmB;YACnB,EAAE;YACF,4CAA4C;YAC5C,MAAM,CAAC,OAAO,EAAE,CAAC;YAEjB,MAAM,UAAU,GAAG,IAAI,aAAG,CAAC,MAAM,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC;YACvD,UAAU,CAAC,QAAQ,GAAG,IAAI,CAAC;YAE3B,oEAAoE;YACpE,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAa,EAAE,EAAE;gBACpC,KAAK,CAAC,2CAA2C,CAAC,CAAC;gBACnD,gBAAM,CAAC,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;gBAEpC,gEAAgE;gBAChE,8DAA8D;gBAC9D,YAAY;gBACZ,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;gBACjB,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACd,CAAC,CAAC,CAAC;YAEH,OAAO,UAAU,CAAC;QACnB,CAAC;KAAA;CACD;AA3JD,kCA2JC;AAED,SAAS,MAAM,CAAC,MAAkC;IACjD,MAAM,CAAC,MAAM,EAAE,CAAC;AACjB,CAAC;AAED,SAAS,aAAa,CAAC,IAAY,EAAE,MAAe;IACnD,OAAO,OAAO,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,KAAK,GAAG,CAAC,CAAC,CAAC;AACtE,CAAC;AAED,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED,SAAS,IAAI,CACZ,GAAM,EACN,GAAG,IAAO;IAIV,MAAM,GAAG,GAAG,EAEX,CAAC;IACF,IAAI,GAAqB,CAAC;IAC1B,KAAK,GAAG,IAAI,GAAG,EAAE;QAChB,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACxB,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;SACpB;KACD;IACD,OAAO,GAAG,CAAC;AACZ,CAAC"} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/dist/index.d.ts b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/index.d.ts new file mode 100644 index 0000000..0d60062 --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/index.d.ts @@ -0,0 +1,23 @@ +/// +import net from 'net'; +import tls from 'tls'; +import { Url } from 'url'; +import { AgentOptions } from 'agent-base'; +import { OutgoingHttpHeaders } from 'http'; +import _HttpsProxyAgent from './agent'; +declare function createHttpsProxyAgent(opts: string | createHttpsProxyAgent.HttpsProxyAgentOptions): _HttpsProxyAgent; +declare namespace createHttpsProxyAgent { + interface BaseHttpsProxyAgentOptions { + headers?: OutgoingHttpHeaders; + secureProxy?: boolean; + host?: string | null; + path?: string | null; + port?: string | number | null; + } + export interface HttpsProxyAgentOptions extends AgentOptions, BaseHttpsProxyAgentOptions, Partial> { + } + export type HttpsProxyAgent = _HttpsProxyAgent; + export const HttpsProxyAgent: typeof _HttpsProxyAgent; + export {}; +} +export = createHttpsProxyAgent; diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/dist/index.js b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/index.js new file mode 100644 index 0000000..b03e763 --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/index.js @@ -0,0 +1,14 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(require("./agent")); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/dist/index.js.map b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/index.js.map new file mode 100644 index 0000000..f3ce559 --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAKA,oDAAuC;AAEvC,SAAS,qBAAqB,CAC7B,IAA2D;IAE3D,OAAO,IAAI,eAAgB,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED,WAAU,qBAAqB;IAoBjB,qCAAe,GAAG,eAAgB,CAAC;IAEhD,qBAAqB,CAAC,SAAS,GAAG,eAAgB,CAAC,SAAS,CAAC;AAC9D,CAAC,EAvBS,qBAAqB,KAArB,qBAAqB,QAuB9B;AAED,iBAAS,qBAAqB,CAAC"} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts new file mode 100644 index 0000000..7565674 --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts @@ -0,0 +1,7 @@ +/// +import { Readable } from 'stream'; +export interface ProxyResponse { + statusCode: number; + buffered: Buffer; +} +export default function parseProxyResponse(socket: Readable): Promise; diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/dist/parse-proxy-response.js b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/parse-proxy-response.js new file mode 100644 index 0000000..aa5ce3c --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/parse-proxy-response.js @@ -0,0 +1,66 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const debug_1 = __importDefault(require("debug")); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); + } + function onclose(err) { + debug('onclose had error %o', err); + } + function onend() { + debug('onend'); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered + }); + } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports.default = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map new file mode 100644 index 0000000..bacdb84 --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parse-proxy-response.js","sourceRoot":"","sources":["../src/parse-proxy-response.ts"],"names":[],"mappings":";;;;;AAAA,kDAAgC;AAGhC,MAAM,KAAK,GAAG,eAAW,CAAC,wCAAwC,CAAC,CAAC;AAOpE,SAAwB,kBAAkB,CACzC,MAAgB;IAEhB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,+EAA+E;QAC/E,gFAAgF;QAChF,8EAA8E;QAC9E,8BAA8B;QAC9B,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,MAAM,OAAO,GAAa,EAAE,CAAC;QAE7B,SAAS,IAAI;YACZ,MAAM,CAAC,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YACxB,IAAI,CAAC;gBAAE,MAAM,CAAC,CAAC,CAAC,CAAC;;gBACZ,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;QACpC,CAAC;QAED,SAAS,OAAO;YACf,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;YACpC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YACxC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YACxC,MAAM,CAAC,cAAc,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;QACzC,CAAC;QAED,SAAS,OAAO,CAAC,GAAW;YAC3B,KAAK,CAAC,sBAAsB,EAAE,GAAG,CAAC,CAAC;QACpC,CAAC;QAED,SAAS,KAAK;YACb,KAAK,CAAC,OAAO,CAAC,CAAC;QAChB,CAAC;QAED,SAAS,OAAO,CAAC,GAAU;YAC1B,OAAO,EAAE,CAAC;YACV,KAAK,CAAC,YAAY,EAAE,GAAG,CAAC,CAAC;YACzB,MAAM,CAAC,GAAG,CAAC,CAAC;QACb,CAAC;QAED,SAAS,MAAM,CAAC,CAAS;YACxB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YAChB,aAAa,IAAI,CAAC,CAAC,MAAM,CAAC;YAE1B,MAAM,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;YACvD,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;YAElD,IAAI,YAAY,KAAK,CAAC,CAAC,EAAE;gBACxB,iBAAiB;gBACjB,KAAK,CAAC,8CAA8C,CAAC,CAAC;gBACtD,IAAI,EAAE,CAAC;gBACP,OAAO;aACP;YAED,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,CAClC,OAAO,EACP,CAAC,EACD,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CACxB,CAAC;YACF,MAAM,UAAU,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5C,KAAK,CAAC,+BAA+B,EAAE,SAAS,CAAC,CAAC;YAClD,OAAO,CAAC;gBACP,UAAU;gBACV,QAAQ;aACR,CAAC,CAAC;QACJ,CAAC;QAED,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;QAExB,IAAI,EAAE,CAAC;IACR,CAAC,CAAC,
CAAC;AACJ,CAAC;AAvED,qCAuEC"} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/https-proxy-agent/package.json b/node_modules/teeny-request/node_modules/https-proxy-agent/package.json new file mode 100644 index 0000000..fb2aba1 --- /dev/null +++ b/node_modules/teeny-request/node_modules/https-proxy-agent/package.json @@ -0,0 +1,56 @@ +{ + "name": "https-proxy-agent", + "version": "5.0.1", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS", + "main": "dist/index", + "types": "dist/index", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha --reporter spec", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-https-proxy-agent.git" + }, + "keywords": [ + "https", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-https-proxy-agent/issues" + }, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/node": "^12.12.11", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "proxy": "1", + "rimraf": "^3.0.0", + "typescript": "^3.5.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/teeny-request/node_modules/ms/index.js b/node_modules/teeny-request/node_modules/ms/index.js new file mode 100644 index 0000000..c4498bc --- /dev/null +++ b/node_modules/teeny-request/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/teeny-request/node_modules/ms/license.md b/node_modules/teeny-request/node_modules/ms/license.md new file mode 100644 index 0000000..69b6125 --- /dev/null +++ b/node_modules/teeny-request/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
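The `https-proxy-agent` and `ms` files above are pulled in as transitive dependencies of `teeny-request` (hence the nested `node_modules/teeny-request/node_modules/...` paths). `https-proxy-agent` tunnels HTTPS requests through a forward proxy: it issues an HTTP `CONNECT` to the proxy, buffers and parses the proxy's status line (`parse-proxy-response.js`), and on a `200` hands the tunneled socket to Node's TLS layer; `ms` is the small duration parser that its `debug` dependency relies on. A minimal usage sketch, assuming the package is required directly and that the proxy and target URLs are placeholders (neither is part of this change):

```js
// Sketch only – illustrates how the vendored https-proxy-agent@5 is typically used.
const https = require('https');
const HttpsProxyAgent = require('https-proxy-agent');

// Hypothetical forward proxy; replace with a real HTTP(S) proxy URL.
const agent = new HttpsProxyAgent('http://127.0.0.1:3128');

https.get('https://example.com/', { agent }, (res) => {
  // The response arrives through the CONNECT tunnel established by the agent.
  console.log('status:', res.statusCode);
  res.resume();
});
```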
diff --git a/node_modules/teeny-request/node_modules/ms/package.json b/node_modules/teeny-request/node_modules/ms/package.json new file mode 100644 index 0000000..eea666e --- /dev/null +++ b/node_modules/teeny-request/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.1.2", + "description": "Tiny millisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.12.1", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1" + } +} diff --git a/node_modules/teeny-request/node_modules/ms/readme.md b/node_modules/teeny-request/node_modules/ms/readme.md new file mode 100644 index 0000000..9a1996b --- /dev/null +++ b/node_modules/teeny-request/node_modules/ms/readme.md @@ -0,0 +1,60 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit) + +Use this package to easily convert various time formats to milliseconds. + +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! + +As always, you can run the tests using: `npm test` diff --git a/node_modules/teeny-request/node_modules/uuid/CHANGELOG.md b/node_modules/teeny-request/node_modules/uuid/CHANGELOG.md new file mode 100644 index 0000000..0412ad8 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,274 @@ +# Changelog + +All notable changes to this project will be documented in this file. 
See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [9.0.1](https://github.com/uuidjs/uuid/compare/v9.0.0...v9.0.1) (2023-09-12) + +### build + +- Fix CI to work with Node.js 20.x + +## [9.0.0](https://github.com/uuidjs/uuid/compare/v8.3.2...v9.0.0) (2022-09-05) + +### ⚠ BREAKING CHANGES + +- Drop Node.js 10.x support. This library always aims at supporting one EOLed LTS release which by this time now is 12.x which has reached EOL 30 Apr 2022. + +- Remove the minified UMD build from the package. + + Minified code is hard to audit and since this is a widely used library it seems more appropriate nowadays to optimize for auditability than to ship a legacy module format that, at best, serves educational purposes nowadays. + + For production browser use cases, users should be using a bundler. For educational purposes, today's online sandboxes like replit.com offer convenient ways to load npm modules, so the use case for UMD through repos like UNPKG or jsDelivr has largely vanished. + +- Drop IE 11 and Safari 10 support. Drop support for browsers that don't correctly implement const/let and default arguments, and no longer transpile the browser build to ES2015. + + This also removes the fallback on msCrypto instead of the crypto API. + + Browser tests are run in the first supported version of each supported browser and in the latest (as of this commit) version available on Browserstack. + +### Features + +- optimize uuid.v1 by 1.3x uuid.v4 by 4.3x (430%) ([#597](https://github.com/uuidjs/uuid/issues/597)) ([3a033f6](https://github.com/uuidjs/uuid/commit/3a033f6bab6bb3780ece6d645b902548043280bc)) +- remove UMD build ([#645](https://github.com/uuidjs/uuid/issues/645)) ([e948a0f](https://github.com/uuidjs/uuid/commit/e948a0f22bf22f4619b27bd913885e478e20fe6f)), closes [#620](https://github.com/uuidjs/uuid/issues/620) +- use native crypto.randomUUID when available ([#600](https://github.com/uuidjs/uuid/issues/600)) ([c9e076c](https://github.com/uuidjs/uuid/commit/c9e076c852edad7e9a06baaa1d148cf4eda6c6c4)) + +### Bug Fixes + +- add Jest/jsdom compatibility ([#642](https://github.com/uuidjs/uuid/issues/642)) ([16f9c46](https://github.com/uuidjs/uuid/commit/16f9c469edf46f0786164cdf4dc980743984a6fd)) +- change default export to named function ([#545](https://github.com/uuidjs/uuid/issues/545)) ([c57bc5a](https://github.com/uuidjs/uuid/commit/c57bc5a9a0653273aa639cda9177ce52efabe42a)) +- handle error when parameter is not set in v3 and v5 ([#622](https://github.com/uuidjs/uuid/issues/622)) ([fcd7388](https://github.com/uuidjs/uuid/commit/fcd73881692d9fabb63872576ba28e30ff852091)) +- run npm audit fix ([#644](https://github.com/uuidjs/uuid/issues/644)) ([04686f5](https://github.com/uuidjs/uuid/commit/04686f54c5fed2cfffc1b619f4970c4bb8532353)) +- upgrading from uuid3 broken link ([#568](https://github.com/uuidjs/uuid/issues/568)) ([1c849da](https://github.com/uuidjs/uuid/commit/1c849da6e164259e72e18636726345b13a7eddd6)) + +### build + +- drop Node.js 8.x from babel transpile target ([#603](https://github.com/uuidjs/uuid/issues/603)) ([aa11485](https://github.com/uuidjs/uuid/commit/aa114858260402107ec8a1e1a825dea0a259bcb5)) +- drop support for legacy browsers (IE11, Safari 10) ([#604](https://github.com/uuidjs/uuid/issues/604)) ([0f433e5](https://github.com/uuidjs/uuid/commit/0f433e5ec444edacd53016de67db021102f36148)) + +- drop node 10.x to upgrade dev dependencies ([#653](https://github.com/uuidjs/uuid/issues/653)) 
([28a5712](https://github.com/uuidjs/uuid/commit/28a571283f8abda6b9d85e689f95b7d3ee9e282e)), closes [#643](https://github.com/uuidjs/uuid/issues/643) + +### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08) + +### Bug Fixes + +- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) ([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536) + +### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04) + +### Bug Fixes + +- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375) + +## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27) + +### Features + +- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) [#180](https://github.com/uuidjs/uuid/issues/180) + +## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23) + +### Features + +- improve performance of v1 string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5)) +- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437) +- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659)) + +### Bug Fixes + +- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8)) + +## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20) + +### Features + +- improve v4 performance by reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d)) +- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2)) + +### Bug Fixes + +- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444) + +## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) (2020-04-29) + +### ⚠ BREAKING CHANGES + +- For native ECMAScript Module (ESM) usage in Node.js only named exports are exposed, there is no more default export. 
+ + ```diff + -import uuid from 'uuid'; + -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869' + +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' + ``` + +- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported. + + Instead use the named exports that this module exports. + + For ECMAScript Modules (ESM): + + ```diff + -import uuidv4 from 'uuid/v4'; + +import { v4 as uuidv4 } from 'uuid'; + uuidv4(); + ``` + + For CommonJS: + + ```diff + -const uuidv4 = require('uuid/v4'); + +const { v4: uuidv4 } = require('uuid'); + uuidv4(); + ``` + +### Features + +- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342) +- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba)) + +### Bug Fixes + +- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0)) + +### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31) + +### Bug Fixes + +- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408) + +### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04) + +### Bug Fixes + +- make access to msCrypto consistent ([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c)) +- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7)) +- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4)) + +### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25) + +### Bug Fixes + +- clean up esm builds for node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc)) +- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378) + +## [7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24) + +### ⚠ BREAKING CHANGES + +- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed. +- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants. 
+- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function. +- Remove support for generating v3 and v5 UUIDs in Node.js<4.x +- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers. + +### Features + +- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345) +- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555)) +- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b)) +- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0)) +- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173) +- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627)) +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### Bug Fixes + +- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48)) +- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370) +- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23)) + +## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + +### Features + +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +## [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + +### Bug Fixes + +- no longer run ci tests on node v4 +- upgrade dependencies + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + +### Bug Fixes + +- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + +### Bug Fixes + +- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + +### Bug Fixes + +- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +- fix 
[#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + +### Bug Fixes + +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + +### Bug Fixes + +- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +### Features + +- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +- Fix typo (#178) +- Simple typo fix (#165) + +### Features + +- v5 support in CLI (#197) +- V5 support (#188) + +# 3.0.1 (2016-11-28) + +- split uuid versions into separate files + +# 3.0.0 (2016-11-17) + +- remove .parse and .unparse + +# 2.0.0 + +- Removed uuid.BufferClass + +# 1.4.0 + +- Improved module context detection +- Removed public RNG functions + +# 1.3.2 + +- Improve tests and handling of v1() options (Issue #24) +- Expose RNG option to allow for perf testing with different generators + +# 1.3.0 + +- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! +- Support for node.js crypto API +- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/teeny-request/node_modules/uuid/CONTRIBUTING.md b/node_modules/teeny-request/node_modules/uuid/CONTRIBUTING.md new file mode 100644 index 0000000..4a4503d --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing + +Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library! 
+ +## Testing + +```shell +npm test +``` + +## Releasing + +Releases are supposed to be done from master, version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version): + +```shell +npm run release -- --dry-run # verify output manually +npm run release # follow the instructions from the output of this command +``` diff --git a/node_modules/teeny-request/node_modules/uuid/LICENSE.md b/node_modules/teeny-request/node_modules/uuid/LICENSE.md new file mode 100644 index 0000000..3934168 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/teeny-request/node_modules/uuid/README.md b/node_modules/teeny-request/node_modules/uuid/README.md new file mode 100644 index 0000000..4f51e09 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/README.md @@ -0,0 +1,466 @@ + + + +# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser) + +For the creation of [RFC4122](https://www.ietf.org/rfc/rfc4122.txt) UUIDs + +- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs +- **Cross-platform** - Support for ... + - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds) + - NodeJS 12+ ([LTS releases](https://github.com/nodejs/Release)) + - Chrome, Safari, Firefox, Edge browsers + - Webpack and rollup.js module bundlers + - [React Native / Expo](#react-native--expo) +- **Secure** - Cryptographically-strong random values +- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers +- **CLI** - Includes the [`uuid` command line](#command-line) utility + +> **Note** Upgrading from `uuid@3`? Your code is probably okay, but check out [Upgrading From `uuid@3`](#upgrading-from-uuid3) for details. + +> **Note** Only interested in creating a version 4 UUID? You might be able to use [`crypto.randomUUID()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/randomUUID), eliminating the need to install this library. + +## Quickstart + +To create a random UUID... + +**1. Install** + +```shell +npm install uuid +``` + +**2. 
Create a UUID** (ES6 module syntax) + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' +``` + +... or using CommonJS syntax: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +For timestamp UUIDs, namespace UUIDs, and other options read on ... + +## API Summary + +| | | | +| --- | --- | --- | +| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` | +| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` | +| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` | +| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | | +| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | | +| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | | +| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | | +| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` | +| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` | + +## API + +### uuid.NIL + +The nil UUID string (all zeros). + +Example: + +```javascript +import { NIL as NIL_UUID } from 'uuid'; + +NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000' +``` + +### uuid.parse(str) + +Convert UUID string to array of bytes + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Uint8Array[16]` | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. + +Example: + +```javascript +import { parse as uuidParse } from 'uuid'; + +// Parse a UUID +const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); + +// Convert to hex strings to show byte order (for documentation purposes) +[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨ + // [ + // '6e', 'c0', 'bd', '7f', + // '11', 'c0', '43', 'da', + // '97', '5e', '2a', '8a', + // 'd9', 'eb', 'ae', '0b' + // ] +``` + +### uuid.stringify(arr[, offset]) + +Convert array of bytes to UUID string + +| | | +| -------------- | ---------------------------------------------------------------------------- | +| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. | +| [`offset` = 0] | `Number` Starting index in the Array | +| _returns_ | `String` | +| _throws_ | `TypeError` if a valid UUID string cannot be generated | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. 
+ +Example: + +```javascript +import { stringify as uuidStringify } from 'uuid'; + +const uuidBytes = [ + 0x6e, 0xc0, 0xbd, 0x7f, 0x11, 0xc0, 0x43, 0xda, 0x97, 0x5e, 0x2a, 0x8a, 0xd9, 0xeb, 0xae, 0x0b, +]; + +uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b' +``` + +### uuid.v1([options[, buffer[, offset]]]) + +Create an RFC version 1 (timestamp) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) | +| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff | +| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) | +| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanoseconds to add to `msecs`, should be 0-10,000) | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | +| _throws_ | `Error` if more than 10M UUIDs/sec are requested | + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields. + +Example: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' +``` + +Example using `options`: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678, +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' +``` + +### uuid.v3(name, namespace[, buffer[, offset]]) + +Create an RFC version 3 (namespace w/ MD5) UUID + +API is identical to `v5()`, but uses "v3" instead. + +⚠️ Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_." 
+ +### uuid.v4([options[, buffer[, offset]]]) + +Create an RFC version 4 (random) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Example: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +Example using predefined `random` values: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +const v4options = { + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36, + ], +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' +``` + +### uuid.v5(name, namespace[, buffer[, offset]]) + +Create an RFC version 5 (namespace w/ SHA-1) UUID + +| | | +| --- | --- | +| `name` | `String \| Array` | +| `namespace` | `String \| Array[16]` Namespace UUID | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`. + +Example with custom namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +// Define a custom namespace. Readers, create your own using something like +// https://www.uuidgenerator.net/ +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; + +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' +``` + +Example with RFC `URL` namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1' +``` + +### uuid.validate(str) + +Test a string to see if it is a valid UUID + +| | | +| --------- | --------------------------------------------------- | +| `str` | `String` to validate | +| _returns_ | `true` if string is a valid UUID, `false` otherwise | + +Example: + +```javascript +import { validate as uuidValidate } from 'uuid'; + +uuidValidate('not a UUID'); // ⇨ false +uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true +``` + +Using `validate` and `version` together it is possible to do per-version validation, e.g. validate for only v4 UUIds. 
+ +```javascript +import { version as uuidVersion } from 'uuid'; +import { validate as uuidValidate } from 'uuid'; + +function uuidValidateV4(uuid) { + return uuidValidate(uuid) && uuidVersion(uuid) === 4; +} + +const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210'; +const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836'; + +uuidValidateV4(v4Uuid); // ⇨ true +uuidValidateV4(v1Uuid); // ⇨ false +``` + +### uuid.version(str) + +Detect RFC version of a UUID + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Number` The RFC version of the UUID | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Example: + +```javascript +import { version as uuidVersion } from 'uuid'; + +uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1 +uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4 +``` + +## Command Line + +UUIDs can be generated from the command line using `uuid`. + +```shell +$ npx uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 +``` + +The default is to generate version 4 UUIDS, however the other versions are supported. Type `uuid --help` for details: + +```shell +$ npx uuid --help + +Usage: + uuid + uuid v1 + uuid v3 + uuid v4 + uuid v5 + uuid --help + +Note: may be "URL" or "DNS" to use the corresponding UUIDs +defined by RFC4122 +``` + +## ECMAScript Modules + +This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both, Node.js and browser environments). + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +To run the examples you must first create a dist build of this library in the module root: + +```shell +npm run build +``` + +## CDN Builds + +### ECMAScript Modules + +To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/): + +```html + +``` + +### UMD + +As of `uuid@9` [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds are no longer shipped with this library. + +If you need a UMD build of this library, use a bundler like Webpack or Rollup. Alternatively, refer to the documentation of [`uuid@8.3.2`](https://github.com/uuidjs/uuid/blob/v8.3.2/README.md#umd) which was the last version that shipped UMD builds. + +## Known issues + +### Duplicate UUIDs (Googlebot) + +This module may generate duplicate UUIDs when run in clients with _deterministic_ random number generators, such as [Googlebot crawlers](https://developers.google.com/search/docs/advanced/crawling/overview-google-crawlers). This can cause problems for apps that expect client-generated UUIDs to always be unique. Developers should be prepared for this and have a strategy for dealing with possible collisions, such as: + +- Check for duplicate UUIDs, fail gracefully +- Disable write operations for Googlebot clients + +### "getRandomValues() not supported" + +This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported. 
This issue can be resolved by adding an appropriate polyfill: + +### React Native / Expo + +1. Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme) +1. Import it _before_ `uuid`. Since `uuid` might also appear as a transitive dependency of some other imports it's safest to just import `react-native-get-random-values` as the very first thing in your entry point: + +```javascript +import 'react-native-get-random-values'; +import { v4 as uuidv4 } from 'uuid'; +``` + +Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`. + +### Web Workers / Service Workers (Edge <= 18) + +[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please). + +### IE 11 (Internet Explorer) + +Support for IE11 and other legacy browsers has been dropped as of `uuid@9`. If you need to support legacy browsers, you can always transpile the uuid module source yourself (e.g. using [Babel](https://babeljs.io/)). + +## Upgrading From `uuid@7` + +### Only Named Exports Supported When Using with Node.js ESM + +`uuid@7` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports. + +Instead of doing: + +```javascript +import uuid from 'uuid'; +uuid.v4(); +``` + +you will now have to use the named exports: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +### Deep Requires No Longer Supported + +Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7`](#deep-requires-now-deprecated) are no longer supported. + +## Upgrading From `uuid@3` + +"_Wait... what happened to `uuid@4` thru `uuid@6`?!?_" + +In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 thru 6 of this module have been skipped. + +### Deep Requires Now Deprecated + +`uuid@3` encouraged the use of deep requires to minimize the bundle size of browser builds: + +```javascript +const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED! +uuidv4(); +``` + +As of `uuid@7` this library now provides ECMAScript modules builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +... or for CommonJS: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); +``` + +### Default Export Removed + +`uuid@3` was exporting the Version 4 UUID method as a default export: + +```javascript +const uuid = require('uuid'); // <== REMOVED! +``` + +This usage pattern was already discouraged in `uuid@3` and has been removed in `uuid@7`. 
+ +--- + +Markdown generated from [README_js.md](README_js.md) by diff --git a/node_modules/teeny-request/node_modules/uuid/dist/bin/uuid b/node_modules/teeny-request/node_modules/uuid/dist/bin/uuid new file mode 100644 index 0000000..f38d2ee --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/bin/uuid @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../uuid-bin'); diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/index.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/index.js new file mode 100644 index 0000000..5586dd3 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function get() { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function get() { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function get() { + return _stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function get() { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function get() { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function get() { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function get() { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function get() { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function get() { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/md5.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/md5.js new file mode 100644 index 0000000..7a4582a --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/md5.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. 
+ * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. + */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = 
md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. 
+ */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/native.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/native.js new file mode 100644 index 0000000..c2eea59 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/native.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/nil.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/nil.js new file mode 100644 index 0000000..7ade577 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/parse.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/parse.js new file mode 100644 index 0000000..4c69fc3 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/regex.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/regex.js new file mode 100644 index 0000000..1ef91d6 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/rng.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/rng.js new file mode 100644 index 0000000..d067cdb --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/rng.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/sha1.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/sha1.js new file mode 100644 index 0000000..24cbced --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/sha1.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/stringify.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/stringify.js new file mode 100644 index 0000000..390bf89 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = 
_interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v1.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v1.js new file mode 100644 index 0000000..125bc58 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v3.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v3.js new file mode 100644 index 0000000..6b47ff5 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v35.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v35.js new file mode 100644 index 0000000..7c522d9 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v4.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v4.js new file mode 100644 index 0000000..959d698 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v5.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v5.js new file mode 100644 index 0000000..99d615e --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/validate.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/validate.js new file mode 100644 index 0000000..fd05215 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/version.js b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/version.js new file mode 100644 index 0000000..f63af01 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/commonjs-browser/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/index.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/index.js new file mode 100644 index 0000000..1db6f6d --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/md5.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/md5.js new file mode 100644 index 0000000..f12212e --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/md5.js @@ -0,0 +1,215 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, 
a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/native.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/native.js new file mode 100644 index 0000000..b22292c --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/native.js @@ -0,0 +1,4 @@ +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +export default { + randomUUID +}; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/nil.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/nil.js new file mode 100644 index 0000000..b36324c --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/parse.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/parse.js new file mode 100644 index 0000000..6421c5d --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/regex.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/regex.js new file mode 100644 index 0000000..3da8673 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/rng.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/rng.js new file mode 100644 index 0000000..6e65234 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/rng.js @@ -0,0 +1,18 @@ +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); +export default function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/sha1.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/sha1.js new file mode 100644 index 0000000..d3c2565 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/sha1.js @@ -0,0 +1,96 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/stringify.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/stringify.js new file mode 100644 index 0000000..a6e4c88 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! 
It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v1.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v1.js new file mode 100644 index 0000000..382e5d7 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v3.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v3.js new file mode 100644 index 0000000..09063b8 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v35.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v35.js new file mode 100644 index 0000000..3355e1f --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v4.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v4.js new file mode 100644 index 0000000..95ea879 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v5.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v5.js new file mode 100644 index 0000000..e87fe31 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/validate.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/validate.js new file mode 100644 index 0000000..f1cdc7a --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/version.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/version.js new file mode 100644 index 0000000..9363076 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-browser/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/index.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/index.js new file mode 100644 index 0000000..1db6f6d --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as 
v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/md5.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/md5.js new file mode 100644 index 0000000..4d68b04 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/md5.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/native.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/native.js new file mode 100644 index 0000000..f0d1992 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/native.js @@ -0,0 +1,4 @@ +import crypto from 'crypto'; +export default { + randomUUID: crypto.randomUUID +}; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/nil.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/nil.js new file mode 100644 index 0000000..b36324c --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/parse.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/parse.js new file mode 100644 index 0000000..6421c5d --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/regex.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/regex.js new file mode 100644 index 0000000..3da8673 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/rng.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/rng.js new file mode 100644 index 0000000..8006244 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/rng.js @@ -0,0 +1,12 @@ +import crypto from 'crypto'; +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; +export default function rng() { + if (poolPtr > rnds8Pool.length - 16) { + crypto.randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/sha1.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/sha1.js new file mode 100644 index 0000000..e23850b --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/sha1.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/stringify.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/stringify.js new file mode 100644 index 0000000..a6e4c88 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v1.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v1.js new file mode 100644 index 0000000..382e5d7 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v3.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v3.js new file mode 100644 index 0000000..09063b8 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v35.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v35.js new file mode 100644 index 0000000..3355e1f --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v4.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v4.js new file mode 100644 index 0000000..95ea879 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v5.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v5.js new file mode 100644 index 0000000..e87fe31 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/validate.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/validate.js new file mode 100644 index 0000000..f1cdc7a --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/esm-node/version.js b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/version.js new file mode 100644 index 0000000..9363076 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/esm-node/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/index.js b/node_modules/teeny-request/node_modules/uuid/dist/index.js new file mode 100644 index 0000000..88d676a --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + 
enumerable: true, + get: function () { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function () { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function () { + return _stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function () { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function () { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function () { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function () { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function () { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/md5-browser.js b/node_modules/teeny-request/node_modules/uuid/dist/md5-browser.js new file mode 100644 index 0000000..7a4582a --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/md5-browser.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. + */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, 
d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/md5.js b/node_modules/teeny-request/node_modules/uuid/dist/md5.js new file mode 100644 index 0000000..824d481 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/md5.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/native-browser.js b/node_modules/teeny-request/node_modules/uuid/dist/native-browser.js new file mode 100644 index 0000000..c2eea59 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/native-browser.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/native.js b/node_modules/teeny-request/node_modules/uuid/dist/native.js new file mode 100644 index 0000000..de80469 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/native.js @@ -0,0 +1,15 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/nil.js b/node_modules/teeny-request/node_modules/uuid/dist/nil.js new file mode 100644 index 0000000..7ade577 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/parse.js b/node_modules/teeny-request/node_modules/uuid/dist/parse.js new file mode 100644 index 0000000..4c69fc3 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
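+  // Illustrative sketch (values follow from the slices in this function): parsing
+  // the RFC 4122 DNS namespace UUID fills `arr` with the same bytes as its hex digits,
+  //   parse('6ba7b810-9dad-11d1-80b4-00c04fd430c8')
+  //   // -> Uint8Array [0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1,
+  //   //                0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8]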
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/regex.js b/node_modules/teeny-request/node_modules/uuid/dist/regex.js new file mode 100644 index 0000000..1ef91d6 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/rng-browser.js b/node_modules/teeny-request/node_modules/uuid/dist/rng-browser.js new file mode 100644 index 0000000..d067cdb --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/rng-browser.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/rng.js b/node_modules/teeny-request/node_modules/uuid/dist/rng.js new file mode 100644 index 0000000..3507f93 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/rng.js @@ -0,0 +1,24 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/sha1-browser.js b/node_modules/teeny-request/node_modules/uuid/dist/sha1-browser.js new file mode 100644 index 0000000..24cbced --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/sha1-browser.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/sha1.js b/node_modules/teeny-request/node_modules/uuid/dist/sha1.js new file mode 100644 index 0000000..03bdd63 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/sha1.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); 
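+// Usage sketch (assuming the Node `crypto` module required below): hashing the
+// standard 'abc' test vector returns a 20-byte Buffer,
+//   sha1('abc').toString('hex')
+//   // -> 'a9993e364706816aba3e25717850c26c9cd0d89d'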
+exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/stringify.js b/node_modules/teeny-request/node_modules/uuid/dist/stringify.js new file mode 100644 index 0000000..390bf89 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/uuid-bin.js b/node_modules/teeny-request/node_modules/uuid/dist/uuid-bin.js new file mode 100644 index 0000000..50a7a9f --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/uuid-bin.js @@ -0,0 +1,85 @@ +"use strict"; + +var _assert = _interopRequireDefault(require("assert")); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 '); + console.log(' uuid v4'); + console.log(' uuid v5 '); + console.log(' uuid --help'); + console.log('\nNote: may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +const args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} + +const version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + console.log((0, _v.default)()); + break; + + case 'v3': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v3 name not specified'); + (0, _assert.default)(namespace != null, 'v3 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v2.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v2.default.DNS; + } + + console.log((0, _v2.default)(name, namespace)); + break; + } + + case 'v4': + console.log((0, _v3.default)()); + break; + + case 'v5': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v5 name not specified'); + (0, _assert.default)(namespace != null, 'v5 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v4.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v4.default.DNS; + } + + console.log((0, _v4.default)(name, namespace)); + break; + } + + default: + usage(); + process.exit(1); +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/v1.js b/node_modules/teeny-request/node_modules/uuid/dist/v1.js new file mode 100644 index 0000000..125bc58 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). 
JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/v3.js b/node_modules/teeny-request/node_modules/uuid/dist/v3.js new file mode 100644 index 0000000..6b47ff5 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/v35.js b/node_modules/teeny-request/node_modules/uuid/dist/v35.js new file mode 100644 index 0000000..7c522d9 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/v4.js b/node_modules/teeny-request/node_modules/uuid/dist/v4.js new file mode 100644 index 0000000..959d698 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/v5.js b/node_modules/teeny-request/node_modules/uuid/dist/v5.js new file mode 100644 index 0000000..99d615e --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/validate.js b/node_modules/teeny-request/node_modules/uuid/dist/validate.js new file mode 100644 index 0000000..fd05215 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/dist/version.js b/node_modules/teeny-request/node_modules/uuid/dist/version.js new file mode 100644 index 0000000..f63af01 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/dist/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/teeny-request/node_modules/uuid/package.json b/node_modules/teeny-request/node_modules/uuid/package.json new file mode 100644 index 0000000..6cc3361 --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/package.json @@ -0,0 +1,135 @@ +{ + "name": "uuid", + "version": "9.0.1", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./dist/bin/uuid" + }, + "sideEffects": false, + "main": "./dist/index.js", + "exports": { + ".": { + "node": { + "module": "./dist/esm-node/index.js", + "require": "./dist/index.js", + "import": "./wrapper.mjs" + }, + "browser": { + "import": "./dist/esm-browser/index.js", + "require": "./dist/commonjs-browser/index.js" + }, + "default": "./dist/esm-browser/index.js" + }, + "./package.json": "./package.json" + }, + "module": "./dist/esm-node/index.js", + "browser": { + "./dist/md5.js": "./dist/md5-browser.js", + "./dist/native.js": "./dist/native-browser.js", + "./dist/rng.js": "./dist/rng-browser.js", + "./dist/sha1.js": "./dist/sha1-browser.js", + "./dist/esm-node/index.js": "./dist/esm-browser/index.js" + }, + "files": [ + "CHANGELOG.md", + "CONTRIBUTING.md", + "LICENSE.md", + "README.md", + "dist", + "wrapper.mjs" + ], + "devDependencies": { + "@babel/cli": "7.18.10", + "@babel/core": "7.18.10", + "@babel/eslint-parser": "7.18.9", + "@babel/preset-env": "7.18.10", + "@commitlint/cli": "17.0.3", + "@commitlint/config-conventional": "17.0.3", + "bundlewatch": "0.3.3", + "eslint": "8.21.0", + "eslint-config-prettier": "8.5.0", + "eslint-config-standard": "17.0.0", + "eslint-plugin-import": "2.26.0", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-promise": "6.0.0", + "husky": "8.0.1", + "jest": "28.1.3", + "lint-staged": "13.0.3", + "npm-run-all": "4.1.5", + "optional-dev-dependency": "2.0.1", + "prettier": "2.7.1", + "random-seed": "0.3.0", + "runmd": "1.3.9", + "standard-version": "9.5.0" + }, + "optionalDevDependencies": { + "@wdio/browserstack-service": "7.16.10", + "@wdio/cli": "7.16.10", + "@wdio/jasmine-framework": "7.16.6", + "@wdio/local-runner": "7.16.10", + "@wdio/spec-reporter": "7.16.9", + "@wdio/static-server-service": "7.16.6" + }, + "scripts": { + "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build", + "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build", + "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test", + "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test", + "examples:node:jest:test": "cd examples/node-jest && npm install && npm test", + "prepare": "cd $( git rev-parse --show-toplevel ) && husky install", + "lint": "npm run eslint:check && npm run prettier:check", + "eslint:check": "eslint src/ test/ examples/ *.js", + "eslint:fix": "eslint --fix src/ test/ examples/ *.js", + "pretest": "[ -n $CI ] || npm run build", + "test": "BABEL_ENV=commonjsNode node --throw-deprecation 
node_modules/.bin/jest test/unit/", + "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all --parallel examples:browser:**", + "test:browser": "wdio run ./wdio.conf.js", + "pretest:node": "npm run build", + "test:node": "npm-run-all --parallel examples:node:**", + "test:pack": "./scripts/testpack.sh", + "pretest:benchmark": "npm run build", + "test:benchmark": "cd examples/benchmark && npm install && npm test", + "prettier:check": "prettier --check '**/*.{js,jsx,json,md}'", + "prettier:fix": "prettier --write '**/*.{js,jsx,json,md}'", + "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json", + "md": "runmd --watch --output=README.md README_js.md", + "docs": "( node --version | grep -q 'v18' ) && ( npm run build && npx runmd --output=README.md README_js.md )", + "docs:diff": "npm run docs && git diff --quiet README.md", + "build": "./scripts/build.sh", + "prepack": "npm run build", + "release": "standard-version --no-verify" + }, + "repository": { + "type": "git", + "url": "https://github.com/uuidjs/uuid.git" + }, + "lint-staged": { + "*.{js,jsx,json,md}": [ + "prettier --write" + ], + "*.{js,jsx}": [ + "eslint --fix" + ] + }, + "standard-version": { + "scripts": { + "postchangelog": "prettier --write CHANGELOG.md" + } + } +} diff --git a/node_modules/teeny-request/node_modules/uuid/wrapper.mjs b/node_modules/teeny-request/node_modules/uuid/wrapper.mjs new file mode 100644 index 0000000..c31e9ce --- /dev/null +++ b/node_modules/teeny-request/node_modules/uuid/wrapper.mjs @@ -0,0 +1,10 @@ +import uuid from './dist/index.js'; +export const v1 = uuid.v1; +export const v3 = uuid.v3; +export const v4 = uuid.v4; +export const v5 = uuid.v5; +export const NIL = uuid.NIL; +export const version = uuid.version; +export const validate = uuid.validate; +export const stringify = uuid.stringify; +export const parse = uuid.parse; diff --git a/node_modules/teeny-request/package.json b/node_modules/teeny-request/package.json new file mode 100644 index 0000000..d24f9dd --- /dev/null +++ b/node_modules/teeny-request/package.json @@ -0,0 +1,68 @@ +{ + "name": "teeny-request", + "version": "9.0.0", + "description": "Like request, but smaller.", + "main": "./build/src/index.js", + "types": "./build/src/index.d.ts", + "engines": { + "node": ">=14" + }, + "scripts": { + "test": "c8 mocha build/test", + "compile": "tsc -p .", + "pretest": "npm run compile", + "lint": "gts check", + "clean": "gts clean", + "fix": "gts fix", + "prepare": "npm run compile", + "docs": "compodoc src/", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "samples-test": "echo no sample tests!", + "system-test": "echo no system tests!", + "precompile": "gts clean" + }, + "files": [ + "build/src" + ], + "repository": "googleapis/teeny-request", + "keywords": [ + "request", + "node-fetch", + "fetch" + ], + "author": "fhinkel", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/googleapis/teeny-request/issues" + }, + "homepage": "https://github.com/googleapis/teeny-request#readme", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.9", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "devDependencies": { + "@babel/plugin-proposal-private-methods": "^7.18.6", + "@compodoc/compodoc": "^1.1.9", + "@types/mocha": "^10.0.0", + "@types/node-fetch": "^2.5.7", + "@types/sinon": "^10.0.0", + "@types/uuid": "^9.0.0", + "c8": "^8.0.0", + "codecov": "^3.1.0", + "gts": "^3.1.1", + "linkinator": "^4.0.0", 
+ "mocha": "^10.0.0", + "nock": "^13.0.0", + "sinon": "^15.0.0", + "typescript": "^5.1.6" + }, + "nyc": { + "exclude": [ + "build/test" + ] + } +} diff --git a/node_modules/tr46/.npmignore b/node_modules/tr46/.npmignore new file mode 100644 index 0000000..96e9161 --- /dev/null +++ b/node_modules/tr46/.npmignore @@ -0,0 +1,4 @@ +scripts/ +test/ + +!lib/mapping_table.json diff --git a/node_modules/tr46/index.js b/node_modules/tr46/index.js new file mode 100644 index 0000000..9ce12ca --- /dev/null +++ b/node_modules/tr46/index.js @@ -0,0 +1,193 @@ +"use strict"; + +var punycode = require("punycode"); +var mappingTable = require("./lib/mappingTable.json"); + +var PROCESSING_OPTIONS = { + TRANSITIONAL: 0, + NONTRANSITIONAL: 1 +}; + +function normalize(str) { // fix bug in v8 + return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); +} + +function findStatus(val) { + var start = 0; + var end = mappingTable.length - 1; + + while (start <= end) { + var mid = Math.floor((start + end) / 2); + + var target = mappingTable[mid]; + if (target[0][0] <= val && target[0][1] >= val) { + return target; + } else if (target[0][0] > val) { + end = mid - 1; + } else { + start = mid + 1; + } + } + + return null; +} + +var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; + +function countSymbols(string) { + return string + // replace every surrogate pair with a BMP symbol + .replace(regexAstralSymbols, '_') + // then get the length + .length; +} + +function mapChars(domain_name, useSTD3, processing_option) { + var hasError = false; + var processed = ""; + + var len = countSymbols(domain_name); + for (var i = 0; i < len; ++i) { + var codePoint = domain_name.codePointAt(i); + var status = findStatus(codePoint); + + switch (status[1]) { + case "disallowed": + hasError = true; + processed += String.fromCodePoint(codePoint); + break; + case "ignored": + break; + case "mapped": + processed += String.fromCodePoint.apply(String, status[2]); + break; + case "deviation": + if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { + processed += String.fromCodePoint.apply(String, status[2]); + } else { + processed += String.fromCodePoint(codePoint); + } + break; + case "valid": + processed += String.fromCodePoint(codePoint); + break; + case "disallowed_STD3_mapped": + if (useSTD3) { + hasError = true; + processed += String.fromCodePoint(codePoint); + } else { + processed += String.fromCodePoint.apply(String, status[2]); + } + break; + case "disallowed_STD3_valid": + if (useSTD3) { + hasError = true; + } + + processed += String.fromCodePoint(codePoint); + break; + } + } + + return { + string: processed, + error: hasError + }; +} + +var combiningMarksRegex = 
/[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; + +function validateLabel(label, processing_option) { + if (label.substr(0, 4) === "xn--") { + label = punycode.toUnicode(label); + processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; + } + + var error = false; + + if (normalize(label) !== label || + (label[3] === "-" && label[4] === "-") || + label[0] === "-" || label[label.length - 1] === "-" || + label.indexOf(".") !== -1 || + label.search(combiningMarksRegex) === 0) { + error = true; + } + + var len = countSymbols(label); + for (var i = 0; i < len; ++i) { + var status = findStatus(label.codePointAt(i)); + if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || + (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && + status[1] !== "valid" && status[1] !== "deviation")) { + error = true; + break; + } + } + + return { + label: label, + 
error: error + }; +} + +function processing(domain_name, useSTD3, processing_option) { + var result = mapChars(domain_name, useSTD3, processing_option); + result.string = normalize(result.string); + + var labels = result.string.split("."); + for (var i = 0; i < labels.length; ++i) { + try { + var validation = validateLabel(labels[i]); + labels[i] = validation.label; + result.error = result.error || validation.error; + } catch(e) { + result.error = true; + } + } + + return { + string: labels.join("."), + error: result.error + }; +} + +module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { + var result = processing(domain_name, useSTD3, processing_option); + var labels = result.string.split("."); + labels = labels.map(function(l) { + try { + return punycode.toASCII(l); + } catch(e) { + result.error = true; + return l; + } + }); + + if (verifyDnsLength) { + var total = labels.slice(0, labels.length - 1).join(".").length; + if (total.length > 253 || total.length === 0) { + result.error = true; + } + + for (var i=0; i < labels.length; ++i) { + if (labels.length > 63 || labels.length === 0) { + result.error = true; + break; + } + } + } + + if (result.error) return null; + return labels.join("."); +}; + +module.exports.toUnicode = function(domain_name, useSTD3) { + var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); + + return { + domain: result.string, + error: result.error + }; +}; + +module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; diff --git a/node_modules/tr46/lib/.gitkeep b/node_modules/tr46/lib/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/tr46/lib/mappingTable.json b/node_modules/tr46/lib/mappingTable.json new file mode 100644 index 0000000..89cf19a --- /dev/null +++ b/node_modules/tr46/lib/mappingTable.json @@ -0,0 +1 @@ 
+[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[296,296],"m
apped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"mapped",[464]
],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"valid",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"valid",[],"NV
8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[1015,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[1027,1027],"ma
pped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapped",[1205]]
,[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disallowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[1387]],[[1340
,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2208,2208],"
valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallowed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787],"valid"],
[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,3331],"valid"
],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[3955,3955],"m
apped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallowed"],[[502
4,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"valid"],[[741
8,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]],[[7693,769
3],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"valid"],[[7834,
7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7987]],[[799
6,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"mapped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,953]],[[8132,
8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped",[48]],[[8
321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528],"mapped",[
49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"mapped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331],"mapped",
[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426],"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]],[[9438,94
38],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[11244,11247]
,"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,11418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],[[11430,114
30],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20031]],[[120
36,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"mapped",[3070
7]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[12241,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,12290],"map
ped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[4391]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,12673],"mapp
ed",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12844,12844],
"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mapped",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",[37329]],[[
12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"mapped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapped",[12523]
],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[13118,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",[12510,1248
3,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"mapped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50]],[[13217,
13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]],[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904,19967],"va
lid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped",[42793]],[
[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"],[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[42930,42930]
,"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"mapped",[506
1]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"mapped",[36
335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907,63907],"ma
pped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[23429]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016,64016],"ma
pped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"mapped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped",[24281]],
[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"mapped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"mapped",[14
92]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"mapped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped",[1576,1582
]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped",[1574,1585
]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1603,1580]],
[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795],"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804],"mapped",[
1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,1580,1610]],[[
64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,65086],"map
ped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[65273,65274]
,"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"mapped",[106
30]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[65490,65490]
,"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[66620]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]],[[66591,6
6591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[68737,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,68747],"mapp
ed",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowed"],[[70460
,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disallowed"],[[
93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119857,119857]
,"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[119955,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,119965],"disa
llowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped",[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[[120078,120
078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,120178],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],"mapped",[1
13]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"mapped",[117]
],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"mapped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mapped",[121]],[
[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapped",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped",[8711]],[[
120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120623],"mappe
d",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[947]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]],[[120733,1
20733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[121403,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125124],"valid
"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapped",[1589]]
,[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],"disallowed
_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[127491,1275
03],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513,128528],"v
alid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",[21254]],[[
194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[194710,194710],"
mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]],[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,194811],"mapped"
,[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[16687]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915],"mapped",
[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[34694]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016],"mapped",[
17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disallowed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429],"disallowe
d"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]] \ No newline at end of file diff --git a/node_modules/tr46/package.json b/node_modules/tr46/package.json new file mode 100644 index 0000000..b6826da --- /dev/null +++ b/node_modules/tr46/package.json @@ -0,0 +1,31 @@ +{ + "name": "tr46", + "version": "0.0.3", + "description": "An implementation of the Unicode TR46 spec", + "main": "index.js", + "scripts": { + "test": "mocha", + "pretest": "node scripts/getLatestUnicodeTests.js", + "prepublish": "node scripts/generateMappingTable.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/Sebmaster/tr46.js.git" + }, + "keywords": [ + "unicode", + "tr46", + "url", + "whatwg" + ], + "author": "Sebastian Mayr ", + "license": "MIT", + "bugs": { + "url": "https://github.com/Sebmaster/tr46.js/issues" + }, + "homepage": "https://github.com/Sebmaster/tr46.js#readme", + "devDependencies": { + "mocha": "^2.2.5", + "request": "^2.57.0" + } +} diff --git a/node_modules/undici-types/README.md b/node_modules/undici-types/README.md new file mode 100644 index 0000000..20a721c --- /dev/null +++ b/node_modules/undici-types/README.md @@ -0,0 +1,6 @@ +# undici-types + +This package is a dual-publish of the [undici](https://www.npmjs.com/package/undici) library types. The `undici` package **still contains types**. This package is for users who _only_ need undici types (such as for `@types/node`). It is published alongside every release of `undici`, so you can always use the same version. + +- [GitHub nodejs/undici](https://github.com/nodejs/undici) +- [Undici Documentation](https://undici.nodejs.org/#/) diff --git a/node_modules/undici-types/agent.d.ts b/node_modules/undici-types/agent.d.ts new file mode 100644 index 0000000..58081ce --- /dev/null +++ b/node_modules/undici-types/agent.d.ts @@ -0,0 +1,31 @@ +import { URL } from 'url' +import Pool from './pool' +import Dispatcher from "./dispatcher"; + +export default Agent + +declare class Agent extends Dispatcher{ + constructor(opts?: Agent.Options) + /** `true` after `dispatcher.close()` has been called. */ + closed: boolean; + /** `true` after `dispatcher.destroyed()` has been called or `dispatcher.close()` has been called and the dispatcher shutdown has completed. */ + destroyed: boolean; + /** Dispatches a request. */ + dispatch(options: Agent.DispatchOptions, handler: Dispatcher.DispatchHandlers): boolean; +} + +declare namespace Agent { + export interface Options extends Pool.Options { + /** Default: `(origin, opts) => new Pool(origin, opts)`. */ + factory?(origin: string | URL, opts: Object): Dispatcher; + /** Integer. Default: `0` */ + maxRedirections?: number; + + interceptors?: { Agent?: readonly Dispatcher.DispatchInterceptor[] } & Pool.Options["interceptors"] + } + + export interface DispatchOptions extends Dispatcher.DispatchOptions { + /** Integer. 
*/ + maxRedirections?: number; + } +} diff --git a/node_modules/undici-types/api.d.ts b/node_modules/undici-types/api.d.ts new file mode 100644 index 0000000..400341d --- /dev/null +++ b/node_modules/undici-types/api.d.ts @@ -0,0 +1,43 @@ +import { URL, UrlObject } from 'url' +import { Duplex } from 'stream' +import Dispatcher from './dispatcher' + +export { + request, + stream, + pipeline, + connect, + upgrade, +} + +/** Performs an HTTP request. */ +declare function request( + url: string | URL | UrlObject, + options?: { dispatcher?: Dispatcher } & Omit & Partial>, +): Promise; + +/** A faster version of `request`. */ +declare function stream( + url: string | URL | UrlObject, + options: { dispatcher?: Dispatcher } & Omit, + factory: Dispatcher.StreamFactory +): Promise; + +/** For easy use with `stream.pipeline`. */ +declare function pipeline( + url: string | URL | UrlObject, + options: { dispatcher?: Dispatcher } & Omit, + handler: Dispatcher.PipelineHandler +): Duplex; + +/** Starts two-way communications with the requested resource. */ +declare function connect( + url: string | URL | UrlObject, + options?: { dispatcher?: Dispatcher } & Omit +): Promise; + +/** Upgrade to a different protocol. */ +declare function upgrade( + url: string | URL | UrlObject, + options?: { dispatcher?: Dispatcher } & Omit +): Promise; diff --git a/node_modules/undici-types/balanced-pool.d.ts b/node_modules/undici-types/balanced-pool.d.ts new file mode 100644 index 0000000..d1e9375 --- /dev/null +++ b/node_modules/undici-types/balanced-pool.d.ts @@ -0,0 +1,18 @@ +import Pool from './pool' +import Dispatcher from './dispatcher' +import { URL } from 'url' + +export default BalancedPool + +declare class BalancedPool extends Dispatcher { + constructor(url: string | string[] | URL | URL[], options?: Pool.Options); + + addUpstream(upstream: string | URL): BalancedPool; + removeUpstream(upstream: string | URL): BalancedPool; + upstreams: Array; + + /** `true` after `pool.close()` has been called. */ + closed: boolean; + /** `true` after `pool.destroyed()` has been called or `pool.close()` has been called and the pool shutdown has completed. 
*/ + destroyed: boolean; +} diff --git a/node_modules/undici-types/cache.d.ts b/node_modules/undici-types/cache.d.ts new file mode 100644 index 0000000..4c33335 --- /dev/null +++ b/node_modules/undici-types/cache.d.ts @@ -0,0 +1,36 @@ +import type { RequestInfo, Response, Request } from './fetch' + +export interface CacheStorage { + match (request: RequestInfo, options?: MultiCacheQueryOptions): Promise, + has (cacheName: string): Promise, + open (cacheName: string): Promise, + delete (cacheName: string): Promise, + keys (): Promise +} + +declare const CacheStorage: { + prototype: CacheStorage + new(): CacheStorage +} + +export interface Cache { + match (request: RequestInfo, options?: CacheQueryOptions): Promise, + matchAll (request?: RequestInfo, options?: CacheQueryOptions): Promise, + add (request: RequestInfo): Promise, + addAll (requests: RequestInfo[]): Promise, + put (request: RequestInfo, response: Response): Promise, + delete (request: RequestInfo, options?: CacheQueryOptions): Promise, + keys (request?: RequestInfo, options?: CacheQueryOptions): Promise +} + +export interface CacheQueryOptions { + ignoreSearch?: boolean, + ignoreMethod?: boolean, + ignoreVary?: boolean +} + +export interface MultiCacheQueryOptions extends CacheQueryOptions { + cacheName?: string +} + +export declare const caches: CacheStorage diff --git a/node_modules/undici-types/client.d.ts b/node_modules/undici-types/client.d.ts new file mode 100644 index 0000000..74948b1 --- /dev/null +++ b/node_modules/undici-types/client.d.ts @@ -0,0 +1,97 @@ +import { URL } from 'url' +import { TlsOptions } from 'tls' +import Dispatcher from './dispatcher' +import buildConnector from "./connector"; + +/** + * A basic HTTP/1.1 client, mapped on top a single TCP/TLS connection. Pipelining is disabled by default. + */ +export class Client extends Dispatcher { + constructor(url: string | URL, options?: Client.Options); + /** Property to get and set the pipelining factor. */ + pipelining: number; + /** `true` after `client.close()` has been called. */ + closed: boolean; + /** `true` after `client.destroyed()` has been called or `client.close()` has been called and the client shutdown has completed. */ + destroyed: boolean; +} + +export declare namespace Client { + export interface OptionsInterceptors { + Client: readonly Dispatcher.DispatchInterceptor[]; + } + export interface Options { + /** TODO */ + interceptors?: OptionsInterceptors; + /** The maximum length of request headers in bytes. Default: Node.js' `--max-http-header-size` or `16384` (16KiB). */ + maxHeaderSize?: number; + /** The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers (Node 14 and above only). Default: `300e3` milliseconds (300s). */ + headersTimeout?: number; + /** @deprecated unsupported socketTimeout, use headersTimeout & bodyTimeout instead */ + socketTimeout?: never; + /** @deprecated unsupported requestTimeout, use headersTimeout & bodyTimeout instead */ + requestTimeout?: never; + /** TODO */ + connectTimeout?: number; + /** The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Default: `300e3` milliseconds (300s). */ + bodyTimeout?: number; + /** @deprecated unsupported idleTimeout, use keepAliveTimeout instead */ + idleTimeout?: never; + /** @deprecated unsupported keepAlive, use pipelining=0 instead */ + keepAlive?: never; + /** the timeout, in milliseconds, after which a socket without active requests will time out. 
Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. Default: `4e3` milliseconds (4s). */ + keepAliveTimeout?: number; + /** @deprecated unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead */ + maxKeepAliveTimeout?: never; + /** the maximum allowed `idleTimeout`, in milliseconds, when overridden by *keep-alive* hints from the server. Default: `600e3` milliseconds (10min). */ + keepAliveMaxTimeout?: number; + /** A number of milliseconds subtracted from server *keep-alive* hints when overriding `idleTimeout` to account for timing inaccuracies caused by e.g. transport latency. Default: `1e3` milliseconds (1s). */ + keepAliveTimeoutThreshold?: number; + /** TODO */ + socketPath?: string; + /** The amount of concurrent requests to be sent over the single TCP/TLS connection according to [RFC7230](https://tools.ietf.org/html/rfc7230#section-6.3.2). Default: `1`. */ + pipelining?: number; + /** @deprecated use the connect option instead */ + tls?: never; + /** If `true`, an error is thrown when the request content-length header doesn't match the length of the request body. Default: `true`. */ + strictContentLength?: boolean; + /** TODO */ + maxCachedSessions?: number; + /** TODO */ + maxRedirections?: number; + /** TODO */ + connect?: buildConnector.BuildOptions | buildConnector.connector; + /** TODO */ + maxRequestsPerClient?: number; + /** TODO */ + localAddress?: string; + /** Max response body size in bytes, -1 is disabled */ + maxResponseSize?: number; + /** Enables a family autodetection algorithm that loosely implements section 5 of RFC 8305. */ + autoSelectFamily?: boolean; + /** The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. */ + autoSelectFamilyAttemptTimeout?: number; + /** + * @description Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation. + * @default false + */ + allowH2?: boolean; + /** + * @description Dictates the maximum number of concurrent streams for a single H2 session. It can be overriden by a SETTINGS remote frame. 
+ * @default 100 + */ + maxConcurrentStreams?: number + } + export interface SocketInfo { + localAddress?: string + localPort?: number + remoteAddress?: string + remotePort?: number + remoteFamily?: string + timeout?: number + bytesWritten?: number + bytesRead?: number + } +} + +export default Client; diff --git a/node_modules/undici-types/connector.d.ts b/node_modules/undici-types/connector.d.ts new file mode 100644 index 0000000..bd92433 --- /dev/null +++ b/node_modules/undici-types/connector.d.ts @@ -0,0 +1,34 @@ +import { TLSSocket, ConnectionOptions } from 'tls' +import { IpcNetConnectOpts, Socket, TcpNetConnectOpts } from 'net' + +export default buildConnector +declare function buildConnector (options?: buildConnector.BuildOptions): buildConnector.connector + +declare namespace buildConnector { + export type BuildOptions = (ConnectionOptions | TcpNetConnectOpts | IpcNetConnectOpts) & { + allowH2?: boolean; + maxCachedSessions?: number | null; + socketPath?: string | null; + timeout?: number | null; + port?: number; + keepAlive?: boolean | null; + keepAliveInitialDelay?: number | null; + } + + export interface Options { + hostname: string + host?: string + protocol: string + port: string + servername?: string + localAddress?: string | null + httpSocket?: Socket + } + + export type Callback = (...args: CallbackArgs) => void + type CallbackArgs = [null, Socket | TLSSocket] | [Error, null] + + export interface connector { + (options: buildConnector.Options, callback: buildConnector.Callback): void + } +} diff --git a/node_modules/undici-types/content-type.d.ts b/node_modules/undici-types/content-type.d.ts new file mode 100644 index 0000000..f2a87f1 --- /dev/null +++ b/node_modules/undici-types/content-type.d.ts @@ -0,0 +1,21 @@ +/// + +interface MIMEType { + type: string + subtype: string + parameters: Map + essence: string +} + +/** + * Parse a string to a {@link MIMEType} object. Returns `failure` if the string + * couldn't be parsed. + * @see https://mimesniff.spec.whatwg.org/#parse-a-mime-type + */ +export function parseMIMEType (input: string): 'failure' | MIMEType + +/** + * Convert a MIMEType object to a string. 
+ * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type + */ +export function serializeAMimeType (mimeType: MIMEType): string diff --git a/node_modules/undici-types/cookies.d.ts b/node_modules/undici-types/cookies.d.ts new file mode 100644 index 0000000..aa38cae --- /dev/null +++ b/node_modules/undici-types/cookies.d.ts @@ -0,0 +1,28 @@ +/// + +import type { Headers } from './fetch' + +export interface Cookie { + name: string + value: string + expires?: Date | number + maxAge?: number + domain?: string + path?: string + secure?: boolean + httpOnly?: boolean + sameSite?: 'Strict' | 'Lax' | 'None' + unparsed?: string[] +} + +export function deleteCookie ( + headers: Headers, + name: string, + attributes?: { name?: string, domain?: string } +): void + +export function getCookies (headers: Headers): Record + +export function getSetCookies (headers: Headers): Cookie[] + +export function setCookie (headers: Headers, cookie: Cookie): void diff --git a/node_modules/undici-types/diagnostics-channel.d.ts b/node_modules/undici-types/diagnostics-channel.d.ts new file mode 100644 index 0000000..85d4482 --- /dev/null +++ b/node_modules/undici-types/diagnostics-channel.d.ts @@ -0,0 +1,67 @@ +import { Socket } from "net"; +import { URL } from "url"; +import Connector from "./connector"; +import Dispatcher from "./dispatcher"; + +declare namespace DiagnosticsChannel { + interface Request { + origin?: string | URL; + completed: boolean; + method?: Dispatcher.HttpMethod; + path: string; + headers: string; + addHeader(key: string, value: string): Request; + } + interface Response { + statusCode: number; + statusText: string; + headers: Array; + } + type Error = unknown; + interface ConnectParams { + host: URL["host"]; + hostname: URL["hostname"]; + protocol: URL["protocol"]; + port: URL["port"]; + servername: string | null; + } + type Connector = Connector.connector; + export interface RequestCreateMessage { + request: Request; + } + export interface RequestBodySentMessage { + request: Request; + } + export interface RequestHeadersMessage { + request: Request; + response: Response; + } + export interface RequestTrailersMessage { + request: Request; + trailers: Array; + } + export interface RequestErrorMessage { + request: Request; + error: Error; + } + export interface ClientSendHeadersMessage { + request: Request; + headers: string; + socket: Socket; + } + export interface ClientBeforeConnectMessage { + connectParams: ConnectParams; + connector: Connector; + } + export interface ClientConnectedMessage { + socket: Socket; + connectParams: ConnectParams; + connector: Connector; + } + export interface ClientConnectErrorMessage { + error: Error; + socket: Socket; + connectParams: ConnectParams; + connector: Connector; + } +} diff --git a/node_modules/undici-types/dispatcher.d.ts b/node_modules/undici-types/dispatcher.d.ts new file mode 100644 index 0000000..816db19 --- /dev/null +++ b/node_modules/undici-types/dispatcher.d.ts @@ -0,0 +1,241 @@ +import { URL } from 'url' +import { Duplex, Readable, Writable } from 'stream' +import { EventEmitter } from 'events' +import { Blob } from 'buffer' +import { IncomingHttpHeaders } from './header' +import BodyReadable from './readable' +import { FormData } from './formdata' +import Errors from './errors' + +type AbortSignal = unknown; + +export default Dispatcher + +/** Dispatcher is the core API used to dispatch requests. */ +declare class Dispatcher extends EventEmitter { + /** Dispatches a request. 
This API is expected to evolve through semver-major versions and is less stable than the preceding higher level APIs. It is primarily intended for library developers who implement higher level APIs on top of this. */ + dispatch(options: Dispatcher.DispatchOptions, handler: Dispatcher.DispatchHandlers): boolean; + /** Starts two-way communications with the requested resource. */ + connect(options: Dispatcher.ConnectOptions): Promise<Dispatcher.ConnectData>; + connect(options: Dispatcher.ConnectOptions, callback: (err: Error | null, data: Dispatcher.ConnectData) => void): void; + /** Performs an HTTP request. */ + request(options: Dispatcher.RequestOptions): Promise<Dispatcher.ResponseData>; + request(options: Dispatcher.RequestOptions, callback: (err: Error | null, data: Dispatcher.ResponseData) => void): void; + /** For easy use with `stream.pipeline`. */ + pipeline(options: Dispatcher.PipelineOptions, handler: Dispatcher.PipelineHandler): Duplex; + /** A faster version of `Dispatcher.request`. */ + stream(options: Dispatcher.RequestOptions, factory: Dispatcher.StreamFactory): Promise<Dispatcher.StreamData>; + stream(options: Dispatcher.RequestOptions, factory: Dispatcher.StreamFactory, callback: (err: Error | null, data: Dispatcher.StreamData) => void): void; + /** Upgrade to a different protocol. */ + upgrade(options: Dispatcher.UpgradeOptions): Promise<Dispatcher.UpgradeData>; + upgrade(options: Dispatcher.UpgradeOptions, callback: (err: Error | null, data: Dispatcher.UpgradeData) => void): void; + /** Closes the client and gracefully waits for enqueued requests to complete before invoking the callback (or returning a promise if no callback is provided). */ + close(): Promise<void>; + close(callback: () => void): void; + /** Destroy the client abruptly with the given err. All the pending and running requests will be asynchronously aborted and error. Waits until socket is closed before invoking the callback (or returning a promise if no callback is provided). Since this operation is asynchronously dispatched there might still be some progress on dispatched requests. 
*/ + destroy(): Promise; + destroy(err: Error | null): Promise; + destroy(callback: () => void): void; + destroy(err: Error | null, callback: () => void): void; + + on(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + on(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + on(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + on(eventName: 'drain', callback: (origin: URL) => void): this; + + + once(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + once(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + once(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + once(eventName: 'drain', callback: (origin: URL) => void): this; + + + off(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + off(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + off(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + off(eventName: 'drain', callback: (origin: URL) => void): this; + + + addListener(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + addListener(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + addListener(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + addListener(eventName: 'drain', callback: (origin: URL) => void): this; + + removeListener(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + removeListener(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + removeListener(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + removeListener(eventName: 'drain', callback: (origin: URL) => void): this; + + prependListener(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + prependListener(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + prependListener(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + prependListener(eventName: 'drain', callback: (origin: URL) => void): this; + + prependOnceListener(eventName: 'connect', callback: (origin: URL, targets: readonly Dispatcher[]) => void): this; + prependOnceListener(eventName: 'disconnect', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + prependOnceListener(eventName: 'connectionError', callback: (origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void): this; + prependOnceListener(eventName: 'drain', callback: (origin: URL) => void): this; + + listeners(eventName: 'connect'): ((origin: URL, targets: readonly Dispatcher[]) => void)[] + listeners(eventName: 'disconnect'): ((origin: URL, targets: readonly 
Dispatcher[], error: Errors.UndiciError) => void)[]; + listeners(eventName: 'connectionError'): ((origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void)[]; + listeners(eventName: 'drain'): ((origin: URL) => void)[]; + + rawListeners(eventName: 'connect'): ((origin: URL, targets: readonly Dispatcher[]) => void)[] + rawListeners(eventName: 'disconnect'): ((origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void)[]; + rawListeners(eventName: 'connectionError'): ((origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError) => void)[]; + rawListeners(eventName: 'drain'): ((origin: URL) => void)[]; + + emit(eventName: 'connect', origin: URL, targets: readonly Dispatcher[]): boolean; + emit(eventName: 'disconnect', origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError): boolean; + emit(eventName: 'connectionError', origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError): boolean; + emit(eventName: 'drain', origin: URL): boolean; +} + +declare namespace Dispatcher { + export interface DispatchOptions { + origin?: string | URL; + path: string; + method: HttpMethod; + /** Default: `null` */ + body?: string | Buffer | Uint8Array | Readable | null | FormData; + /** Default: `null` */ + headers?: IncomingHttpHeaders | string[] | null; + /** Query string params to be embedded in the request URL. Default: `null` */ + query?: Record; + /** Whether the requests can be safely retried or not. If `false` the request won't be sent until all preceding requests in the pipeline have completed. Default: `true` if `method` is `HEAD` or `GET`. */ + idempotent?: boolean; + /** Whether the response is expected to take a long time and would end up blocking the pipeline. When this is set to `true` further pipelining will be avoided on the same connection until headers have been received. */ + blocking?: boolean; + /** Upgrade the request. Should be used to specify the kind of upgrade i.e. `'Websocket'`. Default: `method === 'CONNECT' || null`. */ + upgrade?: boolean | string | null; + /** The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers. Defaults to 300 seconds. */ + headersTimeout?: number | null; + /** The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use 0 to disable it entirely. Defaults to 300 seconds. */ + bodyTimeout?: number | null; + /** Whether the request should stablish a keep-alive or not. Default `false` */ + reset?: boolean; + /** Whether Undici should throw an error upon receiving a 4xx or 5xx response from the server. 
Defaults to false */ + throwOnError?: boolean; + /** For H2, it appends the expect: 100-continue header, and halts the request body until a 100-continue is received from the remote server*/ + expectContinue?: boolean; + } + export interface ConnectOptions { + path: string; + /** Default: `null` */ + headers?: IncomingHttpHeaders | string[] | null; + /** Default: `null` */ + signal?: AbortSignal | EventEmitter | null; + /** This argument parameter is passed through to `ConnectData` */ + opaque?: unknown; + /** Default: 0 */ + maxRedirections?: number; + /** Default: `null` */ + responseHeader?: 'raw' | null; + } + export interface RequestOptions extends DispatchOptions { + /** Default: `null` */ + opaque?: unknown; + /** Default: `null` */ + signal?: AbortSignal | EventEmitter | null; + /** Default: 0 */ + maxRedirections?: number; + /** Default: `null` */ + onInfo?: (info: { statusCode: number, headers: Record }) => void; + /** Default: `null` */ + responseHeader?: 'raw' | null; + /** Default: `64 KiB` */ + highWaterMark?: number; + } + export interface PipelineOptions extends RequestOptions { + /** `true` if the `handler` will return an object stream. Default: `false` */ + objectMode?: boolean; + } + export interface UpgradeOptions { + path: string; + /** Default: `'GET'` */ + method?: string; + /** Default: `null` */ + headers?: IncomingHttpHeaders | string[] | null; + /** A string of comma separated protocols, in descending preference order. Default: `'Websocket'` */ + protocol?: string; + /** Default: `null` */ + signal?: AbortSignal | EventEmitter | null; + /** Default: 0 */ + maxRedirections?: number; + /** Default: `null` */ + responseHeader?: 'raw' | null; + } + export interface ConnectData { + statusCode: number; + headers: IncomingHttpHeaders; + socket: Duplex; + opaque: unknown; + } + export interface ResponseData { + statusCode: number; + headers: IncomingHttpHeaders; + body: BodyReadable & BodyMixin; + trailers: Record; + opaque: unknown; + context: object; + } + export interface PipelineHandlerData { + statusCode: number; + headers: IncomingHttpHeaders; + opaque: unknown; + body: BodyReadable; + context: object; + } + export interface StreamData { + opaque: unknown; + trailers: Record; + } + export interface UpgradeData { + headers: IncomingHttpHeaders; + socket: Duplex; + opaque: unknown; + } + export interface StreamFactoryData { + statusCode: number; + headers: IncomingHttpHeaders; + opaque: unknown; + context: object; + } + export type StreamFactory = (data: StreamFactoryData) => Writable; + export interface DispatchHandlers { + /** Invoked before request is dispatched on socket. May be invoked multiple times when a request is retried when the request at the head of the pipeline fails. */ + onConnect?(abort: () => void): void; + /** Invoked when an error has occurred. */ + onError?(err: Error): void; + /** Invoked when request is upgraded either due to a `Upgrade` header or `CONNECT` method. */ + onUpgrade?(statusCode: number, headers: Buffer[] | string[] | null, socket: Duplex): void; + /** Invoked when statusCode and headers have been received. May be invoked multiple times due to 1xx informational headers. */ + onHeaders?(statusCode: number, headers: Buffer[] | string[] | null, resume: () => void): boolean; + /** Invoked when response payload data is received. */ + onData?(chunk: Buffer): boolean; + /** Invoked when response payload and trailers have been received and the request has completed. 
*/ + onComplete?(trailers: string[] | null): void; + /** Invoked when a body chunk is sent to the server. May be invoked multiple times for chunked requests */ + onBodySent?(chunkSize: number, totalBytesSent: number): void; + } + export type PipelineHandler = (data: PipelineHandlerData) => Readable; + export type HttpMethod = 'GET' | 'HEAD' | 'POST' | 'PUT' | 'DELETE' | 'CONNECT' | 'OPTIONS' | 'TRACE' | 'PATCH'; + + /** + * @link https://fetch.spec.whatwg.org/#body-mixin + */ + interface BodyMixin { + readonly body?: never; // throws on node v16.6.0 + readonly bodyUsed: boolean; + arrayBuffer(): Promise; + blob(): Promise; + formData(): Promise; + json(): Promise; + text(): Promise; + } + + export interface DispatchInterceptor { + (dispatch: Dispatcher['dispatch']): Dispatcher['dispatch'] + } +} diff --git a/node_modules/undici-types/errors.d.ts b/node_modules/undici-types/errors.d.ts new file mode 100644 index 0000000..7923ddd --- /dev/null +++ b/node_modules/undici-types/errors.d.ts @@ -0,0 +1,128 @@ +import { IncomingHttpHeaders } from "./header"; +import Client from './client' + +export default Errors + +declare namespace Errors { + export class UndiciError extends Error { + name: string; + code: string; + } + + /** Connect timeout error. */ + export class ConnectTimeoutError extends UndiciError { + name: 'ConnectTimeoutError'; + code: 'UND_ERR_CONNECT_TIMEOUT'; + } + + /** A header exceeds the `headersTimeout` option. */ + export class HeadersTimeoutError extends UndiciError { + name: 'HeadersTimeoutError'; + code: 'UND_ERR_HEADERS_TIMEOUT'; + } + + /** Headers overflow error. */ + export class HeadersOverflowError extends UndiciError { + name: 'HeadersOverflowError' + code: 'UND_ERR_HEADERS_OVERFLOW' + } + + /** A body exceeds the `bodyTimeout` option. */ + export class BodyTimeoutError extends UndiciError { + name: 'BodyTimeoutError'; + code: 'UND_ERR_BODY_TIMEOUT'; + } + + export class ResponseStatusCodeError extends UndiciError { + constructor ( + message?: string, + statusCode?: number, + headers?: IncomingHttpHeaders | string[] | null, + body?: null | Record | string + ); + name: 'ResponseStatusCodeError'; + code: 'UND_ERR_RESPONSE_STATUS_CODE'; + body: null | Record | string + status: number + statusCode: number + headers: IncomingHttpHeaders | string[] | null; + } + + /** Passed an invalid argument. */ + export class InvalidArgumentError extends UndiciError { + name: 'InvalidArgumentError'; + code: 'UND_ERR_INVALID_ARG'; + } + + /** Returned an invalid value. */ + export class InvalidReturnValueError extends UndiciError { + name: 'InvalidReturnValueError'; + code: 'UND_ERR_INVALID_RETURN_VALUE'; + } + + /** The request has been aborted by the user. */ + export class RequestAbortedError extends UndiciError { + name: 'AbortError'; + code: 'UND_ERR_ABORTED'; + } + + /** Expected error with reason. */ + export class InformationalError extends UndiciError { + name: 'InformationalError'; + code: 'UND_ERR_INFO'; + } + + /** Request body length does not match content-length header. */ + export class RequestContentLengthMismatchError extends UndiciError { + name: 'RequestContentLengthMismatchError'; + code: 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'; + } + + /** Response body length does not match content-length header. */ + export class ResponseContentLengthMismatchError extends UndiciError { + name: 'ResponseContentLengthMismatchError'; + code: 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'; + } + + /** Trying to use a destroyed client. 
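A sketch (assumptions: the error classes are exposed as `errors` from the same package, and the URL is a placeholder) of how these error types are typically distinguished at runtime by class or `code`:

```javascript
// Illustrative sketch only: branching on undici's error classes / codes.
const { request, errors } = require('undici');

async function fetchText(url) {
  try {
    const { body } = await request(url, {
      throwOnError: true,   // 4xx/5xx then reject with ResponseStatusCodeError
      headersTimeout: 5_000,
    });
    return await body.text();
  } catch (err) {
    if (err instanceof errors.ResponseStatusCodeError) {
      console.error(`Upstream replied ${err.statusCode} (${err.code})`);
    } else if (err instanceof errors.HeadersTimeoutError) {
      console.error('Headers took longer than 5s'); // code: UND_ERR_HEADERS_TIMEOUT
    }
    throw err;
  }
}
```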
*/ + export class ClientDestroyedError extends UndiciError { + name: 'ClientDestroyedError'; + code: 'UND_ERR_DESTROYED'; + } + + /** Trying to use a closed client. */ + export class ClientClosedError extends UndiciError { + name: 'ClientClosedError'; + code: 'UND_ERR_CLOSED'; + } + + /** There is an error with the socket. */ + export class SocketError extends UndiciError { + name: 'SocketError'; + code: 'UND_ERR_SOCKET'; + socket: Client.SocketInfo | null + } + + /** Encountered unsupported functionality. */ + export class NotSupportedError extends UndiciError { + name: 'NotSupportedError'; + code: 'UND_ERR_NOT_SUPPORTED'; + } + + /** No upstream has been added to the BalancedPool. */ + export class BalancedPoolMissingUpstreamError extends UndiciError { + name: 'MissingUpstreamError'; + code: 'UND_ERR_BPL_MISSING_UPSTREAM'; + } + + export class HTTPParserError extends UndiciError { + name: 'HTTPParserError'; + code: string; + } + + /** The response exceed the length allowed. */ + export class ResponseExceededMaxSizeError extends UndiciError { + name: 'ResponseExceededMaxSizeError'; + code: 'UND_ERR_RES_EXCEEDED_MAX_SIZE'; + } +} diff --git a/node_modules/undici-types/fetch.d.ts b/node_modules/undici-types/fetch.d.ts new file mode 100644 index 0000000..fa4619c --- /dev/null +++ b/node_modules/undici-types/fetch.d.ts @@ -0,0 +1,209 @@ +// based on https://github.com/Ethan-Arrowood/undici-fetch/blob/249269714db874351589d2d364a0645d5160ae71/index.d.ts (MIT license) +// and https://github.com/node-fetch/node-fetch/blob/914ce6be5ec67a8bab63d68510aabf07cb818b6d/index.d.ts (MIT license) +/// + +import { Blob } from 'buffer' +import { URL, URLSearchParams } from 'url' +import { ReadableStream } from 'stream/web' +import { FormData } from './formdata' + +import Dispatcher from './dispatcher' + +export type RequestInfo = string | URL | Request + +export declare function fetch ( + input: RequestInfo, + init?: RequestInit +): Promise + +export type BodyInit = + | ArrayBuffer + | AsyncIterable + | Blob + | FormData + | Iterable + | NodeJS.ArrayBufferView + | URLSearchParams + | null + | string + +export interface BodyMixin { + readonly body: ReadableStream | null + readonly bodyUsed: boolean + + readonly arrayBuffer: () => Promise + readonly blob: () => Promise + readonly formData: () => Promise + readonly json: () => Promise + readonly text: () => Promise +} + +export interface SpecIterator { + next(...args: [] | [TNext]): IteratorResult; +} + +export interface SpecIterableIterator extends SpecIterator { + [Symbol.iterator](): SpecIterableIterator; +} + +export interface SpecIterable { + [Symbol.iterator](): SpecIterator; +} + +export type HeadersInit = string[][] | Record> | Headers + +export declare class Headers implements SpecIterable<[string, string]> { + constructor (init?: HeadersInit) + readonly append: (name: string, value: string) => void + readonly delete: (name: string) => void + readonly get: (name: string) => string | null + readonly has: (name: string) => boolean + readonly set: (name: string, value: string) => void + readonly getSetCookie: () => string[] + readonly forEach: ( + callbackfn: (value: string, key: string, iterable: Headers) => void, + thisArg?: unknown + ) => void + + readonly keys: () => SpecIterableIterator + readonly values: () => SpecIterableIterator + readonly entries: () => SpecIterableIterator<[string, string]> + readonly [Symbol.iterator]: () => SpecIterator<[string, string]> +} + +export type RequestCache = + | 'default' + | 'force-cache' + | 'no-cache' + | 
'no-store' + | 'only-if-cached' + | 'reload' + +export type RequestCredentials = 'omit' | 'include' | 'same-origin' + +type RequestDestination = + | '' + | 'audio' + | 'audioworklet' + | 'document' + | 'embed' + | 'font' + | 'image' + | 'manifest' + | 'object' + | 'paintworklet' + | 'report' + | 'script' + | 'sharedworker' + | 'style' + | 'track' + | 'video' + | 'worker' + | 'xslt' + +export interface RequestInit { + method?: string + keepalive?: boolean + headers?: HeadersInit + body?: BodyInit + redirect?: RequestRedirect + integrity?: string + signal?: AbortSignal + credentials?: RequestCredentials + mode?: RequestMode + referrer?: string + referrerPolicy?: ReferrerPolicy + window?: null + dispatcher?: Dispatcher + duplex?: RequestDuplex +} + +export type ReferrerPolicy = + | '' + | 'no-referrer' + | 'no-referrer-when-downgrade' + | 'origin' + | 'origin-when-cross-origin' + | 'same-origin' + | 'strict-origin' + | 'strict-origin-when-cross-origin' + | 'unsafe-url'; + +export type RequestMode = 'cors' | 'navigate' | 'no-cors' | 'same-origin' + +export type RequestRedirect = 'error' | 'follow' | 'manual' + +export type RequestDuplex = 'half' + +export declare class Request implements BodyMixin { + constructor (input: RequestInfo, init?: RequestInit) + + readonly cache: RequestCache + readonly credentials: RequestCredentials + readonly destination: RequestDestination + readonly headers: Headers + readonly integrity: string + readonly method: string + readonly mode: RequestMode + readonly redirect: RequestRedirect + readonly referrerPolicy: string + readonly url: string + + readonly keepalive: boolean + readonly signal: AbortSignal + readonly duplex: RequestDuplex + + readonly body: ReadableStream | null + readonly bodyUsed: boolean + + readonly arrayBuffer: () => Promise + readonly blob: () => Promise + readonly formData: () => Promise + readonly json: () => Promise + readonly text: () => Promise + + readonly clone: () => Request +} + +export interface ResponseInit { + readonly status?: number + readonly statusText?: string + readonly headers?: HeadersInit +} + +export type ResponseType = + | 'basic' + | 'cors' + | 'default' + | 'error' + | 'opaque' + | 'opaqueredirect' + +export type ResponseRedirectStatus = 301 | 302 | 303 | 307 | 308 + +export declare class Response implements BodyMixin { + constructor (body?: BodyInit, init?: ResponseInit) + + readonly headers: Headers + readonly ok: boolean + readonly status: number + readonly statusText: string + readonly type: ResponseType + readonly url: string + readonly redirected: boolean + + readonly body: ReadableStream | null + readonly bodyUsed: boolean + + readonly arrayBuffer: () => Promise + readonly blob: () => Promise + readonly formData: () => Promise + readonly json: () => Promise + readonly text: () => Promise + + readonly clone: () => Response + + static error (): Response + static json(data: any, init?: ResponseInit): Response + static redirect (url: string | URL, status: ResponseRedirectStatus): Response +} diff --git a/node_modules/undici-types/file.d.ts b/node_modules/undici-types/file.d.ts new file mode 100644 index 0000000..c695b7a --- /dev/null +++ b/node_modules/undici-types/file.d.ts @@ -0,0 +1,39 @@ +// Based on https://github.com/octet-stream/form-data/blob/2d0f0dc371517444ce1f22cdde13f51995d0953a/lib/File.ts (MIT) +/// + +import { Blob } from 'buffer' + +export interface BlobPropertyBag { + type?: string + endings?: 'native' | 'transparent' +} + +export interface FilePropertyBag extends BlobPropertyBag { + /** + * The 
last modified date of the file as the number of milliseconds since the Unix epoch (January 1, 1970 at midnight). Files without a known last modified date return the current date. + */ + lastModified?: number +} + +export declare class File extends Blob { + /** + * Creates a new File instance. + * + * @param fileBits An `Array` strings, or [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer), [`ArrayBufferView`](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView), [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects, or a mix of any of such objects, that will be put inside the [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). + * @param fileName The name of the file. + * @param options An options object containing optional attributes for the file. + */ + constructor(fileBits: ReadonlyArray, fileName: string, options?: FilePropertyBag) + + /** + * Name of the file referenced by the File object. + */ + readonly name: string + + /** + * The last modified date of the file as the number of milliseconds since the Unix epoch (January 1, 1970 at midnight). Files without a known last modified date return the current date. + */ + readonly lastModified: number + + readonly [Symbol.toStringTag]: string +} diff --git a/node_modules/undici-types/filereader.d.ts b/node_modules/undici-types/filereader.d.ts new file mode 100644 index 0000000..f05d231 --- /dev/null +++ b/node_modules/undici-types/filereader.d.ts @@ -0,0 +1,54 @@ +/// + +import { Blob } from 'buffer' +import { DOMException, Event, EventInit, EventTarget } from './patch' + +export declare class FileReader { + __proto__: EventTarget & FileReader + + constructor () + + readAsArrayBuffer (blob: Blob): void + readAsBinaryString (blob: Blob): void + readAsText (blob: Blob, encoding?: string): void + readAsDataURL (blob: Blob): void + + abort (): void + + static readonly EMPTY = 0 + static readonly LOADING = 1 + static readonly DONE = 2 + + readonly EMPTY = 0 + readonly LOADING = 1 + readonly DONE = 2 + + readonly readyState: number + + readonly result: string | ArrayBuffer | null + + readonly error: DOMException | null + + onloadstart: null | ((this: FileReader, event: ProgressEvent) => void) + onprogress: null | ((this: FileReader, event: ProgressEvent) => void) + onload: null | ((this: FileReader, event: ProgressEvent) => void) + onabort: null | ((this: FileReader, event: ProgressEvent) => void) + onerror: null | ((this: FileReader, event: ProgressEvent) => void) + onloadend: null | ((this: FileReader, event: ProgressEvent) => void) +} + +export interface ProgressEventInit extends EventInit { + lengthComputable?: boolean + loaded?: number + total?: number +} + +export declare class ProgressEvent { + __proto__: Event & ProgressEvent + + constructor (type: string, eventInitDict?: ProgressEventInit) + + readonly lengthComputable: boolean + readonly loaded: number + readonly total: number +} diff --git a/node_modules/undici-types/formdata.d.ts b/node_modules/undici-types/formdata.d.ts new file mode 100644 index 0000000..df29a57 --- /dev/null +++ b/node_modules/undici-types/formdata.d.ts @@ -0,0 +1,108 @@ +// Based on https://github.com/octet-stream/form-data/blob/2d0f0dc371517444ce1f22cdde13f51995d0953a/lib/FormData.ts (MIT) +/// + +import { File } from './file' +import { SpecIterator, SpecIterableIterator } from './fetch' + +/** + * A `string` or `File` that represents a single value from a set of `FormData` key-value pairs. 
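A short sketch of constructing a `File` per the declarations above; the import from `undici` and the file contents are illustrative assumptions, not part of this diff:

```javascript
// Illustrative sketch only: building a File per the constructor documented above.
const { File } = require('undici');

const file = new File(['hello world'], 'hello.txt', {
  type: 'text/plain',
  lastModified: Date.now(),
});

console.log(file.name);            // 'hello.txt'
console.log(file.size, file.type); // 11 'text/plain' (inherited from Blob)
```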
+ */ +declare type FormDataEntryValue = string | File + +/** + * Provides a way to easily construct a set of key/value pairs representing form fields and their values, which can then be easily sent using fetch(). + */ +export declare class FormData { + /** + * Appends a new value onto an existing key inside a FormData object, + * or adds the key if it does not already exist. + * + * The difference between `set()` and `append()` is that if the specified key already exists, `set()` will overwrite all existing values with the new one, whereas `append()` will append the new value onto the end of the existing set of values. + * + * @param name The name of the field whose data is contained in `value`. + * @param value The field's value. This can be [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) + or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). If none of these are specified the value is converted to a string. + * @param fileName The filename reported to the server, when a Blob or File is passed as the second parameter. The default filename for Blob objects is "blob". The default filename for File objects is the file's filename. + */ + append(name: string, value: unknown, fileName?: string): void + + /** + * Set a new value for an existing key inside FormData, + * or add the new field if it does not already exist. + * + * @param name The name of the field whose data is contained in `value`. + * @param value The field's value. This can be [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) + or [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File). If none of these are specified the value is converted to a string. + * @param fileName The filename reported to the server, when a Blob or File is passed as the second parameter. The default filename for Blob objects is "blob". The default filename for File objects is the file's filename. + * + */ + set(name: string, value: unknown, fileName?: string): void + + /** + * Returns the first value associated with a given key from within a `FormData` object. + * If you expect multiple values and want all of them, use the `getAll()` method instead. + * + * @param {string} name A name of the value you want to retrieve. + * + * @returns A `FormDataEntryValue` containing the value. If the key doesn't exist, the method returns null. + */ + get(name: string): FormDataEntryValue | null + + /** + * Returns all the values associated with a given key from within a `FormData` object. + * + * @param {string} name A name of the value you want to retrieve. + * + * @returns An array of `FormDataEntryValue` whose key matches the value passed in the `name` parameter. If the key doesn't exist, the method returns an empty list. + */ + getAll(name: string): FormDataEntryValue[] + + /** + * Returns a boolean stating whether a `FormData` object contains a certain key. + * + * @param name A string representing the name of the key you want to test for. + * + * @return A boolean value. + */ + has(name: string): boolean + + /** + * Deletes a key and its value(s) from a `FormData` object. + * + * @param name The name of the key you want to delete. 
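The `append()` versus `set()` contrast documented above, sketched with placeholder field names and plain string values:

```javascript
// Illustrative sketch only: append() accumulates values, set() replaces them.
const { FormData } = require('undici');

const form = new FormData();
form.append('tag', 'node');
form.append('tag', 'http');
console.log(form.getAll('tag'));  // [ 'node', 'http' ]

form.set('tag', 'undici');
console.log(form.getAll('tag'));  // [ 'undici' ]
console.log(form.get('missing')); // null
console.log(form.has('tag'));     // true
```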
+ */ + delete(name: string): void + + /** + * Executes given callback function for each field of the FormData instance + */ + forEach: ( + callbackfn: (value: FormDataEntryValue, key: string, iterable: FormData) => void, + thisArg?: unknown + ) => void + + /** + * Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through all keys contained in this `FormData` object. + * Each key is a `string`. + */ + keys: () => SpecIterableIterator + + /** + * Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through all values contained in this object `FormData` object. + * Each value is a [`FormDataValue`](https://developer.mozilla.org/en-US/docs/Web/API/FormDataEntryValue). + */ + values: () => SpecIterableIterator + + /** + * Returns an [`iterator`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) allowing to go through the `FormData` key/value pairs. + * The key of each pair is a string; the value is a [`FormDataValue`](https://developer.mozilla.org/en-US/docs/Web/API/FormDataEntryValue). + */ + entries: () => SpecIterableIterator<[string, FormDataEntryValue]> + + /** + * An alias for FormData#entries() + */ + [Symbol.iterator]: () => SpecIterableIterator<[string, FormDataEntryValue]> + + readonly [Symbol.toStringTag]: string +} diff --git a/node_modules/undici-types/global-dispatcher.d.ts b/node_modules/undici-types/global-dispatcher.d.ts new file mode 100644 index 0000000..728f95c --- /dev/null +++ b/node_modules/undici-types/global-dispatcher.d.ts @@ -0,0 +1,9 @@ +import Dispatcher from "./dispatcher"; + +export { + getGlobalDispatcher, + setGlobalDispatcher +} + +declare function setGlobalDispatcher(dispatcher: DispatcherImplementation): void; +declare function getGlobalDispatcher(): Dispatcher; diff --git a/node_modules/undici-types/global-origin.d.ts b/node_modules/undici-types/global-origin.d.ts new file mode 100644 index 0000000..322542d --- /dev/null +++ b/node_modules/undici-types/global-origin.d.ts @@ -0,0 +1,7 @@ +export { + setGlobalOrigin, + getGlobalOrigin +} + +declare function setGlobalOrigin(origin: string | URL | undefined): void; +declare function getGlobalOrigin(): URL | undefined; \ No newline at end of file diff --git a/node_modules/undici-types/handlers.d.ts b/node_modules/undici-types/handlers.d.ts new file mode 100644 index 0000000..eb4f5a9 --- /dev/null +++ b/node_modules/undici-types/handlers.d.ts @@ -0,0 +1,9 @@ +import Dispatcher from "./dispatcher"; + +export declare class RedirectHandler implements Dispatcher.DispatchHandlers{ + constructor (dispatch: Dispatcher, maxRedirections: number, opts: Dispatcher.DispatchOptions, handler: Dispatcher.DispatchHandlers) +} + +export declare class DecoratorHandler implements Dispatcher.DispatchHandlers{ + constructor (handler: Dispatcher.DispatchHandlers) +} diff --git a/node_modules/undici-types/header.d.ts b/node_modules/undici-types/header.d.ts new file mode 100644 index 0000000..bfdb329 --- /dev/null +++ b/node_modules/undici-types/header.d.ts @@ -0,0 +1,4 @@ +/** + * The header type declaration of `undici`. 
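A sketch of the global-dispatcher helpers declared above; the `Agent` options shown are illustrative placeholders:

```javascript
// Illustrative sketch only: installing one Agent for all undici requests.
const { Agent, setGlobalDispatcher, getGlobalDispatcher } = require('undici');

setGlobalDispatcher(new Agent({ keepAliveTimeout: 10_000, connections: 10 }));
console.log(getGlobalDispatcher() instanceof Agent); // true
// Subsequent request()/fetch() calls made through undici now reuse this Agent.
```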
+ */ +export type IncomingHttpHeaders = Record; diff --git a/node_modules/undici-types/index.d.ts b/node_modules/undici-types/index.d.ts new file mode 100644 index 0000000..4589845 --- /dev/null +++ b/node_modules/undici-types/index.d.ts @@ -0,0 +1,63 @@ +import Dispatcher from'./dispatcher' +import { setGlobalDispatcher, getGlobalDispatcher } from './global-dispatcher' +import { setGlobalOrigin, getGlobalOrigin } from './global-origin' +import Pool from'./pool' +import { RedirectHandler, DecoratorHandler } from './handlers' + +import BalancedPool from './balanced-pool' +import Client from'./client' +import buildConnector from'./connector' +import errors from'./errors' +import Agent from'./agent' +import MockClient from'./mock-client' +import MockPool from'./mock-pool' +import MockAgent from'./mock-agent' +import mockErrors from'./mock-errors' +import ProxyAgent from'./proxy-agent' +import { request, pipeline, stream, connect, upgrade } from './api' + +export * from './cookies' +export * from './fetch' +export * from './file' +export * from './filereader' +export * from './formdata' +export * from './diagnostics-channel' +export * from './websocket' +export * from './content-type' +export * from './cache' +export { Interceptable } from './mock-interceptor' + +export { Dispatcher, BalancedPool, Pool, Client, buildConnector, errors, Agent, request, stream, pipeline, connect, upgrade, setGlobalDispatcher, getGlobalDispatcher, setGlobalOrigin, getGlobalOrigin, MockClient, MockPool, MockAgent, mockErrors, ProxyAgent, RedirectHandler, DecoratorHandler } +export default Undici + +declare namespace Undici { + var Dispatcher: typeof import('./dispatcher').default + var Pool: typeof import('./pool').default; + var RedirectHandler: typeof import ('./handlers').RedirectHandler + var DecoratorHandler: typeof import ('./handlers').DecoratorHandler + var createRedirectInterceptor: typeof import ('./interceptors').createRedirectInterceptor + var BalancedPool: typeof import('./balanced-pool').default; + var Client: typeof import('./client').default; + var buildConnector: typeof import('./connector').default; + var errors: typeof import('./errors').default; + var Agent: typeof import('./agent').default; + var setGlobalDispatcher: typeof import('./global-dispatcher').setGlobalDispatcher; + var getGlobalDispatcher: typeof import('./global-dispatcher').getGlobalDispatcher; + var request: typeof import('./api').request; + var stream: typeof import('./api').stream; + var pipeline: typeof import('./api').pipeline; + var connect: typeof import('./api').connect; + var upgrade: typeof import('./api').upgrade; + var MockClient: typeof import('./mock-client').default; + var MockPool: typeof import('./mock-pool').default; + var MockAgent: typeof import('./mock-agent').default; + var mockErrors: typeof import('./mock-errors').default; + var fetch: typeof import('./fetch').fetch; + var Headers: typeof import('./fetch').Headers; + var Response: typeof import('./fetch').Response; + var Request: typeof import('./fetch').Request; + var FormData: typeof import('./formdata').FormData; + var File: typeof import('./file').File; + var FileReader: typeof import('./filereader').FileReader; + var caches: typeof import('./cache').caches; +} diff --git a/node_modules/undici-types/interceptors.d.ts b/node_modules/undici-types/interceptors.d.ts new file mode 100644 index 0000000..047ac17 --- /dev/null +++ b/node_modules/undici-types/interceptors.d.ts @@ -0,0 +1,5 @@ +import Dispatcher from "./dispatcher"; + +type RedirectInterceptorOpts 
= { maxRedirections?: number } + +export declare function createRedirectInterceptor (opts: RedirectInterceptorOpts): Dispatcher.DispatchInterceptor diff --git a/node_modules/undici-types/mock-agent.d.ts b/node_modules/undici-types/mock-agent.d.ts new file mode 100644 index 0000000..98cd645 --- /dev/null +++ b/node_modules/undici-types/mock-agent.d.ts @@ -0,0 +1,50 @@ +import Agent from './agent' +import Dispatcher from './dispatcher' +import { Interceptable, MockInterceptor } from './mock-interceptor' +import MockDispatch = MockInterceptor.MockDispatch; + +export default MockAgent + +interface PendingInterceptor extends MockDispatch { + origin: string; +} + +/** A mocked Agent class that implements the Agent API. It allows one to intercept HTTP requests made through undici and return mocked responses instead. */ +declare class MockAgent extends Dispatcher { + constructor(options?: MockAgent.Options) + /** Creates and retrieves mock Dispatcher instances which can then be used to intercept HTTP requests. If the number of connections on the mock agent is set to 1, a MockClient instance is returned. Otherwise a MockPool instance is returned. */ + get(origin: string): TInterceptable; + get(origin: RegExp): TInterceptable; + get(origin: ((origin: string) => boolean)): TInterceptable; + /** Dispatches a mocked request. */ + dispatch(options: Agent.DispatchOptions, handler: Dispatcher.DispatchHandlers): boolean; + /** Closes the mock agent and waits for registered mock pools and clients to also close before resolving. */ + close(): Promise; + /** Disables mocking in MockAgent. */ + deactivate(): void; + /** Enables mocking in a MockAgent instance. When instantiated, a MockAgent is automatically activated. Therefore, this method is only effective after `MockAgent.deactivate` has been called. */ + activate(): void; + /** Define host matchers so only matching requests that aren't intercepted by the mock dispatchers will be attempted. */ + enableNetConnect(): void; + enableNetConnect(host: string): void; + enableNetConnect(host: RegExp): void; + enableNetConnect(host: ((host: string) => boolean)): void; + /** Causes all requests to throw when requests are not matched in a MockAgent intercept. */ + disableNetConnect(): void; + pendingInterceptors(): PendingInterceptor[]; + assertNoPendingInterceptors(options?: { + pendingInterceptorsFormatter?: PendingInterceptorsFormatter; + }): void; +} + +interface PendingInterceptorsFormatter { + format(pendingInterceptors: readonly PendingInterceptor[]): string; +} + +declare namespace MockAgent { + /** MockAgent options. */ + export interface Options extends Agent.Options { + /** A custom agent to be encapsulated by the MockAgent. */ + agent?: Agent; + } +} diff --git a/node_modules/undici-types/mock-client.d.ts b/node_modules/undici-types/mock-client.d.ts new file mode 100644 index 0000000..51d008c --- /dev/null +++ b/node_modules/undici-types/mock-client.d.ts @@ -0,0 +1,25 @@ +import Client from './client' +import Dispatcher from './dispatcher' +import MockAgent from './mock-agent' +import { MockInterceptor, Interceptable } from './mock-interceptor' + +export default MockClient + +/** MockClient extends the Client API and allows one to mock requests. */ +declare class MockClient extends Client implements Interceptable { + constructor(origin: string, options: MockClient.Options); + /** Intercepts any matching requests that use the same origin as this mock client. 
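A sketch of the interception flow these mock classes describe, assuming a placeholder test origin:

```javascript
// Illustrative sketch only: MockAgent answers matching requests in tests.
const { MockAgent, setGlobalDispatcher, request } = require('undici');

const mockAgent = new MockAgent();
mockAgent.disableNetConnect();       // unmatched requests now throw
setGlobalDispatcher(mockAgent);

mockAgent
  .get('https://api.example.com')    // MockPool (or MockClient if connections === 1)
  .intercept({ path: '/users/1', method: 'GET' })
  .reply(200, { id: 1, name: 'Ada' });

async function demo() {
  const { statusCode, body } = await request('https://api.example.com/users/1');
  console.log(statusCode, await body.json()); // 200 { id: 1, name: 'Ada' }
}
demo().catch(console.error);
```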
*/ + intercept(options: MockInterceptor.Options): MockInterceptor; + /** Dispatches a mocked request. */ + dispatch(options: Dispatcher.DispatchOptions, handlers: Dispatcher.DispatchHandlers): boolean; + /** Closes the mock client and gracefully waits for enqueued requests to complete. */ + close(): Promise; +} + +declare namespace MockClient { + /** MockClient options. */ + export interface Options extends Client.Options { + /** The agent to associate this MockClient with. */ + agent: MockAgent; + } +} diff --git a/node_modules/undici-types/mock-errors.d.ts b/node_modules/undici-types/mock-errors.d.ts new file mode 100644 index 0000000..3d9e727 --- /dev/null +++ b/node_modules/undici-types/mock-errors.d.ts @@ -0,0 +1,12 @@ +import Errors from './errors' + +export default MockErrors + +declare namespace MockErrors { + /** The request does not match any registered mock dispatches. */ + export class MockNotMatchedError extends Errors.UndiciError { + constructor(message?: string); + name: 'MockNotMatchedError'; + code: 'UND_MOCK_ERR_MOCK_NOT_MATCHED'; + } +} diff --git a/node_modules/undici-types/mock-interceptor.d.ts b/node_modules/undici-types/mock-interceptor.d.ts new file mode 100644 index 0000000..6b3961c --- /dev/null +++ b/node_modules/undici-types/mock-interceptor.d.ts @@ -0,0 +1,93 @@ +import { IncomingHttpHeaders } from './header' +import Dispatcher from './dispatcher'; +import { BodyInit, Headers } from './fetch' + +export { + Interceptable, + MockInterceptor, + MockScope +} + +/** The scope associated with a mock dispatch. */ +declare class MockScope { + constructor(mockDispatch: MockInterceptor.MockDispatch); + /** Delay a reply by a set amount of time in ms. */ + delay(waitInMs: number): MockScope; + /** Persist the defined mock data for the associated reply. It will return the defined mock data indefinitely. */ + persist(): MockScope; + /** Define a reply for a set amount of matching requests. */ + times(repeatTimes: number): MockScope; +} + +/** The interceptor for a Mock. */ +declare class MockInterceptor { + constructor(options: MockInterceptor.Options, mockDispatches: MockInterceptor.MockDispatch[]); + /** Mock an undici request with the defined reply. */ + reply(replyOptionsCallback: MockInterceptor.MockReplyOptionsCallback): MockScope; + reply( + statusCode: number, + data?: TData | Buffer | string | MockInterceptor.MockResponseDataHandler, + responseOptions?: MockInterceptor.MockResponseOptions + ): MockScope; + /** Mock an undici request by throwing the defined reply error. */ + replyWithError(error: TError): MockScope; + /** Set default reply headers on the interceptor for subsequent mocked replies. */ + defaultReplyHeaders(headers: IncomingHttpHeaders): MockInterceptor; + /** Set default reply trailers on the interceptor for subsequent mocked replies. */ + defaultReplyTrailers(trailers: Record): MockInterceptor; + /** Set automatically calculated content-length header on subsequent mocked replies. */ + replyContentLength(): MockInterceptor; +} + +declare namespace MockInterceptor { + /** MockInterceptor options. */ + export interface Options { + /** Path to intercept on. */ + path: string | RegExp | ((path: string) => boolean); + /** Method to intercept on. Defaults to GET. */ + method?: string | RegExp | ((method: string) => boolean); + /** Body to intercept on. */ + body?: string | RegExp | ((body: string) => boolean); + /** Headers to intercept on. 
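The `MockScope` helpers above (`delay`, `persist`, `times`) chained onto replies, again against a placeholder origin:

```javascript
// Illustrative sketch only: shaping mocked replies with MockScope.
const { MockAgent } = require('undici');

const mockAgent = new MockAgent();
const pool = mockAgent.get('https://api.example.com');

pool.intercept({ path: '/health', method: 'GET' })
  .reply(200, 'ok')
  .persist();                 // answer every matching request indefinitely

pool.intercept({ path: '/flaky', method: 'GET' })
  .reply(503, 'retry later')
  .times(2)                   // only the first two matching requests
  .delay(250);                // each reply arrives after 250 ms
```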
*/ + headers?: Record boolean)> | ((headers: Record) => boolean); + /** Query params to intercept on */ + query?: Record; + } + export interface MockDispatch extends Options { + times: number | null; + persist: boolean; + consumed: boolean; + data: MockDispatchData; + } + export interface MockDispatchData extends MockResponseOptions { + error: TError | null; + statusCode?: number; + data?: TData | string; + } + export interface MockResponseOptions { + headers?: IncomingHttpHeaders; + trailers?: Record; + } + + export interface MockResponseCallbackOptions { + path: string; + origin: string; + method: string; + body?: BodyInit | Dispatcher.DispatchOptions['body']; + headers: Headers | Record; + maxRedirections: number; + } + + export type MockResponseDataHandler = ( + opts: MockResponseCallbackOptions + ) => TData | Buffer | string; + + export type MockReplyOptionsCallback = ( + opts: MockResponseCallbackOptions + ) => { statusCode: number, data?: TData | Buffer | string, responseOptions?: MockResponseOptions } +} + +interface Interceptable extends Dispatcher { + /** Intercepts any matching requests that use the same origin as this mock client. */ + intercept(options: MockInterceptor.Options): MockInterceptor; +} diff --git a/node_modules/undici-types/mock-pool.d.ts b/node_modules/undici-types/mock-pool.d.ts new file mode 100644 index 0000000..39e709a --- /dev/null +++ b/node_modules/undici-types/mock-pool.d.ts @@ -0,0 +1,25 @@ +import Pool from './pool' +import MockAgent from './mock-agent' +import { Interceptable, MockInterceptor } from './mock-interceptor' +import Dispatcher from './dispatcher' + +export default MockPool + +/** MockPool extends the Pool API and allows one to mock requests. */ +declare class MockPool extends Pool implements Interceptable { + constructor(origin: string, options: MockPool.Options); + /** Intercepts any matching requests that use the same origin as this mock pool. */ + intercept(options: MockInterceptor.Options): MockInterceptor; + /** Dispatches a mocked request. */ + dispatch(options: Dispatcher.DispatchOptions, handlers: Dispatcher.DispatchHandlers): boolean; + /** Closes the mock pool and gracefully waits for enqueued requests to complete. */ + close(): Promise; +} + +declare namespace MockPool { + /** MockPool options. */ + export interface Options extends Pool.Options { + /** The agent to associate this MockPool with. 
*/ + agent: MockAgent; + } +} diff --git a/node_modules/undici-types/package.json b/node_modules/undici-types/package.json new file mode 100644 index 0000000..be7aa4c --- /dev/null +++ b/node_modules/undici-types/package.json @@ -0,0 +1,55 @@ +{ + "name": "undici-types", + "version": "5.26.5", + "description": "A stand-alone types package for Undici", + "homepage": "https://undici.nodejs.org", + "bugs": { + "url": "https://github.com/nodejs/undici/issues" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/nodejs/undici.git" + }, + "license": "MIT", + "types": "index.d.ts", + "files": [ + "*.d.ts" + ], + "contributors": [ + { + "name": "Daniele Belardi", + "url": "https://github.com/dnlup", + "author": true + }, + { + "name": "Ethan Arrowood", + "url": "https://github.com/ethan-arrowood", + "author": true + }, + { + "name": "Matteo Collina", + "url": "https://github.com/mcollina", + "author": true + }, + { + "name": "Matthew Aitken", + "url": "https://github.com/KhafraDev", + "author": true + }, + { + "name": "Robert Nagy", + "url": "https://github.com/ronag", + "author": true + }, + { + "name": "Szymon Marczak", + "url": "https://github.com/szmarczak", + "author": true + }, + { + "name": "Tomas Della Vedova", + "url": "https://github.com/delvedor", + "author": true + } + ] +} \ No newline at end of file diff --git a/node_modules/undici-types/patch.d.ts b/node_modules/undici-types/patch.d.ts new file mode 100644 index 0000000..3871acf --- /dev/null +++ b/node_modules/undici-types/patch.d.ts @@ -0,0 +1,71 @@ +/// + +// See https://github.com/nodejs/undici/issues/1740 + +export type DOMException = typeof globalThis extends { DOMException: infer T } + ? T + : any + +export type EventTarget = typeof globalThis extends { EventTarget: infer T } + ? T + : { + addEventListener( + type: string, + listener: any, + options?: any, + ): void + dispatchEvent(event: Event): boolean + removeEventListener( + type: string, + listener: any, + options?: any | boolean, + ): void + } + +export type Event = typeof globalThis extends { Event: infer T } + ? T + : { + readonly bubbles: boolean + cancelBubble: () => void + readonly cancelable: boolean + readonly composed: boolean + composedPath(): [EventTarget?] + readonly currentTarget: EventTarget | null + readonly defaultPrevented: boolean + readonly eventPhase: 0 | 2 + readonly isTrusted: boolean + preventDefault(): void + returnValue: boolean + readonly srcElement: EventTarget | null + stopImmediatePropagation(): void + stopPropagation(): void + readonly target: EventTarget | null + readonly timeStamp: number + readonly type: string + } + +export interface EventInit { + bubbles?: boolean + cancelable?: boolean + composed?: boolean +} + +export interface EventListenerOptions { + capture?: boolean +} + +export interface AddEventListenerOptions extends EventListenerOptions { + once?: boolean + passive?: boolean + signal?: AbortSignal +} + +export type EventListenerOrEventListenerObject = EventListener | EventListenerObject + +export interface EventListenerObject { + handleEvent (object: Event): void +} + +export interface EventListener { + (evt: Event): void +} diff --git a/node_modules/undici-types/pool-stats.d.ts b/node_modules/undici-types/pool-stats.d.ts new file mode 100644 index 0000000..8b6d2bf --- /dev/null +++ b/node_modules/undici-types/pool-stats.d.ts @@ -0,0 +1,19 @@ +import Pool from "./pool" + +export default PoolStats + +declare class PoolStats { + constructor(pool: Pool); + /** Number of open socket connections in this pool. 
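A sketch of reading the aggregate pool statistics declared around here from a live pool; the origin and connection count are placeholders:

```javascript
// Illustrative sketch only: Pool exposes aggregate counters via pool.stats.
const { Pool } = require('undici');

const pool = new Pool('https://api.example.com', { connections: 4 });
const { connected, free, pending, queued, running, size } = pool.stats;
console.log({ connected, free, pending, queued, running, size });
```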
*/ + connected: number; + /** Number of open socket connections in this pool that do not have an active request. */ + free: number; + /** Number of pending requests across all clients in this pool. */ + pending: number; + /** Number of queued requests across all clients in this pool. */ + queued: number; + /** Number of currently active requests across all clients in this pool. */ + running: number; + /** Number of active, pending, or queued requests across all clients in this pool. */ + size: number; +} diff --git a/node_modules/undici-types/pool.d.ts b/node_modules/undici-types/pool.d.ts new file mode 100644 index 0000000..7747d48 --- /dev/null +++ b/node_modules/undici-types/pool.d.ts @@ -0,0 +1,28 @@ +import Client from './client' +import TPoolStats from './pool-stats' +import { URL } from 'url' +import Dispatcher from "./dispatcher"; + +export default Pool + +declare class Pool extends Dispatcher { + constructor(url: string | URL, options?: Pool.Options) + /** `true` after `pool.close()` has been called. */ + closed: boolean; + /** `true` after `pool.destroyed()` has been called or `pool.close()` has been called and the pool shutdown has completed. */ + destroyed: boolean; + /** Aggregate stats for a Pool. */ + readonly stats: TPoolStats; +} + +declare namespace Pool { + export type PoolStats = TPoolStats; + export interface Options extends Client.Options { + /** Default: `(origin, opts) => new Client(origin, opts)`. */ + factory?(origin: URL, opts: object): Dispatcher; + /** The max number of clients to create. `null` if no limit. Default `null`. */ + connections?: number | null; + + interceptors?: { Pool?: readonly Dispatcher.DispatchInterceptor[] } & Client.Options["interceptors"] + } +} diff --git a/node_modules/undici-types/proxy-agent.d.ts b/node_modules/undici-types/proxy-agent.d.ts new file mode 100644 index 0000000..96b2638 --- /dev/null +++ b/node_modules/undici-types/proxy-agent.d.ts @@ -0,0 +1,30 @@ +import Agent from './agent' +import buildConnector from './connector'; +import Client from './client' +import Dispatcher from './dispatcher' +import { IncomingHttpHeaders } from './header' +import Pool from './pool' + +export default ProxyAgent + +declare class ProxyAgent extends Dispatcher { + constructor(options: ProxyAgent.Options | string) + + dispatch(options: Agent.DispatchOptions, handler: Dispatcher.DispatchHandlers): boolean; + close(): Promise; +} + +declare namespace ProxyAgent { + export interface Options extends Agent.Options { + uri: string; + /** + * @deprecated use opts.token + */ + auth?: string; + token?: string; + headers?: IncomingHttpHeaders; + requestTls?: buildConnector.BuildOptions; + proxyTls?: buildConnector.BuildOptions; + clientFactory?(origin: URL, opts: object): Dispatcher; + } +} diff --git a/node_modules/undici-types/readable.d.ts b/node_modules/undici-types/readable.d.ts new file mode 100644 index 0000000..4549a8c --- /dev/null +++ b/node_modules/undici-types/readable.d.ts @@ -0,0 +1,61 @@ +import { Readable } from "stream"; +import { Blob } from 'buffer' + +export default BodyReadable + +declare class BodyReadable extends Readable { + constructor( + resume?: (this: Readable, size: number) => void | null, + abort?: () => void | null, + contentType?: string + ) + + /** Consumes and returns the body as a string + * https://fetch.spec.whatwg.org/#dom-body-text + */ + text(): Promise + + /** Consumes and returns the body as a JavaScript Object + * https://fetch.spec.whatwg.org/#dom-body-json + */ + json(): Promise + + /** Consumes and returns the 
body as a Blob + * https://fetch.spec.whatwg.org/#dom-body-blob + */ + blob(): Promise + + /** Consumes and returns the body as an ArrayBuffer + * https://fetch.spec.whatwg.org/#dom-body-arraybuffer + */ + arrayBuffer(): Promise + + /** Not implemented + * + * https://fetch.spec.whatwg.org/#dom-body-formdata + */ + formData(): Promise + + /** Returns true if the body is not null and the body has been consumed + * + * Otherwise, returns false + * + * https://fetch.spec.whatwg.org/#dom-body-bodyused + */ + readonly bodyUsed: boolean + + /** Throws on node 16.6.0 + * + * If body is null, it should return null as the body + * + * If body is not null, should return the body as a ReadableStream + * + * https://fetch.spec.whatwg.org/#dom-body-body + */ + readonly body: never | undefined + + /** Dumps the response body by reading `limit` number of bytes. + * @param opts.limit Number of bytes to read (optional) - Default: 262144 + */ + dump(opts?: { limit: number }): Promise +} diff --git a/node_modules/undici-types/webidl.d.ts b/node_modules/undici-types/webidl.d.ts new file mode 100644 index 0000000..40cfe06 --- /dev/null +++ b/node_modules/undici-types/webidl.d.ts @@ -0,0 +1,220 @@ +// These types are not exported, and are only used internally + +/** + * Take in an unknown value and return one that is of type T + */ +type Converter = (object: unknown) => T + +type SequenceConverter = (object: unknown) => T[] + +type RecordConverter = (object: unknown) => Record + +interface ConvertToIntOpts { + clamp?: boolean + enforceRange?: boolean +} + +interface WebidlErrors { + exception (opts: { header: string, message: string }): TypeError + /** + * @description Throw an error when conversion from one type to another has failed + */ + conversionFailed (opts: { + prefix: string + argument: string + types: string[] + }): TypeError + /** + * @description Throw an error when an invalid argument is provided + */ + invalidArgument (opts: { + prefix: string + value: string + type: string + }): TypeError +} + +interface WebidlUtil { + /** + * @see https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values + */ + Type (object: unknown): + | 'Undefined' + | 'Boolean' + | 'String' + | 'Symbol' + | 'Number' + | 'BigInt' + | 'Null' + | 'Object' + + /** + * @see https://webidl.spec.whatwg.org/#abstract-opdef-converttoint + */ + ConvertToInt ( + V: unknown, + bitLength: number, + signedness: 'signed' | 'unsigned', + opts?: ConvertToIntOpts + ): number + + /** + * @see https://webidl.spec.whatwg.org/#abstract-opdef-converttoint + */ + IntegerPart (N: number): number +} + +interface WebidlConverters { + /** + * @see https://webidl.spec.whatwg.org/#es-DOMString + */ + DOMString (V: unknown, opts?: { + legacyNullToEmptyString: boolean + }): string + + /** + * @see https://webidl.spec.whatwg.org/#es-ByteString + */ + ByteString (V: unknown): string + + /** + * @see https://webidl.spec.whatwg.org/#es-USVString + */ + USVString (V: unknown): string + + /** + * @see https://webidl.spec.whatwg.org/#es-boolean + */ + boolean (V: unknown): boolean + + /** + * @see https://webidl.spec.whatwg.org/#es-any + */ + any (V: Value): Value + + /** + * @see https://webidl.spec.whatwg.org/#es-long-long + */ + ['long long'] (V: unknown): number + + /** + * @see https://webidl.spec.whatwg.org/#es-unsigned-long-long + */ + ['unsigned long long'] (V: unknown): number + + /** + * @see https://webidl.spec.whatwg.org/#es-unsigned-long + */ + ['unsigned long'] (V: unknown): number + + /** + * @see 
https://webidl.spec.whatwg.org/#es-unsigned-short + */ + ['unsigned short'] (V: unknown, opts?: ConvertToIntOpts): number + + /** + * @see https://webidl.spec.whatwg.org/#idl-ArrayBuffer + */ + ArrayBuffer (V: unknown): ArrayBufferLike + ArrayBuffer (V: unknown, opts: { allowShared: false }): ArrayBuffer + + /** + * @see https://webidl.spec.whatwg.org/#es-buffer-source-types + */ + TypedArray ( + V: unknown, + TypedArray: NodeJS.TypedArray | ArrayBufferLike + ): NodeJS.TypedArray | ArrayBufferLike + TypedArray ( + V: unknown, + TypedArray: NodeJS.TypedArray | ArrayBufferLike, + opts?: { allowShared: false } + ): NodeJS.TypedArray | ArrayBuffer + + /** + * @see https://webidl.spec.whatwg.org/#es-buffer-source-types + */ + DataView (V: unknown, opts?: { allowShared: boolean }): DataView + + /** + * @see https://webidl.spec.whatwg.org/#BufferSource + */ + BufferSource ( + V: unknown, + opts?: { allowShared: boolean } + ): NodeJS.TypedArray | ArrayBufferLike | DataView + + ['sequence']: SequenceConverter + + ['sequence>']: SequenceConverter + + ['record']: RecordConverter + + [Key: string]: (...args: any[]) => unknown +} + +export interface Webidl { + errors: WebidlErrors + util: WebidlUtil + converters: WebidlConverters + + /** + * @description Performs a brand-check on {@param V} to ensure it is a + * {@param cls} object. + */ + brandCheck (V: unknown, cls: Interface, opts?: { strict?: boolean }): asserts V is Interface + + /** + * @see https://webidl.spec.whatwg.org/#es-sequence + * @description Convert a value, V, to a WebIDL sequence type. + */ + sequenceConverter (C: Converter): SequenceConverter + + illegalConstructor (): never + + /** + * @see https://webidl.spec.whatwg.org/#es-to-record + * @description Convert a value, V, to a WebIDL record type. + */ + recordConverter ( + keyConverter: Converter, + valueConverter: Converter + ): RecordConverter + + /** + * Similar to {@link Webidl.brandCheck} but allows skipping the check if third party + * interfaces are allowed. + */ + interfaceConverter (cls: Interface): ( + V: unknown, + opts?: { strict: boolean } + ) => asserts V is typeof cls + + // TODO(@KhafraDev): a type could likely be implemented that can infer the return type + // from the converters given? + /** + * Converts a value, V, to a WebIDL dictionary types. Allows limiting which keys are + * allowed, values allowed, optional and required keys. Auto converts the value to + * a type given a converter. 
+ */ + dictionaryConverter (converters: { + key: string, + defaultValue?: unknown, + required?: boolean, + converter: (...args: unknown[]) => unknown, + allowedValues?: unknown[] + }[]): (V: unknown) => Record + + /** + * @see https://webidl.spec.whatwg.org/#idl-nullable-type + * @description allows a type, V, to be null + */ + nullableConverter ( + converter: Converter + ): (V: unknown) => ReturnType | null + + argumentLengthCheck (args: { length: number }, min: number, context: { + header: string + message?: string + }): void +} diff --git a/node_modules/undici-types/websocket.d.ts b/node_modules/undici-types/websocket.d.ts new file mode 100644 index 0000000..15a357d --- /dev/null +++ b/node_modules/undici-types/websocket.d.ts @@ -0,0 +1,131 @@ +/// + +import type { Blob } from 'buffer' +import type { MessagePort } from 'worker_threads' +import { + EventTarget, + Event, + EventInit, + EventListenerOptions, + AddEventListenerOptions, + EventListenerOrEventListenerObject +} from './patch' +import Dispatcher from './dispatcher' +import { HeadersInit } from './fetch' + +export type BinaryType = 'blob' | 'arraybuffer' + +interface WebSocketEventMap { + close: CloseEvent + error: Event + message: MessageEvent + open: Event +} + +interface WebSocket extends EventTarget { + binaryType: BinaryType + + readonly bufferedAmount: number + readonly extensions: string + + onclose: ((this: WebSocket, ev: WebSocketEventMap['close']) => any) | null + onerror: ((this: WebSocket, ev: WebSocketEventMap['error']) => any) | null + onmessage: ((this: WebSocket, ev: WebSocketEventMap['message']) => any) | null + onopen: ((this: WebSocket, ev: WebSocketEventMap['open']) => any) | null + + readonly protocol: string + readonly readyState: number + readonly url: string + + close(code?: number, reason?: string): void + send(data: string | ArrayBufferLike | Blob | ArrayBufferView): void + + readonly CLOSED: number + readonly CLOSING: number + readonly CONNECTING: number + readonly OPEN: number + + addEventListener( + type: K, + listener: (this: WebSocket, ev: WebSocketEventMap[K]) => any, + options?: boolean | AddEventListenerOptions + ): void + addEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | AddEventListenerOptions + ): void + removeEventListener( + type: K, + listener: (this: WebSocket, ev: WebSocketEventMap[K]) => any, + options?: boolean | EventListenerOptions + ): void + removeEventListener( + type: string, + listener: EventListenerOrEventListenerObject, + options?: boolean | EventListenerOptions + ): void +} + +export declare const WebSocket: { + prototype: WebSocket + new (url: string | URL, protocols?: string | string[] | WebSocketInit): WebSocket + readonly CLOSED: number + readonly CLOSING: number + readonly CONNECTING: number + readonly OPEN: number +} + +interface CloseEventInit extends EventInit { + code?: number + reason?: string + wasClean?: boolean +} + +interface CloseEvent extends Event { + readonly code: number + readonly reason: string + readonly wasClean: boolean +} + +export declare const CloseEvent: { + prototype: CloseEvent + new (type: string, eventInitDict?: CloseEventInit): CloseEvent +} + +interface MessageEventInit extends EventInit { + data?: T + lastEventId?: string + origin?: string + ports?: (typeof MessagePort)[] + source?: typeof MessagePort | null +} + +interface MessageEvent extends Event { + readonly data: T + readonly lastEventId: string + readonly origin: string + readonly ports: ReadonlyArray + readonly source: typeof 
MessagePort | null + initMessageEvent( + type: string, + bubbles?: boolean, + cancelable?: boolean, + data?: any, + origin?: string, + lastEventId?: string, + source?: typeof MessagePort | null, + ports?: (typeof MessagePort)[] + ): void; +} + +export declare const MessageEvent: { + prototype: MessageEvent + new(type: string, eventInitDict?: MessageEventInit): MessageEvent +} + +interface WebSocketInit { + protocols?: string | string[], + dispatcher?: Dispatcher, + headers?: HeadersInit +} diff --git a/node_modules/util-deprecate/History.md b/node_modules/util-deprecate/History.md new file mode 100644 index 0000000..acc8675 --- /dev/null +++ b/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/node_modules/util-deprecate/LICENSE b/node_modules/util-deprecate/LICENSE new file mode 100644 index 0000000..6a60e8c --- /dev/null +++ b/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/README.md b/node_modules/util-deprecate/README.md new file mode 100644 index 0000000..75622fa --- /dev/null +++ b/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. 
+ +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/browser.js b/node_modules/util-deprecate/browser.js new file mode 100644 index 0000000..549ae2f --- /dev/null +++ b/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. + * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. + * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/node_modules/util-deprecate/node.js b/node_modules/util-deprecate/node.js new file mode 100644 index 0000000..5e6fcff --- /dev/null +++ b/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. 
+ */ + +module.exports = require('util').deprecate; diff --git a/node_modules/util-deprecate/package.json b/node_modules/util-deprecate/package.json new file mode 100644 index 0000000..2e79f89 --- /dev/null +++ b/node_modules/util-deprecate/package.json @@ -0,0 +1,27 @@ +{ + "name": "util-deprecate", + "version": "1.0.2", + "description": "The Node.js `util.deprecate()` function with browser support", + "main": "node.js", + "browser": "browser.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "homepage": "https://github.com/TooTallNate/util-deprecate" +} diff --git a/node_modules/uuid/CHANGELOG.md b/node_modules/uuid/CHANGELOG.md new file mode 100644 index 0000000..7519d19 --- /dev/null +++ b/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,229 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08) + +### Bug Fixes + +- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) ([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536) + +### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04) + +### Bug Fixes + +- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375) + +## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27) + +### Features + +- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) [#180](https://github.com/uuidjs/uuid/issues/180) + +## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23) + +### Features + +- improve performance of v1 string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5)) +- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437) +- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659)) + +### Bug Fixes + +- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8)) + +## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20) + +### Features + +- improve v4 performance by 
reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d)) +- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2)) + +### Bug Fixes + +- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444) + +## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) (2020-04-29) + +### ⚠ BREAKING CHANGES + +- For native ECMAScript Module (ESM) usage in Node.js only named exports are exposed, there is no more default export. + + ```diff + -import uuid from 'uuid'; + -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869' + +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' + ``` + +- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported. + + Instead use the named exports that this module exports. + + For ECMAScript Modules (ESM): + + ```diff + -import uuidv4 from 'uuid/v4'; + +import { v4 as uuidv4 } from 'uuid'; + uuidv4(); + ``` + + For CommonJS: + + ```diff + -const uuidv4 = require('uuid/v4'); + +const { v4: uuidv4 } = require('uuid'); + uuidv4(); + ``` + +### Features + +- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342) +- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba)) + +### Bug Fixes + +- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0)) + +### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31) + +### Bug Fixes + +- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408) + +### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04) + +### Bug Fixes + +- make access to msCrypto consistent ([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c)) +- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7)) +- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4)) + +### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25) + +### Bug Fixes + +- clean up esm builds for 
node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc)) +- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378) + +## [7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24) + +### ⚠ BREAKING CHANGES + +- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed. +- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants. +- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function. +- Remove support for generating v3 and v5 UUIDs in Node.js<4.x +- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers. + +### Features + +- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345) +- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555)) +- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b)) +- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0)) +- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173) +- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627)) +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### Bug Fixes + +- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48)) +- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370) +- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23)) + +## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + +### Features + +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes 
[#338](https://github.com/uuidjs/uuid/issues/338) + +## [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + +### Bug Fixes + +- no longer run ci tests on node v4 +- upgrade dependencies + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + +### Bug Fixes + +- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + +### Bug Fixes + +- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + +### Bug Fixes + +- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +- fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + +### Bug Fixes + +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + +### Bug Fixes + +- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +### Features + +- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +- Fix typo (#178) +- Simple typo fix (#165) + +### Features + +- v5 support in CLI (#197) +- V5 support (#188) + +# 3.0.1 (2016-11-28) + +- split uuid versions into separate files + +# 3.0.0 (2016-11-17) + +- remove .parse and .unparse + +# 2.0.0 + +- Removed uuid.BufferClass + +# 1.4.0 + +- Improved module context detection +- Removed public RNG functions + +# 1.3.2 + +- Improve tests and handling of v1() options (Issue #24) +- Expose RNG option to allow for perf testing with different generators + +# 1.3.0 + +- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! 
+- Support for node.js crypto API +- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/uuid/CONTRIBUTING.md b/node_modules/uuid/CONTRIBUTING.md new file mode 100644 index 0000000..4a4503d --- /dev/null +++ b/node_modules/uuid/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing + +Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library! + +## Testing + +```shell +npm test +``` + +## Releasing + +Releases are supposed to be done from master, version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version): + +```shell +npm run release -- --dry-run # verify output manually +npm run release # follow the instructions from the output of this command +``` diff --git a/node_modules/uuid/LICENSE.md b/node_modules/uuid/LICENSE.md new file mode 100644 index 0000000..3934168 --- /dev/null +++ b/node_modules/uuid/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/uuid/README.md b/node_modules/uuid/README.md new file mode 100644 index 0000000..ed27e57 --- /dev/null +++ b/node_modules/uuid/README.md @@ -0,0 +1,505 @@ + + +# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser) + +For the creation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDs + +- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs +- **Cross-platform** - Support for ... + - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds) + - Node 8, 10, 12, 14 + - Chrome, Safari, Firefox, Edge, IE 11 browsers + - Webpack and rollup.js module bundlers + - [React Native / Expo](#react-native--expo) +- **Secure** - Cryptographically-strong random values +- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers +- **CLI** - Includes the [`uuid` command line](#command-line) utility + +**Upgrading from `uuid@3.x`?** Your code is probably okay, but check out [Upgrading From `uuid@3.x`](#upgrading-from-uuid3x) for details. + +## Quickstart + +To create a random UUID... + +**1. Install** + +```shell +npm install uuid +``` + +**2. 
Create a UUID** (ES6 module syntax) + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' +``` + +... or using CommonJS syntax: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +For timestamp UUIDs, namespace UUIDs, and other options read on ... + +## API Summary + +| | | | +| --- | --- | --- | +| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` | +| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` | +| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` | +| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | | +| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | | +| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | | +| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | | +| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` | +| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` | + +## API + +### uuid.NIL + +The nil UUID string (all zeros). + +Example: + +```javascript +import { NIL as NIL_UUID } from 'uuid'; + +NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000' +``` + +### uuid.parse(str) + +Convert UUID string to array of bytes + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Uint8Array[16]` | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. + +Example: + +```javascript +import { parse as uuidParse } from 'uuid'; + +// Parse a UUID +const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); + +// Convert to hex strings to show byte order (for documentation purposes) +[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨ + // [ + // '6e', 'c0', 'bd', '7f', + // '11', 'c0', '43', 'da', + // '97', '5e', '2a', '8a', + // 'd9', 'eb', 'ae', '0b' + // ] +``` + +### uuid.stringify(arr[, offset]) + +Convert array of bytes to UUID string + +| | | +| -------------- | ---------------------------------------------------------------------------- | +| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. | +| [`offset` = 0] | `Number` Starting index in the Array | +| _returns_ | `String` | +| _throws_ | `TypeError` if a valid UUID string cannot be generated | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. 
+ +Example: + +```javascript +import { stringify as uuidStringify } from 'uuid'; + +const uuidBytes = [ + 0x6e, + 0xc0, + 0xbd, + 0x7f, + 0x11, + 0xc0, + 0x43, + 0xda, + 0x97, + 0x5e, + 0x2a, + 0x8a, + 0xd9, + 0xeb, + 0xae, + 0x0b, +]; + +uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b' +``` + +### uuid.v1([options[, buffer[, offset]]]) + +Create an RFC version 1 (timestamp) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) | +| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff | +| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) | +| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanseconds to add to `msecs`, should be 0-10,000) | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | +| _throws_ | `Error` if more than 10M UUIDs/sec are requested | + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields. + +Example: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' +``` + +Example using `options`: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678, +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' +``` + +### uuid.v3(name, namespace[, buffer[, offset]]) + +Create an RFC version 3 (namespace w/ MD5) UUID + +API is identical to `v5()`, but uses "v3" instead. + +⚠️ Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_." 
+ +### uuid.v4([options[, buffer[, offset]]]) + +Create an RFC version 4 (random) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Example: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +Example using predefined `random` values: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +const v4options = { + random: [ + 0x10, + 0x91, + 0x56, + 0xbe, + 0xc4, + 0xfb, + 0xc1, + 0xea, + 0x71, + 0xb4, + 0xef, + 0xe1, + 0x67, + 0x1c, + 0x58, + 0x36, + ], +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' +``` + +### uuid.v5(name, namespace[, buffer[, offset]]) + +Create an RFC version 5 (namespace w/ SHA-1) UUID + +| | | +| --- | --- | +| `name` | `String \| Array` | +| `namespace` | `String \| Array[16]` Namespace UUID | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`. + +Example with custom namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +// Define a custom namespace. Readers, create your own using something like +// https://www.uuidgenerator.net/ +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; + +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' +``` + +Example with RFC `URL` namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1' +``` + +### uuid.validate(str) + +Test a string to see if it is a valid UUID + +| | | +| --------- | --------------------------------------------------- | +| `str` | `String` to validate | +| _returns_ | `true` if string is a valid UUID, `false` otherwise | + +Example: + +```javascript +import { validate as uuidValidate } from 'uuid'; + +uuidValidate('not a UUID'); // ⇨ false +uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true +``` + +Using `validate` and `version` together it is possible to do per-version validation, e.g. validate for only v4 UUIds. 
+ +```javascript +import { version as uuidVersion } from 'uuid'; +import { validate as uuidValidate } from 'uuid'; + +function uuidValidateV4(uuid) { + return uuidValidate(uuid) && uuidVersion(uuid) === 4; +} + +const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210'; +const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836'; + +uuidValidateV4(v4Uuid); // ⇨ true +uuidValidateV4(v1Uuid); // ⇨ false +``` + +### uuid.version(str) + +Detect RFC version of a UUID + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Number` The RFC version of the UUID | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Example: + +```javascript +import { version as uuidVersion } from 'uuid'; + +uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1 +uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4 +``` + +## Command Line + +UUIDs can be generated from the command line using `uuid`. + +```shell +$ uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 +``` + +The default is to generate version 4 UUIDS, however the other versions are supported. Type `uuid --help` for details: + +```shell +$ uuid --help + +Usage: + uuid + uuid v1 + uuid v3 + uuid v4 + uuid v5 + uuid --help + +Note: may be "URL" or "DNS" to use the corresponding UUIDs +defined by RFC4122 +``` + +## ECMAScript Modules + +This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both, Node.js and browser environments). + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +To run the examples you must first create a dist build of this library in the module root: + +```shell +npm run build +``` + +## CDN Builds + +### ECMAScript Modules + +To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/): + +```html + +``` + +### UMD + +To load this module directly into older browsers you can use the [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds from any of the following CDNs: + +**Using [UNPKG](https://unpkg.com/uuid@latest/dist/umd/)**: + +```html + +``` + +**Using [jsDelivr](https://cdn.jsdelivr.net/npm/uuid@latest/dist/umd/)**: + +```html + +``` + +**Using [cdnjs](https://cdnjs.com/libraries/uuid)**: + +```html + +``` + +These CDNs all provide the same [`uuidv4()`](#uuidv4options-buffer-offset) method: + +```html + +``` + +Methods for the other algorithms ([`uuidv1()`](#uuidv1options-buffer-offset), [`uuidv3()`](#uuidv3name-namespace-buffer-offset) and [`uuidv5()`](#uuidv5name-namespace-buffer-offset)) are available from the files `uuidv1.min.js`, `uuidv3.min.js` and `uuidv5.min.js` respectively. + +## "getRandomValues() not supported" + +This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported. This issue can be resolved by adding an appropriate polyfill: + +### React Native / Expo + +1. 
Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme) +1. Import it _before_ `uuid`. Since `uuid` might also appear as a transitive dependency of some other imports it's safest to just import `react-native-get-random-values` as the very first thing in your entry point: + +```javascript +import 'react-native-get-random-values'; +import { v4 as uuidv4 } from 'uuid'; +``` + +Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`. + +### Web Workers / Service Workers (Edge <= 18) + +[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please). + +## Upgrading From `uuid@7.x` + +### Only Named Exports Supported When Using with Node.js ESM + +`uuid@7.x` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports. + +Instead of doing: + +```javascript +import uuid from 'uuid'; +uuid.v4(); +``` + +you will now have to use the named exports: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +### Deep Requires No Longer Supported + +Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7.x`](#deep-requires-now-deprecated) are no longer supported. + +## Upgrading From `uuid@3.x` + +"_Wait... what happened to `uuid@4.x` - `uuid@6.x`?!?_" + +In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 thru 6 of this module have been skipped. + +### Deep Requires Now Deprecated + +`uuid@3.x` encouraged the use of deep requires to minimize the bundle size of browser builds: + +```javascript +const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED! +uuidv4(); +``` + +As of `uuid@7.x` this library now provides ECMAScript modules builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +... or for CommonJS: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); +``` + +### Default Export Removed + +`uuid@3.x` was exporting the Version 4 UUID method as a default export: + +```javascript +const uuid = require('uuid'); // <== REMOVED! +``` + +This usage pattern was already discouraged in `uuid@3.x` and has been removed in `uuid@7.x`. 
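
> Editor's aside (not part of the vendored README): for reviewers following the "Default Export Removed" note above, the replacement is the named `v4` export already shown in the Quickstart. A minimal before/after sketch in CommonJS:

```javascript
// uuid@3.x (removed): const uuid = require('uuid'); uuid(); // default export was v4
// uuid@7.x and later: use the named export instead.
const { v4: uuidv4 } = require('uuid');
uuidv4();
```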
+ +---- +Markdown generated from [README_js.md](README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/node_modules/uuid/dist/bin/uuid b/node_modules/uuid/dist/bin/uuid new file mode 100644 index 0000000..f38d2ee --- /dev/null +++ b/node_modules/uuid/dist/bin/uuid @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../uuid-bin'); diff --git a/node_modules/uuid/dist/esm-browser/index.js b/node_modules/uuid/dist/esm-browser/index.js new file mode 100644 index 0000000..1db6f6d --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/md5.js b/node_modules/uuid/dist/esm-browser/md5.js new file mode 100644 index 0000000..8b5d46a --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/md5.js @@ -0,0 +1,215 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ +function md5(bytes) { + if (typeof bytes === 'string') { + var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (var i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + var output = []; + var length32 = input.length * 32; + var hexTab = '0123456789abcdef'; + + for (var i = 0; i < length32; i += 8) { + var x = input[i >> 5] >>> i % 32 & 0xff; + var hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + var a = 1732584193; + var b = -271733879; + var c = -1732584194; + var d = 271733878; + + for (var i = 0; i < x.length; i += 16) { + var olda = a; + var oldb = b; + var oldc = c; + var oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 
9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + var length8 = input.length * 8; + var output = new Uint32Array(getOutputLength(length8)); + + for (var i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + var lsw = (x & 0xffff) + (y & 0xffff); + var msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/nil.js b/node_modules/uuid/dist/esm-browser/nil.js new file mode 100644 index 0000000..b36324c --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/parse.js b/node_modules/uuid/dist/esm-browser/parse.js new file mode 100644 index 0000000..7c5b1d5 --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + var v; + var arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/regex.js b/node_modules/uuid/dist/esm-browser/regex.js new file mode 100644 index 0000000..3da8673 --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/rng.js b/node_modules/uuid/dist/esm-browser/rng.js new file mode 100644 index 0000000..8abbf2e --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/rng.js @@ -0,0 +1,19 @@ +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +var getRandomValues; +var rnds8 = new Uint8Array(16); +export default function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also, + // find the complete implementation of crypto (msCrypto) on IE11. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/sha1.js b/node_modules/uuid/dist/esm-browser/sha1.js new file mode 100644 index 0000000..940548b --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/sha1.js @@ -0,0 +1,96 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + var K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + var H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (var i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + var l = bytes.length / 4 + 2; + var N = Math.ceil(l / 16); + var M = new Array(N); + + for (var _i = 0; _i < N; ++_i) { + var arr = new Uint32Array(16); + + for (var j = 0; j < 16; ++j) { + arr[j] = bytes[_i * 64 + j * 4] << 24 | bytes[_i * 64 + j * 4 + 1] << 16 | bytes[_i * 64 + j * 4 + 2] << 8 | bytes[_i * 64 + j * 4 + 3]; + } + + M[_i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (var _i2 = 0; _i2 < N; ++_i2) { + var W = new Uint32Array(80); + + for (var t = 0; t < 16; ++t) { + W[t] = M[_i2][t]; + } + + for (var _t = 16; _t < 80; ++_t) { + W[_t] = ROTL(W[_t - 3] ^ W[_t - 8] ^ W[_t - 14] ^ W[_t - 16], 1); + } + + var a = H[0]; + var b = H[1]; + var c = H[2]; + var d = H[3]; + var e = H[4]; + + for (var _t2 = 0; _t2 < 80; ++_t2) { + var s = Math.floor(_t2 / 20); + var T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[_t2] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/stringify.js b/node_modules/uuid/dist/esm-browser/stringify.js new file mode 100644 index 0000000..3102111 --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/stringify.js @@ -0,0 +1,30 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +var byteToHex = []; + +for (var i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr) { + var offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/v1.js b/node_modules/uuid/dist/esm-browser/v1.js new file mode 100644 index 0000000..1a22591 --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +var _nodeId; + +var _clockseq; // Previous uuid creation time + + +var _lastMSecs = 0; +var _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || new Array(16); + options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + var seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + var msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + var dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + var tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || stringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/v3.js b/node_modules/uuid/dist/esm-browser/v3.js new file mode 100644 index 0000000..c9ab9a4 --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +var v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/v35.js b/node_modules/uuid/dist/esm-browser/v35.js new file mode 100644 index 0000000..31dd8a1 --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/v35.js @@ -0,0 +1,64 @@ +import stringify from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + var bytes = []; + + for (var i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export var DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export var URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function (name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + var bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (var i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return stringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/v4.js b/node_modules/uuid/dist/esm-browser/v4.js new file mode 100644 index 0000000..404810a --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/v4.js @@ -0,0 +1,24 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; + +function v4(options, buf, offset) { + options = options || {}; + var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (var i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return stringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/v5.js b/node_modules/uuid/dist/esm-browser/v5.js new file mode 100644 index 0000000..c08d96b --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +var v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/validate.js b/node_modules/uuid/dist/esm-browser/validate.js new file mode 100644 index 0000000..f1cdc7a --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-browser/version.js b/node_modules/uuid/dist/esm-browser/version.js new file mode 100644 index 0000000..77530e9 --- /dev/null +++ b/node_modules/uuid/dist/esm-browser/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/index.js b/node_modules/uuid/dist/esm-node/index.js new file mode 100644 index 0000000..1db6f6d --- /dev/null +++ b/node_modules/uuid/dist/esm-node/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/md5.js b/node_modules/uuid/dist/esm-node/md5.js new file mode 100644 index 0000000..4d68b04 --- /dev/null +++ 
b/node_modules/uuid/dist/esm-node/md5.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/nil.js b/node_modules/uuid/dist/esm-node/nil.js new file mode 100644 index 0000000..b36324c --- /dev/null +++ b/node_modules/uuid/dist/esm-node/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/parse.js b/node_modules/uuid/dist/esm-node/parse.js new file mode 100644 index 0000000..6421c5d --- /dev/null +++ b/node_modules/uuid/dist/esm-node/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/regex.js b/node_modules/uuid/dist/esm-node/regex.js new file mode 100644 index 0000000..3da8673 --- /dev/null +++ b/node_modules/uuid/dist/esm-node/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/rng.js b/node_modules/uuid/dist/esm-node/rng.js new file mode 100644 index 0000000..8006244 --- /dev/null +++ b/node_modules/uuid/dist/esm-node/rng.js @@ -0,0 +1,12 @@ +import crypto from 'crypto'; +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; +export default function rng() { + if (poolPtr > rnds8Pool.length - 16) { + crypto.randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/sha1.js b/node_modules/uuid/dist/esm-node/sha1.js new file mode 100644 index 0000000..e23850b --- /dev/null +++ b/node_modules/uuid/dist/esm-node/sha1.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +export default sha1; \ No newline at end of file diff --git 
a/node_modules/uuid/dist/esm-node/stringify.js b/node_modules/uuid/dist/esm-node/stringify.js new file mode 100644 index 0000000..f9bca12 --- /dev/null +++ b/node_modules/uuid/dist/esm-node/stringify.js @@ -0,0 +1,29 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/v1.js b/node_modules/uuid/dist/esm-node/v1.js new file mode 100644 index 0000000..ebf81ac --- /dev/null +++ b/node_modules/uuid/dist/esm-node/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? 
options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || stringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/v3.js b/node_modules/uuid/dist/esm-node/v3.js new file mode 100644 index 0000000..09063b8 --- /dev/null +++ b/node_modules/uuid/dist/esm-node/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/v35.js b/node_modules/uuid/dist/esm-node/v35.js new file mode 100644 index 0000000..22f6a19 --- /dev/null +++ b/node_modules/uuid/dist/esm-node/v35.js @@ -0,0 +1,64 @@ +import stringify from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function (name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return stringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/v4.js b/node_modules/uuid/dist/esm-node/v4.js new file mode 100644 index 0000000..efad926 --- /dev/null +++ b/node_modules/uuid/dist/esm-node/v4.js @@ -0,0 +1,24 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; + +function v4(options, buf, offset) { + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return stringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/v5.js b/node_modules/uuid/dist/esm-node/v5.js new file mode 100644 index 0000000..e87fe31 --- /dev/null +++ b/node_modules/uuid/dist/esm-node/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/validate.js b/node_modules/uuid/dist/esm-node/validate.js new file mode 100644 index 0000000..f1cdc7a --- /dev/null +++ b/node_modules/uuid/dist/esm-node/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/uuid/dist/esm-node/version.js b/node_modules/uuid/dist/esm-node/version.js new file mode 100644 index 0000000..77530e9 --- /dev/null +++ b/node_modules/uuid/dist/esm-node/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/uuid/dist/index.js b/node_modules/uuid/dist/index.js new file mode 100644 index 0000000..bf13b10 --- /dev/null +++ b/node_modules/uuid/dist/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function () { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function () { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function () { + return _v4.default; + } +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function () { + return _nil.default; + } +}); 
+Object.defineProperty(exports, "version", { + enumerable: true, + get: function () { + return _version.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function () { + return _validate.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function () { + return _stringify.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function () { + return _parse.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } \ No newline at end of file diff --git a/node_modules/uuid/dist/md5-browser.js b/node_modules/uuid/dist/md5-browser.js new file mode 100644 index 0000000..7a4582a --- /dev/null +++ b/node_modules/uuid/dist/md5-browser.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, 
a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/md5.js b/node_modules/uuid/dist/md5.js new file mode 100644 index 0000000..824d481 --- /dev/null +++ b/node_modules/uuid/dist/md5.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/nil.js b/node_modules/uuid/dist/nil.js new file mode 100644 index 0000000..7ade577 --- /dev/null +++ b/node_modules/uuid/dist/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/parse.js b/node_modules/uuid/dist/parse.js new file mode 100644 index 0000000..4c69fc3 --- /dev/null +++ b/node_modules/uuid/dist/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ 
+ + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/regex.js b/node_modules/uuid/dist/regex.js new file mode 100644 index 0000000..1ef91d6 --- /dev/null +++ b/node_modules/uuid/dist/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/rng-browser.js b/node_modules/uuid/dist/rng-browser.js new file mode 100644 index 0000000..91faeae --- /dev/null +++ b/node_modules/uuid/dist/rng-browser.js @@ -0,0 +1,26 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also, + // find the complete implementation of crypto (msCrypto) on IE11. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/uuid/dist/rng.js b/node_modules/uuid/dist/rng.js new file mode 100644 index 0000000..3507f93 --- /dev/null +++ b/node_modules/uuid/dist/rng.js @@ -0,0 +1,24 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/uuid/dist/sha1-browser.js b/node_modules/uuid/dist/sha1-browser.js new file mode 100644 index 0000000..24cbced --- /dev/null +++ b/node_modules/uuid/dist/sha1-browser.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/sha1.js b/node_modules/uuid/dist/sha1.js new file mode 100644 index 0000000..03bdd63 --- /dev/null +++ b/node_modules/uuid/dist/sha1.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/stringify.js b/node_modules/uuid/dist/stringify.js new file mode 100644 index 0000000..b8e7519 --- /dev/null +++ b/node_modules/uuid/dist/stringify.js @@ -0,0 +1,39 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/umd/uuid.min.js b/node_modules/uuid/dist/umd/uuid.min.js new file mode 100644 index 0000000..639ca2f --- /dev/null +++ b/node_modules/uuid/dist/umd/uuid.min.js @@ -0,0 +1 @@ +!function(r,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((r="undefined"!=typeof globalThis?globalThis:r||self).uuid={})}(this,(function(r){"use strict";var e,n=new Uint8Array(16);function t(){if(!e&&!(e="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return e(n)}var o=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function a(r){return"string"==typeof r&&o.test(r)}for(var i,u,f=[],s=0;s<256;++s)f.push((s+256).toString(16).substr(1));function c(r){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=(f[r[e+0]]+f[r[e+1]]+f[r[e+2]]+f[r[e+3]]+"-"+f[r[e+4]]+f[r[e+5]]+"-"+f[r[e+6]]+f[r[e+7]]+"-"+f[r[e+8]]+f[r[e+9]]+"-"+f[r[e+10]]+f[r[e+11]]+f[r[e+12]]+f[r[e+13]]+f[r[e+14]]+f[r[e+15]]).toLowerCase();if(!a(n))throw TypeError("Stringified UUID is invalid");return n}var l=0,d=0;function v(r){if(!a(r))throw TypeError("Invalid UUID");var e,n=new Uint8Array(16);return n[0]=(e=parseInt(r.slice(0,8),16))>>>24,n[1]=e>>>16&255,n[2]=e>>>8&255,n[3]=255&e,n[4]=(e=parseInt(r.slice(9,13),16))>>>8,n[5]=255&e,n[6]=(e=parseInt(r.slice(14,18),16))>>>8,n[7]=255&e,n[8]=(e=parseInt(r.slice(19,23),16))>>>8,n[9]=255&e,n[10]=(e=parseInt(r.slice(24,36),16))/1099511627776&255,n[11]=e/4294967296&255,n[12]=e>>>24&255,n[13]=e>>>16&255,n[14]=e>>>8&255,n[15]=255&e,n}function p(r,e,n){function t(r,t,o,a){if("string"==typeof r&&(r=function(r){r=unescape(encodeURIComponent(r));for(var e=[],n=0;n>>9<<4)+1}function y(r,e){var n=(65535&r)+(65535&e);return(r>>16)+(e>>16)+(n>>16)<<16|65535&n}function g(r,e,n,t,o,a){return y((i=y(y(e,r),y(t,a)))<<(u=o)|i>>>32-u,n);var i,u}function m(r,e,n,t,o,a,i){return g(e&n|~e&t,r,e,o,a,i)}function w(r,e,n,t,o,a,i){return g(e&t|n&~t,r,e,o,a,i)}function b(r,e,n,t,o,a,i){return g(e^n^t,r,e,o,a,i)}function A(r,e,n,t,o,a,i){return g(n^(e|~t),r,e,o,a,i)}var U=p("v3",48,(function(r){if("string"==typeof r){var e=unescape(encodeURIComponent(r));r=new Uint8Array(e.length);for(var n=0;n>5]>>>o%32&255,i=parseInt(t.charAt(a>>>4&15)+t.charAt(15&a),16);e.push(i)}return e}(function(r,e){r[e>>5]|=128<>5]|=(255&r[t/8])<>>32-e}var R=p("v5",80,(function(r){var e=[1518500249,1859775393,2400959708,3395469782],n=[1732584193,4023233417,2562383102,271733878,3285377520];if("string"==typeof r){var t=unescape(encodeURIComponent(r));r=[];for(var o=0;o>>0;w=m,m=g,g=C(y,30)>>>0,y=h,h=U}n[0]=n[0]+h>>>0,n[1]=n[1]+y>>>0,n[2]=n[2]+g>>>0,n[3]=n[3]+m>>>0,n[4]=n[4]+w>>>0}return[n[0]>>24&255,n[0]>>16&255,n[0]>>8&255,255&n[0],n[1]>>24&255,n[1]>>16&255,n[1]>>8&255,255&n[1],n[2]>>24&255,n[2]>>16&255,n[2]>>8&255,255&n[2],n[3]>>24&255,n[3]>>16&255,n[3]>>8&255,255&n[3],n[4]>>24&255,n[4]>>16&255,n[4]>>8&255,255&n[4]]}));r.NIL="00000000-0000-0000-0000-000000000000",r.parse=v,r.stringify=c,r.v1=function(r,e,n){var o=e&&n||0,a=e||new Array(16),f=(r=r||{}).node||i,s=void 0!==r.clockseq?r.clockseq:u;if(null==f||null==s){var v=r.random||(r.rng||t)();null==f&&(f=i=[1|v[0],v[1],v[2],v[3],v[4],v[5]]),null==s&&(s=u=16383&(v[6]<<8|v[7]))}var p=void 0!==r.msecs?r.msecs:Date.now(),h=void 0!==r.nsecs?r.nsecs:d+1,y=p-l+(h-d)/1e4;if(y<0&&void 0===r.clockseq&&(s=s+1&16383),(y<0||p>l)&&void 0===r.nsecs&&(h=0),h>=1e4)throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");l=p,d=h,u=s;var g=(1e4*(268435455&(p+=122192928e5))+h)%4294967296;a[o++]=g>>>24&255,a[o++]=g>>>16&255,a[o++]=g>>>8&255,a[o++]=255&g;var m=p/4294967296*1e4&268435455;a[o++]=m>>>8&255,a[o++]=255&m,a[o++]=m>>>24&15|16,a[o++]=m>>>16&255,a[o++]=s>>>8|128,a[o++]=255&s;for(var w=0;w<6;++w)a[o+w]=f[w];return e||c(a)},r.v3=U,r.v4=function(r,e,n){var o=(r=r||{}).random||(r.rng||t)();if(o[6]=15&o[6]|64,o[8]=63&o[8]|128,e){n=n||0;for(var a=0;a<16;++a)e[n+a]=o[a];return e}return 
c(o)},r.v5=R,r.validate=a,r.version=function(r){if(!a(r))throw TypeError("Invalid UUID");return parseInt(r.substr(14,1),16)},Object.defineProperty(r,"__esModule",{value:!0})})); \ No newline at end of file diff --git a/node_modules/uuid/dist/umd/uuidNIL.min.js b/node_modules/uuid/dist/umd/uuidNIL.min.js new file mode 100644 index 0000000..30b28a7 --- /dev/null +++ b/node_modules/uuid/dist/umd/uuidNIL.min.js @@ -0,0 +1 @@ +!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidNIL=n()}(this,(function(){"use strict";return"00000000-0000-0000-0000-000000000000"})); \ No newline at end of file diff --git a/node_modules/uuid/dist/umd/uuidParse.min.js b/node_modules/uuid/dist/umd/uuidParse.min.js new file mode 100644 index 0000000..d48ea6a --- /dev/null +++ b/node_modules/uuid/dist/umd/uuidParse.min.js @@ -0,0 +1 @@ +!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidParse=n()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(n){if(!function(n){return"string"==typeof n&&e.test(n)}(n))throw TypeError("Invalid UUID");var t,i=new Uint8Array(16);return i[0]=(t=parseInt(n.slice(0,8),16))>>>24,i[1]=t>>>16&255,i[2]=t>>>8&255,i[3]=255&t,i[4]=(t=parseInt(n.slice(9,13),16))>>>8,i[5]=255&t,i[6]=(t=parseInt(n.slice(14,18),16))>>>8,i[7]=255&t,i[8]=(t=parseInt(n.slice(19,23),16))>>>8,i[9]=255&t,i[10]=(t=parseInt(n.slice(24,36),16))/1099511627776&255,i[11]=t/4294967296&255,i[12]=t>>>24&255,i[13]=t>>>16&255,i[14]=t>>>8&255,i[15]=255&t,i}})); \ No newline at end of file diff --git a/node_modules/uuid/dist/umd/uuidStringify.min.js b/node_modules/uuid/dist/umd/uuidStringify.min.js new file mode 100644 index 0000000..fd39adc --- /dev/null +++ b/node_modules/uuid/dist/umd/uuidStringify.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidStringify=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function t(t){return"string"==typeof t&&e.test(t)}for(var i=[],n=0;n<256;++n)i.push((n+256).toString(16).substr(1));return function(e){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,f=(i[e[n+0]]+i[e[n+1]]+i[e[n+2]]+i[e[n+3]]+"-"+i[e[n+4]]+i[e[n+5]]+"-"+i[e[n+6]]+i[e[n+7]]+"-"+i[e[n+8]]+i[e[n+9]]+"-"+i[e[n+10]]+i[e[n+11]]+i[e[n+12]]+i[e[n+13]]+i[e[n+14]]+i[e[n+15]]).toLowerCase();if(!t(f))throw TypeError("Stringified UUID is invalid");return f}})); \ No newline at end of file diff --git a/node_modules/uuid/dist/umd/uuidValidate.min.js b/node_modules/uuid/dist/umd/uuidValidate.min.js new file mode 100644 index 0000000..378e5b9 --- /dev/null +++ b/node_modules/uuid/dist/umd/uuidValidate.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidValidate=t()}(this,(function(){"use strict";var 
e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(t){return"string"==typeof t&&e.test(t)}})); \ No newline at end of file diff --git a/node_modules/uuid/dist/umd/uuidVersion.min.js b/node_modules/uuid/dist/umd/uuidVersion.min.js new file mode 100644 index 0000000..274bb09 --- /dev/null +++ b/node_modules/uuid/dist/umd/uuidVersion.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidVersion=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(t){if(!function(t){return"string"==typeof t&&e.test(t)}(t))throw TypeError("Invalid UUID");return parseInt(t.substr(14,1),16)}})); \ No newline at end of file diff --git a/node_modules/uuid/dist/umd/uuidv1.min.js b/node_modules/uuid/dist/umd/uuidv1.min.js new file mode 100644 index 0000000..2622889 --- /dev/null +++ b/node_modules/uuid/dist/umd/uuidv1.min.js @@ -0,0 +1 @@ +!function(e,o){"object"==typeof exports&&"undefined"!=typeof module?module.exports=o():"function"==typeof define&&define.amd?define(o):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidv1=o()}(this,(function(){"use strict";var e,o=new Uint8Array(16);function t(){if(!e&&!(e="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return e(o)}var n=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function r(e){return"string"==typeof e&&n.test(e)}for(var i,u,s=[],a=0;a<256;++a)s.push((a+256).toString(16).substr(1));var d=0,f=0;return function(e,o,n){var a=o&&n||0,c=o||new Array(16),l=(e=e||{}).node||i,p=void 0!==e.clockseq?e.clockseq:u;if(null==l||null==p){var v=e.random||(e.rng||t)();null==l&&(l=i=[1|v[0],v[1],v[2],v[3],v[4],v[5]]),null==p&&(p=u=16383&(v[6]<<8|v[7]))}var y=void 0!==e.msecs?e.msecs:Date.now(),m=void 0!==e.nsecs?e.nsecs:f+1,g=y-d+(m-f)/1e4;if(g<0&&void 0===e.clockseq&&(p=p+1&16383),(g<0||y>d)&&void 0===e.nsecs&&(m=0),m>=1e4)throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");d=y,f=m,u=p;var h=(1e4*(268435455&(y+=122192928e5))+m)%4294967296;c[a++]=h>>>24&255,c[a++]=h>>>16&255,c[a++]=h>>>8&255,c[a++]=255&h;var w=y/4294967296*1e4&268435455;c[a++]=w>>>8&255,c[a++]=255&w,c[a++]=w>>>24&15|16,c[a++]=w>>>16&255,c[a++]=p>>>8|128,c[a++]=255&p;for(var b=0;b<6;++b)c[a+b]=l[b];return o||function(e){var o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,t=(s[e[o+0]]+s[e[o+1]]+s[e[o+2]]+s[e[o+3]]+"-"+s[e[o+4]]+s[e[o+5]]+"-"+s[e[o+6]]+s[e[o+7]]+"-"+s[e[o+8]]+s[e[o+9]]+"-"+s[e[o+10]]+s[e[o+11]]+s[e[o+12]]+s[e[o+13]]+s[e[o+14]]+s[e[o+15]]).toLowerCase();if(!r(t))throw TypeError("Stringified UUID is invalid");return t}(c)}})); \ No newline at end of file diff --git a/node_modules/uuid/dist/umd/uuidv3.min.js b/node_modules/uuid/dist/umd/uuidv3.min.js new file mode 100644 index 0000000..8d37b62 --- /dev/null +++ b/node_modules/uuid/dist/umd/uuidv3.min.js @@ -0,0 +1 @@ +!function(n,r){"object"==typeof exports&&"undefined"!=typeof 
module?module.exports=r():"function"==typeof define&&define.amd?define(r):(n="undefined"!=typeof globalThis?globalThis:n||self).uuidv3=r()}(this,(function(){"use strict";var n=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function r(r){return"string"==typeof r&&n.test(r)}for(var e=[],t=0;t<256;++t)e.push((t+256).toString(16).substr(1));function i(n){return 14+(n+64>>>9<<4)+1}function o(n,r){var e=(65535&n)+(65535&r);return(n>>16)+(r>>16)+(e>>16)<<16|65535&e}function a(n,r,e,t,i,a){return o((f=o(o(r,n),o(t,a)))<<(u=i)|f>>>32-u,e);var f,u}function f(n,r,e,t,i,o,f){return a(r&e|~r&t,n,r,i,o,f)}function u(n,r,e,t,i,o,f){return a(r&t|e&~t,n,r,i,o,f)}function c(n,r,e,t,i,o,f){return a(r^e^t,n,r,i,o,f)}function s(n,r,e,t,i,o,f){return a(e^(r|~t),n,r,i,o,f)}return function(n,t,i){function o(n,o,a,f){if("string"==typeof n&&(n=function(n){n=unescape(encodeURIComponent(n));for(var r=[],e=0;e>>24,t[1]=e>>>16&255,t[2]=e>>>8&255,t[3]=255&e,t[4]=(e=parseInt(n.slice(9,13),16))>>>8,t[5]=255&e,t[6]=(e=parseInt(n.slice(14,18),16))>>>8,t[7]=255&e,t[8]=(e=parseInt(n.slice(19,23),16))>>>8,t[9]=255&e,t[10]=(e=parseInt(n.slice(24,36),16))/1099511627776&255,t[11]=e/4294967296&255,t[12]=e>>>24&255,t[13]=e>>>16&255,t[14]=e>>>8&255,t[15]=255&e,t}(o)),16!==o.length)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");var u=new Uint8Array(16+n.length);if(u.set(o),u.set(n,o.length),(u=i(u))[6]=15&u[6]|t,u[8]=63&u[8]|128,a){f=f||0;for(var c=0;c<16;++c)a[f+c]=u[c];return a}return function(n){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,i=(e[n[t+0]]+e[n[t+1]]+e[n[t+2]]+e[n[t+3]]+"-"+e[n[t+4]]+e[n[t+5]]+"-"+e[n[t+6]]+e[n[t+7]]+"-"+e[n[t+8]]+e[n[t+9]]+"-"+e[n[t+10]]+e[n[t+11]]+e[n[t+12]]+e[n[t+13]]+e[n[t+14]]+e[n[t+15]]).toLowerCase();if(!r(i))throw TypeError("Stringified UUID is invalid");return i}(u)}try{o.name=n}catch(n){}return o.DNS="6ba7b810-9dad-11d1-80b4-00c04fd430c8",o.URL="6ba7b811-9dad-11d1-80b4-00c04fd430c8",o}("v3",48,(function(n){if("string"==typeof n){var r=unescape(encodeURIComponent(n));n=new Uint8Array(r.length);for(var e=0;e>5]>>>i%32&255,a=parseInt(t.charAt(o>>>4&15)+t.charAt(15&o),16);r.push(a)}return r}(function(n,r){n[r>>5]|=128<>5]|=(255&n[t/8])<1&&void 0!==arguments[1]?arguments[1]:0,o=(i[t[e+0]]+i[t[e+1]]+i[t[e+2]]+i[t[e+3]]+"-"+i[t[e+4]]+i[t[e+5]]+"-"+i[t[e+6]]+i[t[e+7]]+"-"+i[t[e+8]]+i[t[e+9]]+"-"+i[t[e+10]]+i[t[e+11]]+i[t[e+12]]+i[t[e+13]]+i[t[e+14]]+i[t[e+15]]).toLowerCase();if(!r(o))throw TypeError("Stringified UUID is invalid");return o}(u)}})); \ No newline at end of file diff --git a/node_modules/uuid/dist/umd/uuidv5.min.js b/node_modules/uuid/dist/umd/uuidv5.min.js new file mode 100644 index 0000000..ba6fc63 --- /dev/null +++ b/node_modules/uuid/dist/umd/uuidv5.min.js @@ -0,0 +1 @@ +!function(r,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(r="undefined"!=typeof globalThis?globalThis:r||self).uuidv5=e()}(this,(function(){"use strict";var r=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function e(e){return"string"==typeof e&&r.test(e)}for(var t=[],n=0;n<256;++n)t.push((n+256).toString(16).substr(1));function a(r,e,t,n){switch(r){case 0:return e&t^~e&n;case 1:return e^t^n;case 2:return e&t^e&n^t&n;case 3:return e^t^n}}function o(r,e){return r<>>32-e}return function(r,n,a){function o(r,o,i,f){if("string"==typeof 
r&&(r=function(r){r=unescape(encodeURIComponent(r));for(var e=[],t=0;t>>24,n[1]=t>>>16&255,n[2]=t>>>8&255,n[3]=255&t,n[4]=(t=parseInt(r.slice(9,13),16))>>>8,n[5]=255&t,n[6]=(t=parseInt(r.slice(14,18),16))>>>8,n[7]=255&t,n[8]=(t=parseInt(r.slice(19,23),16))>>>8,n[9]=255&t,n[10]=(t=parseInt(r.slice(24,36),16))/1099511627776&255,n[11]=t/4294967296&255,n[12]=t>>>24&255,n[13]=t>>>16&255,n[14]=t>>>8&255,n[15]=255&t,n}(o)),16!==o.length)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");var s=new Uint8Array(16+r.length);if(s.set(o),s.set(r,o.length),(s=a(s))[6]=15&s[6]|n,s[8]=63&s[8]|128,i){f=f||0;for(var u=0;u<16;++u)i[f+u]=s[u];return i}return function(r){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,a=(t[r[n+0]]+t[r[n+1]]+t[r[n+2]]+t[r[n+3]]+"-"+t[r[n+4]]+t[r[n+5]]+"-"+t[r[n+6]]+t[r[n+7]]+"-"+t[r[n+8]]+t[r[n+9]]+"-"+t[r[n+10]]+t[r[n+11]]+t[r[n+12]]+t[r[n+13]]+t[r[n+14]]+t[r[n+15]]).toLowerCase();if(!e(a))throw TypeError("Stringified UUID is invalid");return a}(s)}try{o.name=r}catch(r){}return o.DNS="6ba7b810-9dad-11d1-80b4-00c04fd430c8",o.URL="6ba7b811-9dad-11d1-80b4-00c04fd430c8",o}("v5",80,(function(r){var e=[1518500249,1859775393,2400959708,3395469782],t=[1732584193,4023233417,2562383102,271733878,3285377520];if("string"==typeof r){var n=unescape(encodeURIComponent(r));r=[];for(var i=0;i>>0;A=U,U=w,w=o(b,30)>>>0,b=g,g=C}t[0]=t[0]+g>>>0,t[1]=t[1]+b>>>0,t[2]=t[2]+w>>>0,t[3]=t[3]+U>>>0,t[4]=t[4]+A>>>0}return[t[0]>>24&255,t[0]>>16&255,t[0]>>8&255,255&t[0],t[1]>>24&255,t[1]>>16&255,t[1]>>8&255,255&t[1],t[2]>>24&255,t[2]>>16&255,t[2]>>8&255,255&t[2],t[3]>>24&255,t[3]>>16&255,t[3]>>8&255,255&t[3],t[4]>>24&255,t[4]>>16&255,t[4]>>8&255,255&t[4]]}))})); \ No newline at end of file diff --git a/node_modules/uuid/dist/uuid-bin.js b/node_modules/uuid/dist/uuid-bin.js new file mode 100644 index 0000000..50a7a9f --- /dev/null +++ b/node_modules/uuid/dist/uuid-bin.js @@ -0,0 +1,85 @@ +"use strict"; + +var _assert = _interopRequireDefault(require("assert")); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 <name> <namespace uuid>'); + console.log(' uuid v4'); + console.log(' uuid v5 <name> <namespace uuid>'); + console.log(' uuid --help'); + console.log('\nNote: <namespace uuid> may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +const args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} + +const version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + console.log((0, _v.default)()); + break; + + case 'v3': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v3 name not specified'); + (0, _assert.default)(namespace != null, 'v3 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v2.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v2.default.DNS; + } + + console.log((0, _v2.default)(name, namespace)); + break; + } + + case 'v4': + console.log((0, _v3.default)()); + break; + + case 'v5': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v5 name not specified'); + (0, _assert.default)(namespace != null, 'v5 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v4.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v4.default.DNS; + } + + console.log((0, _v4.default)(name, namespace)); + break; + } + + default: + usage(); + process.exit(1); +} \ No newline at end of file diff --git a/node_modules/uuid/dist/v1.js b/node_modules/uuid/dist/v1.js new file mode 100644 index 0000000..abb9b3d --- /dev/null +++ b/node_modules/uuid/dist/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). 
JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/v3.js b/node_modules/uuid/dist/v3.js new file mode 100644 index 0000000..6b47ff5 --- /dev/null +++ b/node_modules/uuid/dist/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/v35.js b/node_modules/uuid/dist/v35.js new file mode 100644 index 0000000..f784c63 --- /dev/null +++ b/node_modules/uuid/dist/v35.js @@ -0,0 +1,78 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/uuid/dist/v4.js b/node_modules/uuid/dist/v4.js new file mode 100644 index 0000000..838ce0b --- /dev/null +++ b/node_modules/uuid/dist/v4.js @@ -0,0 +1,37 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/v5.js b/node_modules/uuid/dist/v5.js new file mode 100644 index 0000000..99d615e --- /dev/null +++ b/node_modules/uuid/dist/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/validate.js b/node_modules/uuid/dist/validate.js new file mode 100644 index 0000000..fd05215 --- /dev/null +++ b/node_modules/uuid/dist/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/dist/version.js b/node_modules/uuid/dist/version.js new file mode 100644 index 0000000..b72949c --- /dev/null +++ b/node_modules/uuid/dist/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/uuid/package.json b/node_modules/uuid/package.json new file mode 100644 index 0000000..f0ab371 --- /dev/null +++ b/node_modules/uuid/package.json @@ -0,0 +1,135 @@ +{ + "name": "uuid", + "version": "8.3.2", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./dist/bin/uuid" + }, + "sideEffects": false, + "main": "./dist/index.js", + "exports": { + ".": { + "node": { + "module": "./dist/esm-node/index.js", + "require": "./dist/index.js", + "import": "./wrapper.mjs" + }, + "default": "./dist/esm-browser/index.js" + }, + "./package.json": "./package.json" + }, + "module": "./dist/esm-node/index.js", + "browser": { + "./dist/md5.js": "./dist/md5-browser.js", + "./dist/rng.js": "./dist/rng-browser.js", + "./dist/sha1.js": "./dist/sha1-browser.js", + "./dist/esm-node/index.js": "./dist/esm-browser/index.js" + }, + "files": [ + "CHANGELOG.md", + "CONTRIBUTING.md", + "LICENSE.md", + "README.md", + "dist", + "wrapper.mjs" + ], + "devDependencies": { + "@babel/cli": "7.11.6", + "@babel/core": "7.11.6", + "@babel/preset-env": "7.11.5", + "@commitlint/cli": "11.0.0", + "@commitlint/config-conventional": "11.0.0", + "@rollup/plugin-node-resolve": "9.0.0", + "babel-eslint": "10.1.0", + "bundlewatch": "0.3.1", + "eslint": "7.10.0", + "eslint-config-prettier": "6.12.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.22.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "3.1.4", + "eslint-plugin-promise": "4.2.1", + "eslint-plugin-standard": "4.0.1", + "husky": "4.3.0", + "jest": "25.5.4", + "lint-staged": "10.4.0", + "npm-run-all": "4.1.5", + "optional-dev-dependency": "2.0.1", + "prettier": "2.1.2", + "random-seed": "0.3.0", + "rollup": "2.28.2", + "rollup-plugin-terser": "7.0.2", + "runmd": "1.3.2", + "standard-version": "9.0.0" + }, + "optionalDevDependencies": { + 
"@wdio/browserstack-service": "6.4.0", + "@wdio/cli": "6.4.0", + "@wdio/jasmine-framework": "6.4.0", + "@wdio/local-runner": "6.4.0", + "@wdio/spec-reporter": "6.4.0", + "@wdio/static-server-service": "6.4.0", + "@wdio/sync": "6.4.0" + }, + "scripts": { + "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build", + "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build", + "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test", + "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test", + "lint": "npm run eslint:check && npm run prettier:check", + "eslint:check": "eslint src/ test/ examples/ *.js", + "eslint:fix": "eslint --fix src/ test/ examples/ *.js", + "pretest": "[ -n $CI ] || npm run build", + "test": "BABEL_ENV=commonjs node --throw-deprecation node_modules/.bin/jest test/unit/", + "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all --parallel examples:browser:**", + "test:browser": "wdio run ./wdio.conf.js", + "pretest:node": "npm run build", + "test:node": "npm-run-all --parallel examples:node:**", + "test:pack": "./scripts/testpack.sh", + "pretest:benchmark": "npm run build", + "test:benchmark": "cd examples/benchmark && npm install && npm test", + "prettier:check": "prettier --ignore-path .prettierignore --check '**/*.{js,jsx,json,md}'", + "prettier:fix": "prettier --ignore-path .prettierignore --write '**/*.{js,jsx,json,md}'", + "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json", + "md": "runmd --watch --output=README.md README_js.md", + "docs": "( node --version | grep -q 'v12' ) && ( npm run build && runmd --output=README.md README_js.md )", + "docs:diff": "npm run docs && git diff --quiet README.md", + "build": "./scripts/build.sh", + "prepack": "npm run build", + "release": "standard-version --no-verify" + }, + "repository": { + "type": "git", + "url": "https://github.com/uuidjs/uuid.git" + }, + "husky": { + "hooks": { + "commit-msg": "commitlint -E HUSKY_GIT_PARAMS", + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "*.{js,jsx,json,md}": [ + "prettier --write" + ], + "*.{js,jsx}": [ + "eslint --fix" + ] + }, + "standard-version": { + "scripts": { + "postchangelog": "prettier --write CHANGELOG.md" + } + } +} diff --git a/node_modules/uuid/wrapper.mjs b/node_modules/uuid/wrapper.mjs new file mode 100644 index 0000000..c31e9ce --- /dev/null +++ b/node_modules/uuid/wrapper.mjs @@ -0,0 +1,10 @@ +import uuid from './dist/index.js'; +export const v1 = uuid.v1; +export const v3 = uuid.v3; +export const v4 = uuid.v4; +export const v5 = uuid.v5; +export const NIL = uuid.NIL; +export const version = uuid.version; +export const validate = uuid.validate; +export const stringify = uuid.stringify; +export const parse = uuid.parse; diff --git a/node_modules/webidl-conversions/LICENSE.md b/node_modules/webidl-conversions/LICENSE.md new file mode 100644 index 0000000..d4a994f --- /dev/null +++ b/node_modules/webidl-conversions/LICENSE.md @@ -0,0 +1,12 @@ +# The BSD 2-Clause License + +Copyright (c) 2014, Domenic Denicola +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/webidl-conversions/README.md b/node_modules/webidl-conversions/README.md new file mode 100644 index 0000000..3657890 --- /dev/null +++ b/node_modules/webidl-conversions/README.md @@ -0,0 +1,53 @@ +# WebIDL Type Conversions on JavaScript Values + +This package implements, in JavaScript, the algorithms to convert a given JavaScript value according to a given [WebIDL](http://heycam.github.io/webidl/) [type](http://heycam.github.io/webidl/#idl-types). + +The goal is that you should be able to write code like + +```js +const conversions = require("webidl-conversions"); + +function doStuff(x, y) { + x = conversions["boolean"](x); + y = conversions["unsigned long"](y); + // actual algorithm code here +} +``` + +and your function `doStuff` will behave the same as a WebIDL operation declared as + +```webidl +void doStuff(boolean x, unsigned long y); +``` + +## API + +This package's main module's default export is an object with a variety of methods, each corresponding to a different WebIDL type. Each method, when invoked on a JavaScript value, will give back the new JavaScript value that results after passing through the WebIDL conversion rules. (See below for more details on what that means.) Alternately, the method could throw an error, if the WebIDL algorithm is specified to do so: for example `conversions["float"](NaN)` [will throw a `TypeError`](http://heycam.github.io/webidl/#es-float). + +## Status + +All of the numeric types are implemented (float being implemented as double) and some others are as well - check the source for all of them. This list will grow over time in service of the [HTML as Custom Elements](https://github.com/dglazkov/html-as-custom-elements) project, but in the meantime, pull requests welcome! + +I'm not sure yet what the strategy will be for modifiers, e.g. [`[Clamp]`](http://heycam.github.io/webidl/#Clamp). Maybe something like `conversions["unsigned long"](x, { clamp: true })`? We'll see. + +We might also want to extend the API to give better error messages, e.g. "Argument 1 of HTMLMediaElement.fastSeek is not a finite floating-point value" instead of "Argument is not a finite floating-point value." This would require passing in more information to the conversion functions than we currently do. + +## Background + +What's actually going on here, conceptually, is pretty weird. Let's try to explain. + +WebIDL, as part of its madness-inducing design, has its own type system. When people write algorithms in web platform specs, they usually operate on WebIDL values, i.e. instances of WebIDL types. 
For example, if they were specifying the algorithm for our `doStuff` operation above, they would treat `x` as a WebIDL value of [WebIDL type `boolean`](http://heycam.github.io/webidl/#idl-boolean). Crucially, they would _not_ treat `x` as a JavaScript variable whose value is either the JavaScript `true` or `false`. They're instead working in a different type system altogether, with its own rules. + +Separately from its type system, WebIDL defines a ["binding"](http://heycam.github.io/webidl/#ecmascript-binding) of the type system into JavaScript. This contains rules like: when you pass a JavaScript value to the JavaScript method that manifests a given WebIDL operation, how does that get converted into a WebIDL value? For example, a JavaScript `true` passed in the position of a WebIDL `boolean` argument becomes a WebIDL `true`. But, a JavaScript `true` passed in the position of a [WebIDL `unsigned long`](http://heycam.github.io/webidl/#idl-unsigned-long) becomes a WebIDL `1`. And so on. + +Finally, we have the actual implementation code. This is usually C++, although these days [some smart people are using Rust](https://github.com/servo/servo). The implementation, of course, has its own type system. So when they implement the WebIDL algorithms, they don't actually use WebIDL values, since those aren't "real" outside of specs. Instead, implementations apply the WebIDL binding rules in such a way as to convert incoming JavaScript values into C++ values. For example, if code in the browser called `doStuff(true, true)`, then the implementation code would eventually receive a C++ `bool` containing `true` and a C++ `uint32_t` containing `1`. + +The upside of all this is that implementations can abstract all the conversion logic away, letting WebIDL handle it, and focus on implementing the relevant methods in C++ with values of the correct type already provided. That is payoff of WebIDL, in a nutshell. + +And getting to that payoff is the goal of _this_ project—but for JavaScript implementations, instead of C++ ones. That is, this library is designed to make it easier for JavaScript developers to write functions that behave like a given WebIDL operation. So conceptually, the conversion pipeline, which in its general form is JavaScript values ↦ WebIDL values ↦ implementation-language values, in this case becomes JavaScript values ↦ WebIDL values ↦ JavaScript values. And that intermediate step is where all the logic is performed: a JavaScript `true` becomes a WebIDL `1` in an unsigned long context, which then becomes a JavaScript `1`. + +## Don't Use This + +Seriously, why would you ever use this? You really shouldn't. WebIDL is … not great, and you shouldn't be emulating its semantics. If you're looking for a generic argument-processing library, you should find one with better rules than those from WebIDL. In general, your JavaScript should not be trying to become more like WebIDL; if anything, we should fix WebIDL to make it more like JavaScript. + +The _only_ people who should use this are those trying to create faithful implementations (or polyfills) of web platform interfaces defined in WebIDL. diff --git a/node_modules/webidl-conversions/lib/index.js b/node_modules/webidl-conversions/lib/index.js new file mode 100644 index 0000000..c5153a3 --- /dev/null +++ b/node_modules/webidl-conversions/lib/index.js @@ -0,0 +1,189 @@ +"use strict"; + +var conversions = {}; +module.exports = conversions; + +function sign(x) { + return x < 0 ? 
-1 : 1; +} + +function evenRound(x) { + // Round x to the nearest integer, choosing the even integer if it lies halfway between two. + if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) + return Math.floor(x); + } else { + return Math.round(x); + } +} + +function createNumberConversion(bitLength, typeOpts) { + if (!typeOpts.unsigned) { + --bitLength; + } + const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength); + const upperBound = Math.pow(2, bitLength) - 1; + + const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); + const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); + + return function(V, opts) { + if (!opts) opts = {}; + + let x = +V; + + if (opts.enforceRange) { + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite number"); + } + + x = sign(x) * Math.floor(Math.abs(x)); + if (x < lowerBound || x > upperBound) { + throw new TypeError("Argument is not in byte range"); + } + + return x; + } + + if (!isNaN(x) && opts.clamp) { + x = evenRound(x); + + if (x < lowerBound) x = lowerBound; + if (x > upperBound) x = upperBound; + return x; + } + + if (!Number.isFinite(x) || x === 0) { + return 0; + } + + x = sign(x) * Math.floor(Math.abs(x)); + x = x % moduloVal; + + if (!typeOpts.unsigned && x >= moduloBound) { + return x - moduloVal; + } else if (typeOpts.unsigned) { + if (x < 0) { + x += moduloVal; + } else if (x === -0) { // don't return negative zero + return 0; + } + } + + return x; + } +} + +conversions["void"] = function () { + return undefined; +}; + +conversions["boolean"] = function (val) { + return !!val; +}; + +conversions["byte"] = createNumberConversion(8, { unsigned: false }); +conversions["octet"] = createNumberConversion(8, { unsigned: true }); + +conversions["short"] = createNumberConversion(16, { unsigned: false }); +conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); + +conversions["long"] = createNumberConversion(32, { unsigned: false }); +conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); + +conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); +conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); + +conversions["double"] = function (V) { + const x = +V; + + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite floating-point value"); + } + + return x; +}; + +conversions["unrestricted double"] = function (V) { + const x = +V; + + if (isNaN(x)) { + throw new TypeError("Argument is NaN"); + } + + return x; +}; + +// not quite valid, but good enough for JS +conversions["float"] = conversions["double"]; +conversions["unrestricted float"] = conversions["unrestricted double"]; + +conversions["DOMString"] = function (V, opts) { + if (!opts) opts = {}; + + if (opts.treatNullAsEmptyString && V === null) { + return ""; + } + + return String(V); +}; + +conversions["ByteString"] = function (V, opts) { + const x = String(V); + let c = undefined; + for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { + if (c > 255) { + throw new TypeError("Argument is not a valid bytestring"); + } + } + + return x; +}; + +conversions["USVString"] = function (V) { + const S = String(V); + const n = S.length; + const U = []; + for (let i = 0; i < n; ++i) { + const c = S.charCodeAt(i); + if (c < 0xD800 || c > 0xDFFF) { + 
U.push(String.fromCodePoint(c)); + } else if (0xDC00 <= c && c <= 0xDFFF) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + if (i === n - 1) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + const d = S.charCodeAt(i + 1); + if (0xDC00 <= d && d <= 0xDFFF) { + const a = c & 0x3FF; + const b = d & 0x3FF; + U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); + ++i; + } else { + U.push(String.fromCodePoint(0xFFFD)); + } + } + } + } + + return U.join(''); +}; + +conversions["Date"] = function (V, opts) { + if (!(V instanceof Date)) { + throw new TypeError("Argument is not a Date object"); + } + if (isNaN(V)) { + return undefined; + } + + return V; +}; + +conversions["RegExp"] = function (V, opts) { + if (!(V instanceof RegExp)) { + V = new RegExp(V); + } + + return V; +}; diff --git a/node_modules/webidl-conversions/package.json b/node_modules/webidl-conversions/package.json new file mode 100644 index 0000000..c31bc07 --- /dev/null +++ b/node_modules/webidl-conversions/package.json @@ -0,0 +1,23 @@ +{ + "name": "webidl-conversions", + "version": "3.0.1", + "description": "Implements the WebIDL algorithms for converting to and from JavaScript values", + "main": "lib/index.js", + "scripts": { + "test": "mocha test/*.js" + }, + "repository": "jsdom/webidl-conversions", + "keywords": [ + "webidl", + "web", + "types" + ], + "files": [ + "lib/" + ], + "author": "Domenic Denicola (https://domenic.me/)", + "license": "BSD-2-Clause", + "devDependencies": { + "mocha": "^1.21.4" + } +} diff --git a/node_modules/whatwg-url/LICENSE.txt b/node_modules/whatwg-url/LICENSE.txt new file mode 100644 index 0000000..54dfac3 --- /dev/null +++ b/node_modules/whatwg-url/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015–2016 Sebastian Mayr + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/whatwg-url/README.md b/node_modules/whatwg-url/README.md new file mode 100644 index 0000000..4347a7f --- /dev/null +++ b/node_modules/whatwg-url/README.md @@ -0,0 +1,67 @@ +# whatwg-url + +whatwg-url is a full implementation of the WHATWG [URL Standard](https://url.spec.whatwg.org/). It can be used standalone, but it also exposes a lot of the internal algorithms that are useful for integrating a URL parser into a project like [jsdom](https://github.com/tmpvar/jsdom). + +## Current Status + +whatwg-url is currently up to date with the URL spec up to commit [a62223](https://github.com/whatwg/url/commit/a622235308342c9adc7fc2fd1659ff059f7d5e2a). 
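For orientation while reading the vendored files that follow, here is a minimal usage sketch. It is illustrative only and not part of the upstream README; it is based on the exports visible in `lib/public-api.js` further down in this diff, and the concrete URLs and values are assumptions chosen for demonstration.

```js
// Illustrative sketch only; the example inputs/outputs are assumptions for demonstration.
const { URL, basicURLParse, serializeURL } = require("whatwg-url");

// Standalone use: the spec-compliant URL class.
const url = new URL("/path?x=1", "https://example.com");
console.log(url.href); // "https://example.com/path?x=1"

// Low-level use: parse into a URL record, or the string "failure" on error.
const record = basicURLParse("https://user@example.com:8080/a/b");
if (record !== "failure") {
  console.log(record.scheme, record.host, record.port); // "https" "example.com" 8080
  console.log(serializeURL(record)); // "https://user@example.com:8080/a/b"
}
```

As the API notes below describe, the low-level functions operate on "URL record" objects and signal failure by returning the string `"failure"` rather than throwing.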
+ +## API + +### The `URL` Constructor + +The main API is the [`URL`](https://url.spec.whatwg.org/#url) export, which follows the spec's behavior in all ways (including e.g. `USVString` conversion). Most consumers of this library will want to use this. + +### Low-level URL Standard API + +The following methods are exported for use by places like jsdom that need to implement things like [`HTMLHyperlinkElementUtils`](https://html.spec.whatwg.org/#htmlhyperlinkelementutils). They operate on or return an "internal URL" or ["URL record"](https://url.spec.whatwg.org/#concept-url) type. + +- [URL parser](https://url.spec.whatwg.org/#concept-url-parser): `parseURL(input, { baseURL, encodingOverride })` +- [Basic URL parser](https://url.spec.whatwg.org/#concept-basic-url-parser): `basicURLParse(input, { baseURL, encodingOverride, url, stateOverride })` +- [URL serializer](https://url.spec.whatwg.org/#concept-url-serializer): `serializeURL(urlRecord, excludeFragment)` +- [Host serializer](https://url.spec.whatwg.org/#concept-host-serializer): `serializeHost(hostFromURLRecord)` +- [Serialize an integer](https://url.spec.whatwg.org/#serialize-an-integer): `serializeInteger(number)` +- [Origin](https://url.spec.whatwg.org/#concept-url-origin) [serializer](https://html.spec.whatwg.org/multipage/browsers.html#serialization-of-an-origin): `serializeURLOrigin(urlRecord)` +- [Set the username](https://url.spec.whatwg.org/#set-the-username): `setTheUsername(urlRecord, usernameString)` +- [Set the password](https://url.spec.whatwg.org/#set-the-password): `setThePassword(urlRecord, passwordString)` +- [Cannot have a username/password/port](https://url.spec.whatwg.org/#cannot-have-a-username-password-port): `cannotHaveAUsernamePasswordPort(urlRecord)` + +The `stateOverride` parameter is one of the following strings: + +- [`"scheme start"`](https://url.spec.whatwg.org/#scheme-start-state) +- [`"scheme"`](https://url.spec.whatwg.org/#scheme-state) +- [`"no scheme"`](https://url.spec.whatwg.org/#no-scheme-state) +- [`"special relative or authority"`](https://url.spec.whatwg.org/#special-relative-or-authority-state) +- [`"path or authority"`](https://url.spec.whatwg.org/#path-or-authority-state) +- [`"relative"`](https://url.spec.whatwg.org/#relative-state) +- [`"relative slash"`](https://url.spec.whatwg.org/#relative-slash-state) +- [`"special authority slashes"`](https://url.spec.whatwg.org/#special-authority-slashes-state) +- [`"special authority ignore slashes"`](https://url.spec.whatwg.org/#special-authority-ignore-slashes-state) +- [`"authority"`](https://url.spec.whatwg.org/#authority-state) +- [`"host"`](https://url.spec.whatwg.org/#host-state) +- [`"hostname"`](https://url.spec.whatwg.org/#hostname-state) +- [`"port"`](https://url.spec.whatwg.org/#port-state) +- [`"file"`](https://url.spec.whatwg.org/#file-state) +- [`"file slash"`](https://url.spec.whatwg.org/#file-slash-state) +- [`"file host"`](https://url.spec.whatwg.org/#file-host-state) +- [`"path start"`](https://url.spec.whatwg.org/#path-start-state) +- [`"path"`](https://url.spec.whatwg.org/#path-state) +- [`"cannot-be-a-base-URL path"`](https://url.spec.whatwg.org/#cannot-be-a-base-url-path-state) +- [`"query"`](https://url.spec.whatwg.org/#query-state) +- [`"fragment"`](https://url.spec.whatwg.org/#fragment-state) + +The URL record type has the following API: + +- [`scheme`](https://url.spec.whatwg.org/#concept-url-scheme) +- [`username`](https://url.spec.whatwg.org/#concept-url-username) +- 
[`password`](https://url.spec.whatwg.org/#concept-url-password) +- [`host`](https://url.spec.whatwg.org/#concept-url-host) +- [`port`](https://url.spec.whatwg.org/#concept-url-port) +- [`path`](https://url.spec.whatwg.org/#concept-url-path) (as an array) +- [`query`](https://url.spec.whatwg.org/#concept-url-query) +- [`fragment`](https://url.spec.whatwg.org/#concept-url-fragment) +- [`cannotBeABaseURL`](https://url.spec.whatwg.org/#url-cannot-be-a-base-url-flag) (as a boolean) + +These properties should be treated with care, as in general changing them will cause the URL record to be in an inconsistent state until the appropriate invocation of `basicURLParse` is used to fix it up. You can see examples of this in the URL Standard, where there are many step sequences like "4. Set context object’s url’s fragment to the empty string. 5. Basic URL parse _input_ with context object’s url as _url_ and fragment state as _state override_." In between those two steps, a URL record is in an unusable state. + +The return value of "failure" in the spec is represented by the string `"failure"`. That is, functions like `parseURL` and `basicURLParse` can return _either_ a URL record _or_ the string `"failure"`. diff --git a/node_modules/whatwg-url/lib/URL-impl.js b/node_modules/whatwg-url/lib/URL-impl.js new file mode 100644 index 0000000..dc7452c --- /dev/null +++ b/node_modules/whatwg-url/lib/URL-impl.js @@ -0,0 +1,200 @@ +"use strict"; +const usm = require("./url-state-machine"); + +exports.implementation = class URLImpl { + constructor(constructorArgs) { + const url = constructorArgs[0]; + const base = constructorArgs[1]; + + let parsedBase = null; + if (base !== undefined) { + parsedBase = usm.basicURLParse(base); + if (parsedBase === "failure") { + throw new TypeError("Invalid base URL"); + } + } + + const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } + + this._url = parsedURL; + + // TODO: query stuff + } + + get href() { + return usm.serializeURL(this._url); + } + + set href(v) { + const parsedURL = usm.basicURLParse(v); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } + + this._url = parsedURL; + } + + get origin() { + return usm.serializeURLOrigin(this._url); + } + + get protocol() { + return this._url.scheme + ":"; + } + + set protocol(v) { + usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); + } + + get username() { + return this._url.username; + } + + set username(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + usm.setTheUsername(this._url, v); + } + + get password() { + return this._url.password; + } + + set password(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + usm.setThePassword(this._url, v); + } + + get host() { + const url = this._url; + + if (url.host === null) { + return ""; + } + + if (url.port === null) { + return usm.serializeHost(url.host); + } + + return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); + } + + set host(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); + } + + get hostname() { + if (this._url.host === null) { + return ""; + } + + return usm.serializeHost(this._url.host); + } + + set hostname(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); + } + + get port() { + if (this._url.port === null) 
{ + return ""; + } + + return usm.serializeInteger(this._url.port); + } + + set port(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + if (v === "") { + this._url.port = null; + } else { + usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); + } + } + + get pathname() { + if (this._url.cannotBeABaseURL) { + return this._url.path[0]; + } + + if (this._url.path.length === 0) { + return ""; + } + + return "/" + this._url.path.join("/"); + } + + set pathname(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + this._url.path = []; + usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); + } + + get search() { + if (this._url.query === null || this._url.query === "") { + return ""; + } + + return "?" + this._url.query; + } + + set search(v) { + // TODO: query stuff + + const url = this._url; + + if (v === "") { + url.query = null; + return; + } + + const input = v[0] === "?" ? v.substring(1) : v; + url.query = ""; + usm.basicURLParse(input, { url, stateOverride: "query" }); + } + + get hash() { + if (this._url.fragment === null || this._url.fragment === "") { + return ""; + } + + return "#" + this._url.fragment; + } + + set hash(v) { + if (v === "") { + this._url.fragment = null; + return; + } + + const input = v[0] === "#" ? v.substring(1) : v; + this._url.fragment = ""; + usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); + } + + toJSON() { + return this.href; + } +}; diff --git a/node_modules/whatwg-url/lib/URL.js b/node_modules/whatwg-url/lib/URL.js new file mode 100644 index 0000000..78c7207 --- /dev/null +++ b/node_modules/whatwg-url/lib/URL.js @@ -0,0 +1,196 @@ +"use strict"; + +const conversions = require("webidl-conversions"); +const utils = require("./utils.js"); +const Impl = require(".//URL-impl.js"); + +const impl = utils.implSymbol; + +function URL(url) { + if (!this || this[impl] || !(this instanceof URL)) { + throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); + } + if (arguments.length < 1) { + throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); + } + const args = []; + for (let i = 0; i < arguments.length && i < 2; ++i) { + args[i] = arguments[i]; + } + args[0] = conversions["USVString"](args[0]); + if (args[1] !== undefined) { + args[1] = conversions["USVString"](args[1]); + } + + module.exports.setup(this, args); +} + +URL.prototype.toJSON = function toJSON() { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + const args = []; + for (let i = 0; i < arguments.length && i < 0; ++i) { + args[i] = arguments[i]; + } + return this[impl].toJSON.apply(this[impl], args); +}; +Object.defineProperty(URL.prototype, "href", { + get() { + return this[impl].href; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].href = V; + }, + enumerable: true, + configurable: true +}); + +URL.prototype.toString = function () { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + return this.href; +}; + +Object.defineProperty(URL.prototype, "origin", { + get() { + return this[impl].origin; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "protocol", { + get() { + return this[impl].protocol; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].protocol = V; + }, + enumerable: true, + configurable: true +}); + 
+Object.defineProperty(URL.prototype, "username", { + get() { + return this[impl].username; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].username = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "password", { + get() { + return this[impl].password; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].password = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "host", { + get() { + return this[impl].host; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].host = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "hostname", { + get() { + return this[impl].hostname; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hostname = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "port", { + get() { + return this[impl].port; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].port = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "pathname", { + get() { + return this[impl].pathname; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].pathname = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "search", { + get() { + return this[impl].search; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].search = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "hash", { + get() { + return this[impl].hash; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hash = V; + }, + enumerable: true, + configurable: true +}); + + +module.exports = { + is(obj) { + return !!obj && obj[impl] instanceof Impl.implementation; + }, + create(constructorArgs, privateData) { + let obj = Object.create(URL.prototype); + this.setup(obj, constructorArgs, privateData); + return obj; + }, + setup(obj, constructorArgs, privateData) { + if (!privateData) privateData = {}; + privateData.wrapper = obj; + + obj[impl] = new Impl.implementation(constructorArgs, privateData); + obj[impl][utils.wrapperSymbol] = obj; + }, + interface: URL, + expose: { + Window: { URL: URL }, + Worker: { URL: URL } + } +}; + diff --git a/node_modules/whatwg-url/lib/public-api.js b/node_modules/whatwg-url/lib/public-api.js new file mode 100644 index 0000000..932dcad --- /dev/null +++ b/node_modules/whatwg-url/lib/public-api.js @@ -0,0 +1,11 @@ +"use strict"; + +exports.URL = require("./URL").interface; +exports.serializeURL = require("./url-state-machine").serializeURL; +exports.serializeURLOrigin = require("./url-state-machine").serializeURLOrigin; +exports.basicURLParse = require("./url-state-machine").basicURLParse; +exports.setTheUsername = require("./url-state-machine").setTheUsername; +exports.setThePassword = require("./url-state-machine").setThePassword; +exports.serializeHost = require("./url-state-machine").serializeHost; +exports.serializeInteger = require("./url-state-machine").serializeInteger; +exports.parseURL = require("./url-state-machine").parseURL; diff --git a/node_modules/whatwg-url/lib/url-state-machine.js b/node_modules/whatwg-url/lib/url-state-machine.js new file mode 100644 index 0000000..27d977a --- /dev/null +++ b/node_modules/whatwg-url/lib/url-state-machine.js @@ -0,0 +1,1297 @@ +"use strict"; +const punycode = require("punycode"); +const tr46 = require("tr46"); + +const specialSchemes = { + ftp: 21, + file: null, + 
gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443 +}; + +const failure = Symbol("failure"); + +function countSymbols(str) { + return punycode.ucs2.decode(str).length; +} + +function at(input, idx) { + const c = input[idx]; + return isNaN(c) ? undefined : String.fromCodePoint(c); +} + +function isASCIIDigit(c) { + return c >= 0x30 && c <= 0x39; +} + +function isASCIIAlpha(c) { + return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); +} + +function isASCIIAlphanumeric(c) { + return isASCIIAlpha(c) || isASCIIDigit(c); +} + +function isASCIIHex(c) { + return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); +} + +function isSingleDot(buffer) { + return buffer === "." || buffer.toLowerCase() === "%2e"; +} + +function isDoubleDot(buffer) { + buffer = buffer.toLowerCase(); + return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e"; +} + +function isWindowsDriveLetterCodePoints(cp1, cp2) { + return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); +} + +function isWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); +} + +function isNormalizedWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; +} + +function containsForbiddenHostCodePoint(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function containsForbiddenHostCodePointExcludingPercent(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function isSpecialScheme(scheme) { + return specialSchemes[scheme] !== undefined; +} + +function isSpecial(url) { + return isSpecialScheme(url.scheme); +} + +function defaultPort(scheme) { + return specialSchemes[scheme]; +} + +function percentEncode(c) { + let hex = c.toString(16).toUpperCase(); + if (hex.length === 1) { + hex = "0" + hex; + } + + return "%" + hex; +} + +function utf8PercentEncode(c) { + const buf = new Buffer(c); + + let str = ""; + + for (let i = 0; i < buf.length; ++i) { + str += percentEncode(buf[i]); + } + + return str; +} + +function utf8PercentDecode(str) { + const input = new Buffer(str); + const output = []; + for (let i = 0; i < input.length; ++i) { + if (input[i] !== 37) { + output.push(input[i]); + } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { + output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); + i += 2; + } else { + output.push(input[i]); + } + } + return new Buffer(output).toString(); +} + +function isC0ControlPercentEncode(c) { + return c <= 0x1F || c > 0x7E; +} + +const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); +function isPathPercentEncode(c) { + return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); +} + +const extraUserinfoPercentEncodeSet = + new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); +function isUserinfoPercentEncode(c) { + return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); +} + +function percentEncodeChar(c, encodeSetPredicate) { + const cStr = String.fromCodePoint(c); + + if (encodeSetPredicate(c)) { + return utf8PercentEncode(cStr); + } + + return cStr; +} + +function parseIPv4Number(input) { + let R = 10; + + if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { + input = input.substring(2); + R = 16; + } else if (input.length >= 2 && input.charAt(0) 
=== "0") { + input = input.substring(1); + R = 8; + } + + if (input === "") { + return 0; + } + + const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/); + if (regex.test(input)) { + return failure; + } + + return parseInt(input, R); +} + +function parseIPv4(input) { + const parts = input.split("."); + if (parts[parts.length - 1] === "") { + if (parts.length > 1) { + parts.pop(); + } + } + + if (parts.length > 4) { + return input; + } + + const numbers = []; + for (const part of parts) { + if (part === "") { + return input; + } + const n = parseIPv4Number(part); + if (n === failure) { + return input; + } + + numbers.push(n); + } + + for (let i = 0; i < numbers.length - 1; ++i) { + if (numbers[i] > 255) { + return failure; + } + } + if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { + return failure; + } + + let ipv4 = numbers.pop(); + let counter = 0; + + for (const n of numbers) { + ipv4 += n * Math.pow(256, 3 - counter); + ++counter; + } + + return ipv4; +} + +function serializeIPv4(address) { + let output = ""; + let n = address; + + for (let i = 1; i <= 4; ++i) { + output = String(n % 256) + output; + if (i !== 4) { + output = "." + output; + } + n = Math.floor(n / 256); + } + + return output; +} + +function parseIPv6(input) { + const address = [0, 0, 0, 0, 0, 0, 0, 0]; + let pieceIndex = 0; + let compress = null; + let pointer = 0; + + input = punycode.ucs2.decode(input); + + if (input[pointer] === 58) { + if (input[pointer + 1] !== 58) { + return failure; + } + + pointer += 2; + ++pieceIndex; + compress = pieceIndex; + } + + while (pointer < input.length) { + if (pieceIndex === 8) { + return failure; + } + + if (input[pointer] === 58) { + if (compress !== null) { + return failure; + } + ++pointer; + ++pieceIndex; + compress = pieceIndex; + continue; + } + + let value = 0; + let length = 0; + + while (length < 4 && isASCIIHex(input[pointer])) { + value = value * 0x10 + parseInt(at(input, pointer), 16); + ++pointer; + ++length; + } + + if (input[pointer] === 46) { + if (length === 0) { + return failure; + } + + pointer -= length; + + if (pieceIndex > 6) { + return failure; + } + + let numbersSeen = 0; + + while (input[pointer] !== undefined) { + let ipv4Piece = null; + + if (numbersSeen > 0) { + if (input[pointer] === 46 && numbersSeen < 4) { + ++pointer; + } else { + return failure; + } + } + + if (!isASCIIDigit(input[pointer])) { + return failure; + } + + while (isASCIIDigit(input[pointer])) { + const number = parseInt(at(input, pointer)); + if (ipv4Piece === null) { + ipv4Piece = number; + } else if (ipv4Piece === 0) { + return failure; + } else { + ipv4Piece = ipv4Piece * 10 + number; + } + if (ipv4Piece > 255) { + return failure; + } + ++pointer; + } + + address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; + + ++numbersSeen; + + if (numbersSeen === 2 || numbersSeen === 4) { + ++pieceIndex; + } + } + + if (numbersSeen !== 4) { + return failure; + } + + break; + } else if (input[pointer] === 58) { + ++pointer; + if (input[pointer] === undefined) { + return failure; + } + } else if (input[pointer] !== undefined) { + return failure; + } + + address[pieceIndex] = value; + ++pieceIndex; + } + + if (compress !== null) { + let swaps = pieceIndex - compress; + pieceIndex = 7; + while (pieceIndex !== 0 && swaps > 0) { + const temp = address[compress + swaps - 1]; + address[compress + swaps - 1] = address[pieceIndex]; + address[pieceIndex] = temp; + --pieceIndex; + --swaps; + } + } else if (compress === null && pieceIndex !== 8) { + return 
failure; + } + + return address; +} + +function serializeIPv6(address) { + let output = ""; + const seqResult = findLongestZeroSequence(address); + const compress = seqResult.idx; + let ignore0 = false; + + for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { + if (ignore0 && address[pieceIndex] === 0) { + continue; + } else if (ignore0) { + ignore0 = false; + } + + if (compress === pieceIndex) { + const separator = pieceIndex === 0 ? "::" : ":"; + output += separator; + ignore0 = true; + continue; + } + + output += address[pieceIndex].toString(16); + + if (pieceIndex !== 7) { + output += ":"; + } + } + + return output; +} + +function parseHost(input, isSpecialArg) { + if (input[0] === "[") { + if (input[input.length - 1] !== "]") { + return failure; + } + + return parseIPv6(input.substring(1, input.length - 1)); + } + + if (!isSpecialArg) { + return parseOpaqueHost(input); + } + + const domain = utf8PercentDecode(input); + const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); + if (asciiDomain === null) { + return failure; + } + + if (containsForbiddenHostCodePoint(asciiDomain)) { + return failure; + } + + const ipv4Host = parseIPv4(asciiDomain); + if (typeof ipv4Host === "number" || ipv4Host === failure) { + return ipv4Host; + } + + return asciiDomain; +} + +function parseOpaqueHost(input) { + if (containsForbiddenHostCodePointExcludingPercent(input)) { + return failure; + } + + let output = ""; + const decoded = punycode.ucs2.decode(input); + for (let i = 0; i < decoded.length; ++i) { + output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); + } + return output; +} + +function findLongestZeroSequence(arr) { + let maxIdx = null; + let maxLen = 1; // only find elements > 1 + let currStart = null; + let currLen = 0; + + for (let i = 0; i < arr.length; ++i) { + if (arr[i] !== 0) { + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + currStart = null; + currLen = 0; + } else { + if (currStart === null) { + currStart = i; + } + ++currLen; + } + } + + // if trailing zeros + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + return { + idx: maxIdx, + len: maxLen + }; +} + +function serializeHost(host) { + if (typeof host === "number") { + return serializeIPv4(host); + } + + // IPv6 serializer + if (host instanceof Array) { + return "[" + serializeIPv6(host) + "]"; + } + + return host; +} + +function trimControlChars(url) { + return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); +} + +function trimTabAndNewline(url) { + return url.replace(/\u0009|\u000A|\u000D/g, ""); +} + +function shortenPath(url) { + const path = url.path; + if (path.length === 0) { + return; + } + if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { + return; + } + + path.pop(); +} + +function includesCredentials(url) { + return url.username !== "" || url.password !== ""; +} + +function cannotHaveAUsernamePasswordPort(url) { + return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; +} + +function isNormalizedWindowsDriveLetter(string) { + return /^[A-Za-z]:$/.test(string); +} + +function URLStateMachine(input, base, encodingOverride, url, stateOverride) { + this.pointer = 0; + this.input = input; + this.base = base || null; + this.encodingOverride = encodingOverride || "utf-8"; + this.stateOverride = stateOverride; + this.url = url; + this.failure = false; + this.parseError = false; + + if (!this.url) { + this.url = { + 
scheme: "", + username: "", + password: "", + host: null, + port: null, + path: [], + query: null, + fragment: null, + + cannotBeABaseURL: false + }; + + const res = trimControlChars(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + } + + const res = trimTabAndNewline(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + + this.state = stateOverride || "scheme start"; + + this.buffer = ""; + this.atFlag = false; + this.arrFlag = false; + this.passwordTokenSeenFlag = false; + + this.input = punycode.ucs2.decode(this.input); + + for (; this.pointer <= this.input.length; ++this.pointer) { + const c = this.input[this.pointer]; + const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); + + // exec state machine + const ret = this["parse " + this.state](c, cStr); + if (!ret) { + break; // terminate algorithm + } else if (ret === failure) { + this.failure = true; + break; + } + } +} + +URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { + if (isASCIIAlpha(c)) { + this.buffer += cStr.toLowerCase(); + this.state = "scheme"; + } else if (!this.stateOverride) { + this.state = "no scheme"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { + if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { + this.buffer += cStr.toLowerCase(); + } else if (c === 58) { + if (this.stateOverride) { + if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { + return false; + } + + if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { + return false; + } + + if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { + return false; + } + + if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { + return false; + } + } + this.url.scheme = this.buffer; + this.buffer = ""; + if (this.stateOverride) { + return false; + } + if (this.url.scheme === "file") { + if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { + this.parseError = true; + } + this.state = "file"; + } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { + this.state = "special relative or authority"; + } else if (isSpecial(this.url)) { + this.state = "special authority slashes"; + } else if (this.input[this.pointer + 1] === 47) { + this.state = "path or authority"; + ++this.pointer; + } else { + this.url.cannotBeABaseURL = true; + this.url.path.push(""); + this.state = "cannot-be-a-base-URL path"; + } + } else if (!this.stateOverride) { + this.buffer = ""; + this.state = "no scheme"; + this.pointer = -1; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { + if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { + return failure; + } else if (this.base.cannotBeABaseURL && c === 35) { + this.url.scheme = this.base.scheme; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.url.cannotBeABaseURL = true; + this.state = "fragment"; + } else if (this.base.scheme === "file") { + this.state = "file"; + --this.pointer; + } else { + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special relative or authority"] = function 
parseSpecialRelativeOrAuthority(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { + if (c === 47) { + this.state = "authority"; + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative"] = function parseRelative(c) { + this.url.scheme = this.base.scheme; + if (isNaN(c)) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 47) { + this.state = "relative slash"; + } else if (c === 63) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else if (isSpecial(this.url) && c === 92) { + this.parseError = true; + this.state = "relative slash"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(0, this.base.path.length - 1); + + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { + if (isSpecial(this.url) && (c === 47 || c === 92)) { + if (c === 92) { + this.parseError = true; + } + this.state = "special authority ignore slashes"; + } else if (c === 47) { + this.state = "authority"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "special authority ignore slashes"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { + if (c !== 47 && c !== 92) { + this.state = "authority"; + --this.pointer; + } else { + this.parseError = true; + } + + return true; +}; + +URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { + if (c === 64) { + this.parseError = true; + if (this.atFlag) { + this.buffer = "%40" + this.buffer; + } + this.atFlag = true; + + // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars + const len = countSymbols(this.buffer); + for (let pointer = 0; pointer < len; ++pointer) { + const codePoint = this.buffer.codePointAt(pointer); + + if (codePoint === 58 && 
!this.passwordTokenSeenFlag) { + this.passwordTokenSeenFlag = true; + continue; + } + const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); + if (this.passwordTokenSeenFlag) { + this.url.password += encodedCodePoints; + } else { + this.url.username += encodedCodePoints; + } + } + this.buffer = ""; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + if (this.atFlag && this.buffer === "") { + this.parseError = true; + return failure; + } + this.pointer -= countSymbols(this.buffer) + 1; + this.buffer = ""; + this.state = "host"; + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse hostname"] = +URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { + if (this.stateOverride && this.url.scheme === "file") { + --this.pointer; + this.state = "file host"; + } else if (c === 58 && !this.arrFlag) { + if (this.buffer === "") { + this.parseError = true; + return failure; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "port"; + if (this.stateOverride === "hostname") { + return false; + } + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + --this.pointer; + if (isSpecial(this.url) && this.buffer === "") { + this.parseError = true; + return failure; + } else if (this.stateOverride && this.buffer === "" && + (includesCredentials(this.url) || this.url.port !== null)) { + this.parseError = true; + return false; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "path start"; + if (this.stateOverride) { + return false; + } + } else { + if (c === 91) { + this.arrFlag = true; + } else if (c === 93) { + this.arrFlag = false; + } + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { + if (isASCIIDigit(c)) { + this.buffer += cStr; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92) || + this.stateOverride) { + if (this.buffer !== "") { + const port = parseInt(this.buffer); + if (port > Math.pow(2, 16) - 1) { + this.parseError = true; + return failure; + } + this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; + this.buffer = ""; + } + if (this.stateOverride) { + return false; + } + this.state = "path start"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); + +URLStateMachine.prototype["parse file"] = function parseFile(c) { + this.url.scheme = "file"; + + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file slash"; + } else if (this.base !== null && this.base.scheme === "file") { + if (isNaN(c)) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 63) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else { + if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points + !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || + (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points + !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + shortenPath(this.url); + } else { + this.parseError = true; + } + + this.state = "path"; + --this.pointer; + } + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file host"; + } else { + if (this.base !== null && this.base.scheme === "file") { + if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { + this.url.path.push(this.base.path[0]); + } else { + this.url.host = this.base.host; + } + } + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { + if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { + --this.pointer; + if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { + this.parseError = true; + this.state = "path"; + } else if (this.buffer === "") { + this.url.host = ""; + if (this.stateOverride) { + return false; + } + this.state = "path start"; + } else { + let host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + if (host === "localhost") { + host = ""; + } + this.url.host = host; + + if (this.stateOverride) { + return false; + } + + this.buffer = ""; + this.state = "path start"; + } + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { + if (isSpecial(this.url)) { + if (c === 92) { + this.parseError = true; + } + this.state = "path"; + + if (c !== 47 && c !== 92) { + --this.pointer; + } + } else if (!this.stateOverride && c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (!this.stateOverride && c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else if (c !== undefined) { + this.state = "path"; + if (c !== 47) { + --this.pointer; + } + } + + return true; +}; + +URLStateMachine.prototype["parse path"] = function parsePath(c) { + if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || + 
(!this.stateOverride && (c === 63 || c === 35))) { + if (isSpecial(this.url) && c === 92) { + this.parseError = true; + } + + if (isDoubleDot(this.buffer)) { + shortenPath(this.url); + if (c !== 47 && !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } + } else if (isSingleDot(this.buffer) && c !== 47 && + !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } else if (!isSingleDot(this.buffer)) { + if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { + if (this.url.host !== "" && this.url.host !== null) { + this.parseError = true; + this.url.host = ""; + } + this.buffer = this.buffer[0] + ":"; + } + this.url.path.push(this.buffer); + } + this.buffer = ""; + if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { + while (this.url.path.length > 1 && this.url.path[0] === "") { + this.parseError = true; + this.url.path.shift(); + } + } + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += percentEncodeChar(c, isPathPercentEncode); + } + + return true; +}; + +URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else { + // TODO: Add: not a URL code point + if (!isNaN(c) && c !== 37) { + this.parseError = true; + } + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + if (!isNaN(c)) { + this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); + } + } + + return true; +}; + +URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { + if (isNaN(c) || (!this.stateOverride && c === 35)) { + if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { + this.encodingOverride = "utf-8"; + } + + const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead + for (let i = 0; i < buffer.length; ++i) { + if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || + buffer[i] === 0x3C || buffer[i] === 0x3E) { + this.url.query += percentEncode(buffer[i]); + } else { + this.url.query += String.fromCodePoint(buffer[i]); + } + } + + this.buffer = ""; + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { + if (isNaN(c)) { // do nothing + } else if (c === 0x0) { + this.parseError = true; + } else { + // TODO: If c is not a URL code point and not "%", parse error. 
+ if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); + } + + return true; +}; + +function serializeURL(url, excludeFragment) { + let output = url.scheme + ":"; + if (url.host !== null) { + output += "//"; + + if (url.username !== "" || url.password !== "") { + output += url.username; + if (url.password !== "") { + output += ":" + url.password; + } + output += "@"; + } + + output += serializeHost(url.host); + + if (url.port !== null) { + output += ":" + url.port; + } + } else if (url.host === null && url.scheme === "file") { + output += "//"; + } + + if (url.cannotBeABaseURL) { + output += url.path[0]; + } else { + for (const string of url.path) { + output += "/" + string; + } + } + + if (url.query !== null) { + output += "?" + url.query; + } + + if (!excludeFragment && url.fragment !== null) { + output += "#" + url.fragment; + } + + return output; +} + +function serializeOrigin(tuple) { + let result = tuple.scheme + "://"; + result += serializeHost(tuple.host); + + if (tuple.port !== null) { + result += ":" + tuple.port; + } + + return result; +} + +module.exports.serializeURL = serializeURL; + +module.exports.serializeURLOrigin = function (url) { + // https://url.spec.whatwg.org/#concept-url-origin + switch (url.scheme) { + case "blob": + try { + return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); + } catch (e) { + // serializing an opaque origin returns "null" + return "null"; + } + case "ftp": + case "gopher": + case "http": + case "https": + case "ws": + case "wss": + return serializeOrigin({ + scheme: url.scheme, + host: url.host, + port: url.port + }); + case "file": + // spec says "exercise to the reader", chrome says "file://" + return "file://"; + default: + // serializing an opaque origin returns "null" + return "null"; + } +}; + +module.exports.basicURLParse = function (input, options) { + if (options === undefined) { + options = {}; + } + + const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); + if (usm.failure) { + return "failure"; + } + + return usm.url; +}; + +module.exports.setTheUsername = function (url, username) { + url.username = ""; + const decoded = punycode.ucs2.decode(username); + for (let i = 0; i < decoded.length; ++i) { + url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.setThePassword = function (url, password) { + url.password = ""; + const decoded = punycode.ucs2.decode(password); + for (let i = 0; i < decoded.length; ++i) { + url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.serializeHost = serializeHost; + +module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; + +module.exports.serializeInteger = function (integer) { + return String(integer); +}; + +module.exports.parseURL = function (input, options) { + if (options === undefined) { + options = {}; + } + + // We don't handle blobs, so this just delegates: + return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); +}; diff --git a/node_modules/whatwg-url/lib/utils.js b/node_modules/whatwg-url/lib/utils.js new file mode 100644 index 0000000..a562009 --- /dev/null +++ b/node_modules/whatwg-url/lib/utils.js @@ -0,0 +1,20 @@ +"use strict"; + +module.exports.mixin = function 
mixin(target, source) { + const keys = Object.getOwnPropertyNames(source); + for (let i = 0; i < keys.length; ++i) { + Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); + } +}; + +module.exports.wrapperSymbol = Symbol("wrapper"); +module.exports.implSymbol = Symbol("impl"); + +module.exports.wrapperForImpl = function (impl) { + return impl[module.exports.wrapperSymbol]; +}; + +module.exports.implForWrapper = function (wrapper) { + return wrapper[module.exports.implSymbol]; +}; + diff --git a/node_modules/whatwg-url/package.json b/node_modules/whatwg-url/package.json new file mode 100644 index 0000000..fce35ae --- /dev/null +++ b/node_modules/whatwg-url/package.json @@ -0,0 +1,32 @@ +{ + "name": "whatwg-url", + "version": "5.0.0", + "description": "An implementation of the WHATWG URL Standard's URL API and parsing machinery", + "main": "lib/public-api.js", + "files": [ + "lib/" + ], + "author": "Sebastian Mayr ", + "license": "MIT", + "repository": "jsdom/whatwg-url", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + }, + "devDependencies": { + "eslint": "^2.6.0", + "istanbul": "~0.4.3", + "mocha": "^2.2.4", + "recast": "~0.10.29", + "request": "^2.55.0", + "webidl2js": "^3.0.2" + }, + "scripts": { + "build": "node scripts/transform.js && node scripts/convert-idl.js", + "coverage": "istanbul cover node_modules/mocha/bin/_mocha", + "lint": "eslint .", + "prepublish": "npm run build", + "pretest": "node scripts/get-latest-platform-tests.js && npm run build", + "test": "mocha" + } +} diff --git a/package-lock.json b/package-lock.json index e746aad..e204028 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,6 +9,7 @@ "version": "0.0.2", "license": "Apache-2.0", "dependencies": { + "@google-cloud/storage": "^7.9.0", "express": "^4.17.1" }, "devDependencies": { @@ -26,6 +27,72 @@ "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, + "node_modules/@google-cloud/paginator": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.0.tgz", + "integrity": "sha512-87aeg6QQcEPxGCOthnpUjvw4xAZ57G7pL8FS0C4e/81fr3FjkpUpibf1s2v5XGyGhUVGF4Jfg7yEcxqn2iUw1w==", + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/projectify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz", + "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/promisify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz", + "integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/storage": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.9.0.tgz", + "integrity": "sha512-PlFl7g3r91NmXtZHXsSEfTZES5ysD3SSBWmX4iBdQ2TFH7tN/Vn/IhnVELCHtgh1vc+uYPZ7XvRYaqtDCdghIA==", + "dependencies": { + "@google-cloud/paginator": "^5.0.0", + "@google-cloud/projectify": "^4.0.0", + "@google-cloud/promisify": "^4.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "compressible": "^2.0.12", + "duplexify": "^4.1.3", + "ent": "^2.2.0", + 
"fast-xml-parser": "^4.3.0", + "gaxios": "^6.0.2", + "google-auth-library": "^9.6.3", + "mime": "^3.0.0", + "mime-types": "^2.0.8", + "p-limit": "^3.0.1", + "retry-request": "^7.0.0", + "teeny-request": "^9.0.0", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/storage/node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", @@ -60,12 +127,73 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/@types/caseless": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", + "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==" + }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", "dev": true }, + "node_modules/@types/node": { + "version": "20.12.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.2.tgz", + "integrity": "sha512-zQ0NYO87hyN6Xrclcqp7f8ZbXNbRfoGWNcMvHTPQp9UUrwI0mI7XBz+cu7/W6/VClYo2g63B0cjull/srU7LgQ==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/request": { + "version": "2.48.12", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.12.tgz", + "integrity": "sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw==", + "dependencies": { + "@types/caseless": "*", + "@types/node": "*", + "@types/tough-cookie": "*", + "form-data": "^2.5.0" + } + }, + "node_modules/@types/request/node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==" + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, "node_modules/accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -78,6 +206,38 @@ 
"node": ">= 0.6" } }, + "node_modules/agent-base": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/agent-base/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/agent-base/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, "node_modules/ansi-colors": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", @@ -135,17 +295,32 @@ "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, + "node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "engines": { + "node": ">=8" + } + }, "node_modules/asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", "dev": true }, + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dependencies": { + "retry": "0.13.1" + } + }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dev": true + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/balanced-match": { "version": "1.0.2", @@ -153,6 +328,33 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bignumber.js": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz", + "integrity": "sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==", + "engines": { + "node": "*" + } + }, "node_modules/binary-extensions": { "version": 
"2.3.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", @@ -215,6 +417,11 @@ "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" + }, "node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -358,7 +565,6 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, "dependencies": { "delayed-stream": "~1.0.0" }, @@ -375,6 +581,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "dependencies": { + "mime-db": ">= 1.43.0 < 2" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -479,7 +696,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "dev": true, "engines": { "node": ">=0.4.0" } @@ -520,6 +736,25 @@ "node": ">=0.3.1" } }, + "node_modules/duplexify": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", + "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.2" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -539,6 +774,19 @@ "node": ">= 0.8" } }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/ent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", + "integrity": "sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" + }, "node_modules/es-define-property": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", @@ -592,6 +840,14 @@ "node": ">= 0.6" } }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/express": { "version": "4.19.2", "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", @@ -633,12 +889,38 @@ "node": ">= 0.10.0" } }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, "node_modules/fast-safe-stringify": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", "dev": true }, + "node_modules/fast-xml-parser": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.3.6.tgz", + "integrity": "sha512-M2SovcRxD4+vC493Uc2GZVcZaj66CCJhWurC4viynVSTvrpErCShNcDz1lAho6n9REQKvL/ll4A4/fw6Y9z8nw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, "node_modules/fill-range": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", @@ -779,6 +1061,32 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gaxios": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.3.0.tgz", + "integrity": "sha512-p+ggrQw3fBwH2F5N/PAI4k/G/y1art5OxKpb2J2chwNNHM4hHuAOtivjPuirMF4KNKwTTUal/lPfL2+7h2mEcg==", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gcp-metadata": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", + "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -837,6 +1145,22 @@ "node": ">= 6" } }, + "node_modules/google-auth-library": { + "version": "9.7.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.7.0.tgz", + "integrity": "sha512-I/AvzBiUXDzLOy4iIZ2W+Zq33W4lcukQv1nl7C8WUA6SQwyQwUwu3waNmWNAvzds//FG8SZ+DnKnW/2k6mQS8A==", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.1.1", + "gcp-metadata": "^6.1.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14" + } + }, "node_modules/gopd": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", @@ -848,6 +1172,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gtoken": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", + "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", + "dependencies": { + "gaxios": "^6.0.0", + "jws": 
"^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -940,6 +1276,84 @@ "node": ">= 0.8" } }, + "node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/http-proxy-agent/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/http-proxy-agent/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/http-proxy-agent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/https-proxy-agent": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/https-proxy-agent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, "node_modules/iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", @@ -1034,6 +1448,17 @@ "node": ">=8" } }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-unicode-supported": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", @@ -1100,6 +1525,33 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, + "node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -1351,6 +1803,25 @@ "node": ">= 0.6" } }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -1383,7 +1854,6 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, "dependencies": { "wrappy": "1" } @@ -1392,7 +1862,6 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, "dependencies": { "yocto-queue": "^0.1.0" }, @@ -1527,6 +1996,19 @@ "node": ">= 0.8" } }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", @@ -1548,6 +2030,27 @@ "node": ">=0.10.0" } }, + "node_modules/retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/retry-request": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz", + "integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==", + "dependencies": { + "@types/request": "^2.48.8", + "extend": "^3.0.2", + "teeny-request": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, "node_modules/rimraf": { "version": 
"3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", @@ -1768,6 +2271,27 @@ "node": ">= 0.8" } }, + "node_modules/stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "dependencies": { + "stubs": "^3.0.0" + } + }, + "node_modules/stream-shift": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", + "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==" + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, "node_modules/string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", @@ -1806,6 +2330,16 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strnum": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" + }, + "node_modules/stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==" + }, "node_modules/superagent": { "version": "8.1.2", "resolved": "https://registry.npmjs.org/superagent/-/superagent-8.1.2.tgz", @@ -1887,6 +2421,77 @@ "node": ">=8" } }, + "node_modules/teeny-request": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", + "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.9", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/teeny-request/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/teeny-request/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/teeny-request/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/teeny-request/node_modules/ms": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/teeny-request/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/test-exclude": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", @@ -1963,6 +2568,11 @@ "node": ">=0.6" } }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -1975,6 +2585,11 @@ "node": ">= 0.6" } }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, "node_modules/unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", @@ -1983,6 +2598,11 @@ "node": ">= 0.8" } }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -1991,6 +2611,14 @@ "node": ">= 0.4.0" } }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/v8-to-istanbul": { "version": "9.2.0", "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz", @@ -2013,6 +2641,20 @@ "node": ">= 0.8" } }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -2054,8 +2696,7 @@ "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "node_modules/y18n": { 
"version": "5.0.8", @@ -2118,7 +2759,6 @@ "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true, "engines": { "node": ">=10" }, diff --git a/package.json b/package.json index 909281a..28d2fb9 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,7 @@ "test": "c8 mocha -p -j 2 --exit test/*.test.js" }, "dependencies": { + "@google-cloud/storage": "^7.9.0", "express": "^4.17.1" }, "devDependencies": {