From 85c85d368a6cdac67d2da1b0f241501accafe05d Mon Sep 17 00:00:00 2001 From: ExE Boss <3889017+ExE-Boss@users.noreply.github.com> Date: Sat, 29 Aug 2020 01:10:00 +0200 Subject: [PATCH 01/98] =?UTF-8?q?path:=20add=C2=A0`path/posix`=20and=C2=A0?= =?UTF-8?q?`path/win32`=20alias=C2=A0modules?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://github.com/nodejs/node/pull/31553 Refs: https://github.com/nodejs/node/pull/32953 Refs: https://github.com/nodejs/node/pull/33950 Refs: https://github.com/nodejs/node/pull/34001 Refs: https://github.com/nodejs/node/pull/34002 Refs: https://github.com/nodejs/node/pull/34055 PR-URL: https://github.com/nodejs/node/pull/34962 Reviewed-By: Myles Borins Reviewed-By: Ruben Bridgewater Reviewed-By: Matteo Collina Reviewed-By: Rich Trott Reviewed-By: James M Snell Reviewed-By: Gerhard Stöbich --- doc/api/path.md | 12 ++++++++++++ lib/path/posix.js | 3 +++ lib/path/win32.js | 3 +++ node.gyp | 2 ++ test/es-module/test-esm-path-posix.mjs | 6 ++++++ test/es-module/test-esm-path-win32.mjs | 6 ++++++ test/parallel/test-path-posix-exists.js | 6 ++++++ test/parallel/test-path-win32-exists.js | 6 ++++++ 8 files changed, 44 insertions(+) create mode 100644 lib/path/posix.js create mode 100644 lib/path/win32.js create mode 100644 test/es-module/test-esm-path-posix.mjs create mode 100644 test/es-module/test-esm-path-win32.mjs create mode 100644 test/parallel/test-path-posix-exists.js create mode 100644 test/parallel/test-path-win32-exists.js diff --git a/doc/api/path.md b/doc/api/path.md index c26a98e59c9d6d..36c281b772e1af 100644 --- a/doc/api/path.md +++ b/doc/api/path.md @@ -434,6 +434,10 @@ A [`TypeError`][] is thrown if `path` is not a string. 
## `path.posix` * {Object} @@ -441,6 +445,8 @@ added: v0.11.15 The `path.posix` property provides access to POSIX specific implementations of the `path` methods. +The API is accessible via `require('path').posix` or `require('path/posix')`. + ## `path.relative(from, to)` * {Object} @@ -575,6 +585,8 @@ added: v0.11.15 The `path.win32` property provides access to Windows-specific implementations of the `path` methods. +The API is accessible via `require('path').win32` or `require('path/win32')`. + [MSDN-Rel-Path]: https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file#fully-qualified-vs-relative-paths [`TypeError`]: errors.md#errors_class_typeerror [`path.parse()`]: #path_path_parse_path diff --git a/lib/path/posix.js b/lib/path/posix.js new file mode 100644 index 00000000000000..aa8988d38481b2 --- /dev/null +++ b/lib/path/posix.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('path').posix; diff --git a/lib/path/win32.js b/lib/path/win32.js new file mode 100644 index 00000000000000..acb113aaae51ac --- /dev/null +++ b/lib/path/win32.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('path').win32; diff --git a/node.gyp b/node.gyp index d4dc83d257b0a3..38cf71309f8021 100644 --- a/node.gyp +++ b/node.gyp @@ -73,6 +73,8 @@ 'lib/net.js', 'lib/os.js', 'lib/path.js', + 'lib/path/posix.js', + 'lib/path/win32.js', 'lib/perf_hooks.js', 'lib/process.js', 'lib/punycode.js', diff --git a/test/es-module/test-esm-path-posix.mjs b/test/es-module/test-esm-path-posix.mjs new file mode 100644 index 00000000000000..e58e0603180e8f --- /dev/null +++ b/test/es-module/test-esm-path-posix.mjs @@ -0,0 +1,6 @@ +import '../common/index.mjs'; +import assert from 'assert'; +import { posix } from 'path'; +import pathPosix from 'path/posix'; + +assert.strictEqual(pathPosix, posix); diff --git a/test/es-module/test-esm-path-win32.mjs b/test/es-module/test-esm-path-win32.mjs new file mode 100644 index 00000000000000..3b3304410774bd --- /dev/null +++ 
b/test/es-module/test-esm-path-win32.mjs @@ -0,0 +1,6 @@ +import '../common/index.mjs'; +import assert from 'assert'; +import { win32 } from 'path'; +import pathWin32 from 'path/win32'; + +assert.strictEqual(pathWin32, win32); diff --git a/test/parallel/test-path-posix-exists.js b/test/parallel/test-path-posix-exists.js new file mode 100644 index 00000000000000..dc12ed6daf027f --- /dev/null +++ b/test/parallel/test-path-posix-exists.js @@ -0,0 +1,6 @@ +'use strict'; + +require('../common'); +const assert = require('assert'); + +assert.strictEqual(require('path/posix'), require('path').posix); diff --git a/test/parallel/test-path-win32-exists.js b/test/parallel/test-path-win32-exists.js new file mode 100644 index 00000000000000..c9efa74dbd7d82 --- /dev/null +++ b/test/parallel/test-path-win32-exists.js @@ -0,0 +1,6 @@ +'use strict'; + +require('../common'); +const assert = require('assert'); + +assert.strictEqual(require('path/win32'), require('path').win32); From baa87c1a7db97c1398a0cc3849b1632f6d131f7e Mon Sep 17 00:00:00 2001 From: ExE Boss <3889017+ExE-Boss@users.noreply.github.com> Date: Sat, 27 Jun 2020 15:40:00 +0200 Subject: [PATCH 02/98] =?UTF-8?q?util:=20add=C2=A0`util/types`=20alias?= =?UTF-8?q?=C2=A0module?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://github.com/nodejs/node/pull/31553 Refs: https://github.com/nodejs/node/pull/32953 Refs: https://github.com/nodejs/node/pull/33950 Refs: https://github.com/nodejs/node/pull/34001 Refs: https://github.com/nodejs/node/pull/34002 PR-URL: https://github.com/nodejs/node/pull/34055 Refs: https://github.com/nodejs/node/pull/34962 Reviewed-By: James M Snell Reviewed-By: Michaël Zasso Reviewed-By: Myles Borins Reviewed-By: Ruben Bridgewater Reviewed-By: Rich Trott Reviewed-By: Joyee Cheung --- doc/api/util.md | 6 ++++++ lib/util/types.js | 3 +++ 
node.gyp | 1 + test/es-module/test-esm-util-types.mjs | 6 ++++++ test/parallel/test-util-types-exists.js | 6 ++++++ 5 files changed, 22 insertions(+) create mode 100644 lib/util/types.js create mode 100644 test/es-module/test-esm-util-types.mjs create mode 100644 test/parallel/test-util-types-exists.js diff --git a/doc/api/util.md b/doc/api/util.md index a5daf2cb240bfb..1cd8fef07e2976 100644 --- a/doc/api/util.md +++ b/doc/api/util.md @@ -1290,6 +1290,10 @@ The encoding supported by the `TextEncoder` instance. Always set to `'utf-8'`. ## `util.types` `util.types` provides type checks for different kinds of built-in objects. @@ -1301,6 +1305,8 @@ The result generally does not make any guarantees about what kinds of properties or behavior a value exposes in JavaScript. They are primarily useful for addon developers who prefer to do type checking in JavaScript. +The API is accessible via `require('util').types` or `require('util/types')`. + ### `util.types.isAnyArrayBuffer(value)` + +* Returns: {string} the current prompt string + +The `rl.getPrompt()` method returns the current prompt used by `rl.prompt()`. + ### `rl.write(data[, key])` ## Technology Sponsors diff --git a/tools/node_modules/eslint/lib/linter/timing.js b/tools/node_modules/eslint/lib/linter/timing.js index 8396d9215b54dc..58230306855abe 100644 --- a/tools/node_modules/eslint/lib/linter/timing.js +++ b/tools/node_modules/eslint/lib/linter/timing.js @@ -44,6 +44,26 @@ const enabled = !!process.env.TIMING; const HEADERS = ["Rule", "Time (ms)", "Relative"]; const ALIGN = [alignLeft, alignRight, alignRight]; +/** + * Decide how many rules to show in the output list. 
+ * @returns {number} the number of rules to show + */ +function getListSize() { + const MINIMUM_SIZE = 10; + + if (typeof process.env.TIMING !== "string") { + return MINIMUM_SIZE; + } + + if (process.env.TIMING.toLowerCase() === "all") { + return Number.POSITIVE_INFINITY; + } + + const TIMING_ENV_VAR_AS_INTEGER = Number.parseInt(process.env.TIMING, 10); + + return TIMING_ENV_VAR_AS_INTEGER > 10 ? TIMING_ENV_VAR_AS_INTEGER : MINIMUM_SIZE; +} + /* istanbul ignore next */ /** * display the data @@ -61,7 +81,7 @@ function display(data) { return [key, time]; }) .sort((a, b) => b[1] - a[1]) - .slice(0, 10); + .slice(0, getListSize()); rows.forEach(row => { row.push(`${(row[1] * 100 / total).toFixed(1)}%`); @@ -133,7 +153,8 @@ module.exports = (function() { return { time, - enabled + enabled, + getListSize }; }()); diff --git a/tools/node_modules/eslint/node_modules/import-fresh/index.js b/tools/node_modules/eslint/node_modules/import-fresh/index.js index 425ed98c42f68e..0a4c5d52f6d322 100644 --- a/tools/node_modules/eslint/node_modules/import-fresh/index.js +++ b/tools/node_modules/eslint/node_modules/import-fresh/index.js @@ -10,7 +10,8 @@ module.exports = moduleId => { const parentPath = parentModule(__filename); - const filePath = resolveFrom(path.dirname(parentPath), moduleId); + const cwd = parentPath ? 
path.dirname(parentPath) : __dirname; + const filePath = resolveFrom(cwd, moduleId); const oldModule = require.cache[filePath]; // Delete itself from module parent diff --git a/tools/node_modules/eslint/node_modules/import-fresh/package.json b/tools/node_modules/eslint/node_modules/import-fresh/package.json index 38892a62e43136..893bb4a523fbca 100644 --- a/tools/node_modules/eslint/node_modules/import-fresh/package.json +++ b/tools/node_modules/eslint/node_modules/import-fresh/package.json @@ -47,5 +47,5 @@ "heapdump": "node heapdump.js", "test": "xo && ava && tsd" }, - "version": "3.2.1" + "version": "3.2.2" } \ No newline at end of file diff --git a/tools/node_modules/eslint/node_modules/trim-trailing-lines/index.js b/tools/node_modules/eslint/node_modules/trim-trailing-lines/index.js index 0f2d48b52fc567..eff85c6baedffb 100644 --- a/tools/node_modules/eslint/node_modules/trim-trailing-lines/index.js +++ b/tools/node_modules/eslint/node_modules/trim-trailing-lines/index.js @@ -2,16 +2,7 @@ module.exports = trimTrailingLines -var line = '\n' - // Remove final newline characters from `value`. 
function trimTrailingLines(value) { - var val = String(value) - var index = val.length - - while (val.charAt(--index) === line) { - // Empty - } - - return val.slice(0, index + 1) + return String(value).replace(/\n+$/, '') } diff --git a/tools/node_modules/eslint/node_modules/trim-trailing-lines/package.json b/tools/node_modules/eslint/node_modules/trim-trailing-lines/package.json index bbf4660f41bdda..c0242dc1299bdc 100644 --- a/tools/node_modules/eslint/node_modules/trim-trailing-lines/package.json +++ b/tools/node_modules/eslint/node_modules/trim-trailing-lines/package.json @@ -19,14 +19,14 @@ "deprecated": false, "description": "Remove final line feeds from a string", "devDependencies": { - "browserify": "^16.0.0", + "browserify": "^17.0.0", "nyc": "^15.0.0", - "prettier": "^1.0.0", - "remark-cli": "^7.0.0", - "remark-preset-wooorm": "^6.0.0", - "tape": "^4.0.0", - "tinyify": "^2.0.0", - "xo": "^0.25.0" + "prettier": "^2.0.0", + "remark-cli": "^9.0.0", + "remark-preset-wooorm": "^8.0.0", + "tape": "^5.0.0", + "tinyify": "^3.0.0", + "xo": "^0.34.0" }, "files": [ "index.js" @@ -72,12 +72,12 @@ "build": "npm run build-bundle && npm run build-mangle", "build-bundle": "browserify . -s trimTrailingLines -o trim-trailing-lines.js", "build-mangle": "browserify . -s trimTrailingLines -p tinyify -o trim-trailing-lines.min.js", - "format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix", + "format": "remark . -qfo && prettier . 
-w --loglevel warn && xo --fix", "test": "npm run format && npm run build && npm run test-coverage", "test-api": "node test", "test-coverage": "nyc --reporter lcov tape test.js" }, - "version": "1.1.3", + "version": "1.1.4", "xo": { "prettier": true, "esnext": false, diff --git a/tools/node_modules/eslint/node_modules/v8-compile-cache/README.md b/tools/node_modules/eslint/node_modules/v8-compile-cache/README.md index 9580f6943205e1..6e0b99fbaa6304 100644 --- a/tools/node_modules/eslint/node_modules/v8-compile-cache/README.md +++ b/tools/node_modules/eslint/node_modules/v8-compile-cache/README.md @@ -26,9 +26,11 @@ The ability to tap into V8 to produce/consume this cache was introduced in [Node Set the environment variable `DISABLE_V8_COMPILE_CACHE=1` to disable the cache. +Cache directory is defined by environment variable `V8_COMPILE_CACHE_CACHE_DIR` or defaults to `/v8-compile-cache-`. + ## Internals -The caches are stored in `$TMP/v8-compile-cache/V8_VERSION`, where there are `.BLOB` and `.MAP` files corresponding to the entry module that required `v8-compile-cache`. The cache is _entry module specific_ because it is faster to load the entire code cache into memory at once, than it is to read it from disk on a file-by-file basis. +Cache files are suffixed `.BLOB` and `.MAP` corresponding to the entry module that required `v8-compile-cache`. The cache is _entry module specific_ because it is faster to load the entire code cache into memory at once, than it is to read it from disk on a file-by-file basis. 
## Benchmarks diff --git a/tools/node_modules/eslint/node_modules/v8-compile-cache/package.json b/tools/node_modules/eslint/node_modules/v8-compile-cache/package.json index 83d66522f04c29..25540b730159b7 100644 --- a/tools/node_modules/eslint/node_modules/v8-compile-cache/package.json +++ b/tools/node_modules/eslint/node_modules/v8-compile-cache/package.json @@ -11,15 +11,15 @@ "deprecated": false, "description": "Require hook for automatic V8 compile cache persistence", "devDependencies": { - "babel-core": "6.23.1", - "eslint": "^3.15.0", - "flow-parser": "0.38.0", + "babel-core": "6.26.3", + "eslint": "^7.12.1", + "flow-parser": "0.136.0", "rimraf": "^2.5.4", - "rxjs": "5.2.0", + "rxjs": "6.6.3", "semver": "^5.3.0", "tap": "^10.1.1", "temp": "^0.8.3", - "yarn": "0.20.3" + "yarn": "1.22.10" }, "files": [ "v8-compile-cache.js" @@ -34,9 +34,10 @@ }, "scripts": { "bench": "bench/run.sh", - "lint": "eslint --max-warnings=0 .", - "posttest": "npm run lint", - "test": "tap test/*-test.js" + "eslint": "eslint --max-warnings=0 .", + "posttest": "npm run eslint", + "tap": "tap test/*-test.js", + "test": "npm run tap" }, - "version": "2.1.1" + "version": "2.2.0" } \ No newline at end of file diff --git a/tools/node_modules/eslint/node_modules/v8-compile-cache/v8-compile-cache.js b/tools/node_modules/eslint/node_modules/v8-compile-cache/v8-compile-cache.js index 69f053667046ca..b9c09288cb310b 100644 --- a/tools/node_modules/eslint/node_modules/v8-compile-cache/v8-compile-cache.js +++ b/tools/node_modules/eslint/node_modules/v8-compile-cache/v8-compile-cache.js @@ -86,8 +86,6 @@ class FileSystemBlobStore { try { fs.writeFileSync(this._blobFilename, blobToStore); fs.writeFileSync(this._mapFilename, mapToStore); - } catch (error) { - throw error; } finally { fs.unlinkSync(this._lockFilename); } @@ -301,7 +299,8 @@ function slashEscape(str) { '\x00': 'z0', 'z': 'zZ', }; - return str.replace(/[\\:\/\x00z]/g, match => (ESCAPE_LOOKUP[match])); + const ESCAPE_REGEX = /[\\:/\x00z]/g; 
// eslint-disable-line no-control-regex + return str.replace(ESCAPE_REGEX, match => ESCAPE_LOOKUP[match]); } function supportsCachedData() { @@ -311,6 +310,11 @@ function supportsCachedData() { } function getCacheDir() { + const v8_compile_cache_cache_dir = process.env.V8_COMPILE_CACHE_CACHE_DIR; + if (v8_compile_cache_cache_dir) { + return v8_compile_cache_cache_dir; + } + // Avoid cache ownership issues on POSIX systems. const dirname = typeof process.getuid === 'function' ? 'v8-compile-cache-' + process.getuid() @@ -348,7 +352,7 @@ if (!process.env.DISABLE_V8_COMPILE_CACHE && supportsCachedData()) { nativeCompileCache.setCacheStore(blobStore); nativeCompileCache.install(); - process.once('exit', code => { + process.once('exit', () => { if (blobStore.isDirty()) { blobStore.save(); } diff --git a/tools/node_modules/eslint/package.json b/tools/node_modules/eslint/package.json index a5f0c57b98a18d..53d055bc094d51 100644 --- a/tools/node_modules/eslint/package.json +++ b/tools/node_modules/eslint/package.json @@ -154,5 +154,5 @@ "test:cli": "mocha", "webpack": "node Makefile.js webpack" }, - "version": "7.12.1" + "version": "7.13.0" } \ No newline at end of file From 1924255fdb6a75a95d1e02b02ecbec88191ccd0b Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Fri, 23 Oct 2020 11:07:39 -0700 Subject: [PATCH 08/98] async_hooks: fix leak in AsyncLocalStorage exit MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit If exit is called and then run or enterWith are called within the exit function, the als instace should not be added to the storageList additional times. The correct behaviour is to remove the instance from the storageList before executing the exit handler and then to restore it after. 
PR-URL: https://github.com/nodejs/node/pull/35779 Reviewed-By: Vladimir de Turckheim Reviewed-By: Michael Dawson Reviewed-By: Gerhard Stöbich Reviewed-By: Andrey Pechkurov Reviewed-By: Rich Trott --- lib/async_hooks.js | 18 +++++++------ ...-async-local-storage-exit-does-not-leak.js | 25 +++++++++++++++++++ 2 files changed, 36 insertions(+), 7 deletions(-) create mode 100644 test/parallel/test-async-local-storage-exit-does-not-leak.js diff --git a/lib/async_hooks.js b/lib/async_hooks.js index 7dd888b61f79f9..b6865b6f1cd03e 100644 --- a/lib/async_hooks.js +++ b/lib/async_hooks.js @@ -271,6 +271,14 @@ class AsyncLocalStorage { } } + _enable() { + if (!this.enabled) { + this.enabled = true; + storageList.push(this); + storageHook.enable(); + } + } + // Propagate the context from a parent resource to a child one _propagate(resource, triggerResource) { const store = triggerResource[this.kResourceStore]; @@ -280,11 +288,7 @@ class AsyncLocalStorage { } enterWith(store) { - if (!this.enabled) { - this.enabled = true; - storageList.push(this); - storageHook.enable(); - } + this._enable(); const resource = executionAsyncResource(); resource[this.kResourceStore] = store; } @@ -308,11 +312,11 @@ class AsyncLocalStorage { if (!this.enabled) { return callback(...args); } - this.enabled = false; + this.disable(); try { return callback(...args); } finally { - this.enabled = true; + this._enable(); } } diff --git a/test/parallel/test-async-local-storage-exit-does-not-leak.js b/test/parallel/test-async-local-storage-exit-does-not-leak.js new file mode 100644 index 00000000000000..636d80f788b7fb --- /dev/null +++ b/test/parallel/test-async-local-storage-exit-does-not-leak.js @@ -0,0 +1,25 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); + +const als = new AsyncLocalStorage(); + +// Make sure _propagate function exists. 
+assert.ok(typeof als._propagate === 'function'); + +// The als instance should be getting removed from the storageList in +// lib/async_hooks.js when exit(...) is called, therefore when the nested runs +// are called there should be no copy of the als in the storageList to run the +// _propagate method on. +als._propagate = common.mustNotCall('_propagate() should not be called'); + +const done = common.mustCall(); + +function run(count) { + if (count === 0) return done(); + als.run({}, () => { + als.exit(run, --count); + }); +} +run(100); From 0b40568afebde7e5d0d56d7a3fc272d08809d18b Mon Sep 17 00:00:00 2001 From: Szymon Marczak <36894700+szmarczak@users.noreply.github.com> Date: Thu, 5 Nov 2020 23:56:00 +0100 Subject: [PATCH 09/98] http2: delay session.receive() by a tick PR-URL: https://github.com/nodejs/node/pull/35985 Reviewed-By: Matteo Collina Reviewed-By: Rich Trott --- lib/internal/http2/core.js | 23 ++++++---- .../test-http2-connect-tls-with-delay.js | 46 +++++++------------ 2 files changed, 30 insertions(+), 39 deletions(-) diff --git a/lib/internal/http2/core.js b/lib/internal/http2/core.js index b585d61cd1db92..fc72c048a9ddbe 100644 --- a/lib/internal/http2/core.js +++ b/lib/internal/http2/core.js @@ -3139,16 +3139,21 @@ function connect(authority, options, listener) { if (typeof listener === 'function') session.once('connect', listener); - debug('Http2Session connect', options.createConnection); - // Socket already has some buffered data - emulate receiving it - // https://github.com/nodejs/node/issues/35475 - if (socket && socket.readableLength) { - let buf; - while ((buf = socket.read()) !== null) { - debug(`Http2Session connect: injecting ${buf.length} already in buffer`); - session[kHandle].receive(buf); + // Process data on the next tick - a remoteSettings handler may be attached. 
+ // https://github.com/nodejs/node/issues/35981 + process.nextTick(() => { + debug('Http2Session connect', options.createConnection); + // Socket already has some buffered data - emulate receiving it + // https://github.com/nodejs/node/issues/35475 + if (socket && socket.readableLength) { + let buf; + while ((buf = socket.read()) !== null) { + debug(`Http2Session connect: ${buf.length} bytes already in buffer`); + session[kHandle].receive(buf); + } } - } + }); + return session; } diff --git a/test/parallel/test-http2-connect-tls-with-delay.js b/test/parallel/test-http2-connect-tls-with-delay.js index 3e2e8a46a3662a..0b3753ae383642 100644 --- a/test/parallel/test-http2-connect-tls-with-delay.js +++ b/test/parallel/test-http2-connect-tls-with-delay.js @@ -4,11 +4,7 @@ const common = require('../common'); if (!common.hasCrypto) common.skip('missing crypto'); -if (!common.hasMultiLocalhost()) - common.skip('platform-specific test.'); - const http2 = require('http2'); -const assert = require('assert'); const tls = require('tls'); const fixtures = require('../common/fixtures'); @@ -16,15 +12,9 @@ const serverOptions = { key: fixtures.readKey('agent1-key.pem'), cert: fixtures.readKey('agent1-cert.pem') }; -const server = http2.createSecureServer(serverOptions, (req, res) => { - console.log(`Connect from: ${req.connection.remoteAddress}`); - assert.strictEqual(req.connection.remoteAddress, '127.0.0.2'); - req.on('end', common.mustCall(() => { - res.writeHead(200, { 'Content-Type': 'text/plain' }); - res.end(`You are from: ${req.connection.remoteAddress}`); - })); - req.resume(); +const server = http2.createSecureServer(serverOptions, (req, res) => { + res.end(); }); server.listen(0, '127.0.0.1', common.mustCall(() => { @@ -32,33 +22,29 @@ server.listen(0, '127.0.0.1', common.mustCall(() => { ALPNProtocols: ['h2'], host: '127.0.0.1', servername: 'localhost', - localAddress: '127.0.0.2', port: server.address().port, rejectUnauthorized: false }; - 
console.log('Server ready', server.address().port); - const socket = tls.connect(options, async () => { - - console.log('TLS Connected!'); - - setTimeout(() => { - + socket.once('readable', () => { const client = http2.connect( 'https://localhost:' + server.address().port, { ...options, createConnection: () => socket } ); - const req = client.request({ - ':path': '/' - }); - req.on('data', () => req.resume()); - req.on('end', common.mustCall(function() { - client.close(); - req.close(); - server.close(); + + client.once('remoteSettings', common.mustCall(() => { + const req = client.request({ + ':path': '/' + }); + req.on('data', () => req.resume()); + req.on('end', common.mustCall(() => { + client.close(); + req.close(); + server.close(); + })); + req.end(); })); - req.end(); - }, 1000); + }); }); })); From 9c6be3cc90eda11583e23f02361a62a96d7d2c97 Mon Sep 17 00:00:00 2001 From: zhangyongsheng Date: Tue, 10 Nov 2020 22:22:35 +0800 Subject: [PATCH 10/98] http2: allow setting the local window size of a session PR-URL: https://github.com/nodejs/node/pull/35978 Fixes: https://github.com/nodejs/node/issues/31084 Refs: https://github.com/nodejs/node/pull/26962 Reviewed-By: Matteo Collina Reviewed-By: Ricky Zhou <0x19951125@gmail.com> --- doc/api/errors.md | 5 + doc/api/http2.md | 23 ++++ lib/internal/errors.js | 1 + lib/internal/http2/core.js | 29 ++++- src/node_http2.cc | 21 +++ src/node_http2.h | 3 + test/parallel/test-http2-client-destroy.js | 2 + .../test-http2-client-setLocalWindowSize.js | 121 ++++++++++++++++++ .../test-http2-server-setLocalWindowSize.js | 37 ++++++ 9 files changed, 237 insertions(+), 5 deletions(-) create mode 100644 test/parallel/test-http2-client-setLocalWindowSize.js create mode 100644 test/parallel/test-http2-server-setLocalWindowSize.js diff --git a/doc/api/errors.md b/doc/api/errors.md index 192c4ecf5772f2..9433e00897877b 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -1226,6 +1226,11 @@ 
reached. An attempt was made to initiate a new push stream from within a push stream. Nested push streams are not permitted. + +### `ERR_HTTP2_NO_MEM` + +Out of memory when using the `http2session.setLocalWindowSize(windowSize)` API. + ### `ERR_HTTP2_NO_SOCKET_MANIPULATION` diff --git a/doc/api/http2.md b/doc/api/http2.md index b4977397f350c1..d09d093f3a71d6 100644 --- a/doc/api/http2.md +++ b/doc/api/http2.md @@ -519,6 +519,29 @@ added: v8.4.0 A prototype-less object describing the current remote settings of this `Http2Session`. The remote settings are set by the *connected* HTTP/2 peer. +#### `http2session.setLocalWindowSize(windowSize)` + + +* `windowSize` {number} + +Sets the local endpoint's window size. +The `windowSize` is the total window size to set, not +the delta. + +```js +const http2 = require('http2'); + +const server = http2.createServer(); +const expectedWindowSize = 2 ** 20; +server.on('connect', (session) => { + + // Set local window size to be 2 ** 20 + session.setLocalWindowSize(expectedWindowSize); +}); +``` + #### `http2session.setTimeout(msecs, callback)` * Returns: {Array} of objects containing information about the wrapper functions -returned by [`tracker.calls()`][]. + returned by [`tracker.calls()`][]. * Object {Object} * `message` {string} * `actual` {number} The actual number of times the function was called. diff --git a/doc/api/async_hooks.md b/doc/api/async_hooks.md index f54e82ee46b909..08111b67a8507e 100644 --- a/doc/api/async_hooks.md +++ b/doc/api/async_hooks.md @@ -699,14 +699,14 @@ asyncResource.triggerAsyncId(); * `type` {string} The type of async event. * `options` {Object} * `triggerAsyncId` {number} The ID of the execution context that created this - async event. **Default:** `executionAsyncId()`. + async event. **Default:** `executionAsyncId()`. * `requireManualDestroy` {boolean} If set to `true`, disables `emitDestroy` - when the object is garbage collected. 
This usually does not need to be set - (even if `emitDestroy` is called manually), unless the resource's `asyncId` - is retrieved and the sensitive API's `emitDestroy` is called with it. - When set to `false`, the `emitDestroy` call on garbage collection - will only take place if there is at least one active `destroy` hook. - **Default:** `false`. + when the object is garbage collected. This usually does not need to be set + (even if `emitDestroy` is called manually), unless the resource's `asyncId` + is retrieved and the sensitive API's `emitDestroy` is called with it. + When set to `false`, the `emitDestroy` call on garbage collection + will only take place if there is at least one active `destroy` hook. + **Default:** `false`. Example usage: @@ -791,7 +791,7 @@ never be called. #### `asyncResource.triggerAsyncId()` * Returns: {number} The same `triggerAsyncId` that is passed to the -`AsyncResource` constructor. + `AsyncResource` constructor. ### Using `AsyncResource` for a `Worker` thread pool diff --git a/doc/api/debugger.md b/doc/api/debugger.md index 89980cfbd4a06f..b6dd45c7000b46 100644 --- a/doc/api/debugger.md +++ b/doc/api/debugger.md @@ -115,14 +115,14 @@ To begin watching an expression, type `watch('my_expression')`. 
The command * `setBreakpoint()`, `sb()`: Set breakpoint on current line * `setBreakpoint(line)`, `sb(line)`: Set breakpoint on specific line * `setBreakpoint('fn()')`, `sb(...)`: Set breakpoint on a first statement in -functions body + functions body * `setBreakpoint('script.js', 1)`, `sb(...)`: Set breakpoint on first line of -`script.js` + `script.js` * `setBreakpoint('script.js', 1, 'num < 4')`, `sb(...)`: Set conditional -breakpoint on first line of `script.js` that only breaks when `num < 4` -evaluates to `true` + breakpoint on first line of `script.js` that only breaks when `num < 4` + evaluates to `true` * `clearBreakpoint('script.js', 1)`, `cb(...)`: Clear breakpoint in `script.js` -on line 1 + on line 1 It is also possible to set a breakpoint in a file (module) that is not loaded yet: @@ -188,11 +188,11 @@ debug> * `backtrace`, `bt`: Print backtrace of current execution frame * `list(5)`: List scripts source code with 5 line context (5 lines before and -after) + after) * `watch(expr)`: Add expression to watch list * `unwatch(expr)`: Remove expression from watch list * `watchers`: List all watchers and their values (automatically listed on each -breakpoint) + breakpoint) * `repl`: Open debugger's repl for evaluation in debugging script's context * `exec expr`: Execute an expression in debugging script's context diff --git a/doc/api/dns.md b/doc/api/dns.md index a284c9d0c57bd5..613bd2465b9464 100644 --- a/doc/api/dns.md +++ b/doc/api/dns.md @@ -250,13 +250,13 @@ changes: The following flags can be passed as hints to [`dns.lookup()`][]. * `dns.ADDRCONFIG`: Limits returned address types to the types of non-loopback -addresses configured on the system. For example, IPv4 addresses are only -returned if the current system has at least one IPv4 address configured. + addresses configured on the system. For example, IPv4 addresses are only + returned if the current system has at least one IPv4 address configured. 
* `dns.V4MAPPED`: If the IPv6 family was specified, but no IPv6 addresses were -found, then return IPv4 mapped IPv6 addresses. It is not supported -on some operating systems (e.g FreeBSD 10.1). + found, then return IPv4 mapped IPv6 addresses. It is not supported + on some operating systems (e.g FreeBSD 10.1). * `dns.ALL`: If `dns.V4MAPPED` is specified, return resolved IPv6 addresses as -well as IPv4 mapped IPv6 addresses. + well as IPv4 mapped IPv6 addresses. ## `dns.lookupService(address, port, callback)` * `options` {Object} Accepts `options` from [`tls.createServer()`][], - [`tls.createSecureContext()`][] and [`http.createServer()`][]. + [`tls.createSecureContext()`][] and [`http.createServer()`][]. * `requestListener` {Function} A listener to be added to the `'request'` event. * Returns: {https.Server} diff --git a/doc/api/modules.md b/doc/api/modules.md index 03d919f37fd9b3..8648ec3ee79182 100644 --- a/doc/api/modules.md +++ b/doc/api/modules.md @@ -503,7 +503,7 @@ wrapper that looks like the following: By doing this, Node.js achieves a few things: * It keeps top-level variables (defined with `var`, `const` or `let`) scoped to -the module rather than the global object. + the module rather than the global object. * It helps to provide some global-looking variables that are actually specific to the module, such as: * The `module` and `exports` objects that the implementor can use to export diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 9f16d2d2933b90..615237c76bcae8 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -864,8 +864,8 @@ typedef void (*napi_async_cleanup_hook)(napi_async_cleanup_hook_handle handle, ``` * `[in] handle`: The handle that must be passed to -[`napi_remove_async_cleanup_hook`][] after completion of the asynchronous -cleanup. + [`napi_remove_async_cleanup_hook`][] after completion of the asynchronous + cleanup. * `[in] data`: The data that was passed to [`napi_add_async_cleanup_hook`][]. 
The body of the function should initiate the asynchronous cleanup actions at the @@ -945,7 +945,7 @@ napi_get_last_error_info(napi_env env, * `[in] env`: The environment that the API is invoked under. * `[out] result`: The `napi_extended_error_info` structure with more -information about the error. + information about the error. Returns `napi_ok` if the API succeeded. @@ -1725,7 +1725,7 @@ NAPI_EXTERN napi_status napi_add_async_cleanup_hook( * `[in] hook`: The function pointer to call at environment teardown. * `[in] arg`: The pointer to pass to `hook` when it gets called. * `[out] remove_handle`: Optional handle that refers to the asynchronous cleanup -hook. + hook. Registers `hook`, which is a function of type [`napi_async_cleanup_hook`][], as a function to be run with the `remove_handle` and `arg` parameters once the @@ -1762,7 +1762,7 @@ NAPI_EXTERN napi_status napi_remove_async_cleanup_hook( ``` * `[in] remove_handle`: The handle to an asynchronous cleanup hook that was -created with [`napi_add_async_cleanup_hook`][]. + created with [`napi_add_async_cleanup_hook`][]. Unregisters the cleanup hook corresponding to `remove_handle`. This will prevent the hook from being executed, unless it has already started executing. @@ -3372,7 +3372,7 @@ napi_status napi_typeof(napi_env env, napi_value value, napi_valuetype* result) Returns `napi_ok` if the API succeeded. * `napi_invalid_arg` if the type of `value` is not a known ECMAScript type and - `value` is not an External value. + `value` is not an External value. This API represents behavior similar to invoking the `typeof` Operator on the object as defined in [Section 12.5.5][] of the ECMAScript Language @@ -3902,11 +3902,11 @@ napi_get_all_property_names(napi_env env, * `[in] object`: The object from which to retrieve the properties. * `[in] key_mode`: Whether to retrieve prototype properties as well. * `[in] key_filter`: Which properties to retrieve -(enumerable/readable/writable). + (enumerable/readable/writable). 
* `[in] key_conversion`: Whether to convert numbered property keys to strings. * `[out] result`: A `napi_value` representing an array of JavaScript values -that represent the property names of the object. [`napi_get_array_length`][] and -[`napi_get_element`][] can be used to iterate over `result`. + that represent the property names of the object. [`napi_get_array_length`][] + and [`napi_get_element`][] can be used to iterate over `result`. Returns `napi_ok` if the API succeeded. @@ -4942,7 +4942,7 @@ napi_status napi_check_object_type_tag(napi_env env, * `[in] js_object`: The JavaScript object whose type tag to examine. * `[in] type_tag`: The tag with which to compare any tag found on the object. * `[out] result`: Whether the type tag given matched the type tag on the -object. `false` is also returned if no type tag was found on the object. + object. `false` is also returned if no type tag was found on the object. Returns `napi_ok` if the API succeeded. diff --git a/doc/api/report.md b/doc/api/report.md index b62de913be2b29..9ee65280a8aa41 100644 --- a/doc/api/report.md +++ b/doc/api/report.md @@ -396,15 +396,15 @@ node --report-uncaught-exception --report-on-signal \ ``` * `--report-uncaught-exception` Enables report to be generated on -un-caught exceptions. Useful when inspecting JavaScript stack in conjunction -with native stack and other runtime environment data. + un-caught exceptions. Useful when inspecting JavaScript stack in conjunction + with native stack and other runtime environment data. * `--report-on-signal` Enables report to be generated upon receiving -the specified (or predefined) signal to the running Node.js process. (See below -on how to modify the signal that triggers the report.) Default signal is `SIGUSR2`. -Useful when a report needs to be triggered from another program. -Application monitors may leverage this feature to collect report at regular -intervals and plot rich set of internal runtime data to their views. 
+ the specified (or predefined) signal to the running Node.js process. (See + below on how to modify the signal that triggers the report.) Default signal is + `SIGUSR2`. Useful when a report needs to be triggered from another program. + Application monitors may leverage this feature to collect report at regular + intervals and plot rich set of internal runtime data to their views. Signal based report generation is not supported in Windows. @@ -413,24 +413,24 @@ signal. However, if `SIGUSR2` is already used for other purposes, then this flag helps to change the signal for report generation and preserve the original meaning of `SIGUSR2` for the said purposes. -* `--report-on-fatalerror` Enables the report to be triggered on -fatal errors (internal errors within the Node.js runtime, such as out of memory) -that leads to termination of the application. Useful to inspect various -diagnostic data elements such as heap, stack, event loop state, resource -consumption etc. to reason about the fatal error. +* `--report-on-fatalerror` Enables the report to be triggered on fatal errors + (internal errors within the Node.js runtime, such as out of memory) + that leads to termination of the application. Useful to inspect various + diagnostic data elements such as heap, stack, event loop state, resource + consumption etc. to reason about the fatal error. * `--report-compact` Write reports in a compact format, single-line JSON, more -easily consumable by log processing systems than the default multi-line format -designed for human consumption. + easily consumable by log processing systems than the default multi-line format + designed for human consumption. * `--report-directory` Location at which the report will be -generated. + generated. * `--report-filename` Name of the file to which the report will be -written. + written. * `--report-signal` Sets or resets the signal for report generation -(not supported on Windows). Default signal is `SIGUSR2`. + (not supported on Windows). 
Default signal is `SIGUSR2`. A report can also be triggered via an API call from a JavaScript application: diff --git a/doc/api/stream.md b/doc/api/stream.md index f0a818111948fe..6a22446743ec3c 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -701,11 +701,11 @@ A [`Readable`][] stream can be in object mode or not, regardless of whether it is in flowing mode or paused mode. * In flowing mode, data is read from the underlying system automatically -and provided to an application as quickly as possible using events via the -[`EventEmitter`][] interface. + and provided to an application as quickly as possible using events via the + [`EventEmitter`][] interface. * In paused mode, the [`stream.read()`][stream-read] method must be called -explicitly to read chunks of data from the stream. + explicitly to read chunks of data from the stream. All [`Readable`][] streams begin in paused mode but can be switched to flowing mode in one of the following ways: diff --git a/doc/api/string_decoder.md b/doc/api/string_decoder.md index 819283e51175c4..e0e1323cf331e3 100644 --- a/doc/api/string_decoder.md +++ b/doc/api/string_decoder.md @@ -62,7 +62,7 @@ added: v0.9.3 --> * `buffer` {Buffer|TypedArray|DataView} A `Buffer`, or `TypedArray`, or - `DataView` containing the bytes to decode. + `DataView` containing the bytes to decode. * Returns: {string} Returns any remaining input stored in the internal buffer as a string. Bytes @@ -84,7 +84,7 @@ changes: --> * `buffer` {Buffer|TypedArray|DataView} A `Buffer`, or `TypedArray`, or - `DataView` containing the bytes to decode. + `DataView` containing the bytes to decode. 
* Returns: {string} Returns a decoded string, ensuring that any incomplete multibyte characters at diff --git a/doc/api/tls.md b/doc/api/tls.md index 128fac46064653..bedf4e28e7bc04 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -644,7 +644,7 @@ added: v0.3.2 --> * `callback` {Function} A listener callback that will be registered to listen -for the server instance's `'close'` event. + for the server instance's `'close'` event. * Returns: {tls.Server} The `server.close()` method stops the server from accepting new connections. @@ -975,8 +975,8 @@ added: v9.9.0 --> * Returns: {Buffer|undefined} The latest `Finished` message that has been -sent to the socket as part of a SSL/TLS handshake, or `undefined` if -no `Finished` message has been sent yet. + sent to the socket as part of a SSL/TLS handshake, or `undefined` if + no `Finished` message has been sent yet. As the `Finished` messages are message digests of the complete handshake (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can @@ -1033,7 +1033,7 @@ certificate. `'2A:7A:C2:DD:...'`. * `ext_key_usage` {Array} (Optional) The extended key usage, a set of OIDs. * `subjectaltname` {string} (Optional) A string containing concatenated names - for the subject, an alternative to the `subject` names. + for the subject, an alternative to the `subject` names. * `infoAccess` {Array} (Optional) An array describing the AuthorityInfoAccess, used with OCSP. * `issuerCertificate` {Object} (Optional) The issuer certificate object. For @@ -1099,8 +1099,8 @@ added: v9.9.0 --> * Returns: {Buffer|undefined} The latest `Finished` message that is expected -or has actually been received from the socket as part of a SSL/TLS handshake, -or `undefined` if there is no `Finished` message so far. + or has actually been received from the socket as part of a SSL/TLS handshake, + or `undefined` if there is no `Finished` message so far. 
As the `Finished` messages are message digests of the complete handshake (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can @@ -1155,7 +1155,7 @@ added: v12.11.0 --> * Returns: {Array} List of signature algorithms shared between the server and -the client in the order of decreasing preference. + the client in the order of decreasing preference. See [SSL_get_shared_sigalgs](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_shared_sigalgs.html) @@ -1170,8 +1170,8 @@ added: * `length` {number} number of bytes to retrieve from keying material * `label` {string} an application specific label, typically this will be a -value from the -[IANA Exporter Label Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels). + value from the + [IANA Exporter Label Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels). * `context` {Buffer} Optionally provide a context. * Returns: {Buffer} requested bytes of the keying material diff --git a/doc/api/util.md b/doc/api/util.md index 1cd8fef07e2976..4a506b18b70ddf 100644 --- a/doc/api/util.md +++ b/doc/api/util.md @@ -78,7 +78,7 @@ added: v0.11.3 * `section` {string} A string identifying the portion of the application for which the `debuglog` function is being created. * `callback` {Function} A callback invoked the first time the logging function -is called with a function argument that is a more optimized logging function. + is called with a function argument that is a more optimized logging function. * Returns: {Function} The logging function The `util.debuglog()` method is used to create a function that conditionally @@ -1073,7 +1073,7 @@ changes: --> * {symbol} that can be used to declare custom promisified variants of functions, -see [Custom promisified functions][]. + see [Custom promisified functions][]. 
In addition to being accessible through `util.promisify.custom`, this symbol is [registered globally][global symbol registry] and can be From 63494e434adf796db17f8230d4899790c6434aad Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Mon, 9 Nov 2020 05:45:59 -0800 Subject: [PATCH 15/98] tools: update doc tools to remark-parse@9.0.0 PR-URL: https://github.com/nodejs/node/pull/36049 Reviewed-By: Antoine du Hamel Reviewed-By: Daijiro Wachi --- tools/doc/addon-verify.js | 3 +- tools/doc/checkLinks.js | 1 + tools/doc/generate.js | 2 + tools/doc/html.js | 4 + tools/doc/package-lock.json | 1407 +++++++++++++++++++++++++++++++++-- tools/doc/package.json | 8 +- 6 files changed, 1362 insertions(+), 63 deletions(-) diff --git a/tools/doc/addon-verify.js b/tools/doc/addon-verify.js index c4dfdba9c4d3ad..8093439d6d925c 100644 --- a/tools/doc/addon-verify.js +++ b/tools/doc/addon-verify.js @@ -11,13 +11,14 @@ const { resolve } = require('path'); const vfile = require('to-vfile'); const unified = require('unified'); const remarkParse = require('remark-parse'); +const gfm = require('remark-gfm'); const rootDir = resolve(__dirname, '..', '..'); const doc = resolve(rootDir, 'doc', 'api', 'addons.md'); const verifyDir = resolve(rootDir, 'test', 'addons'); const file = vfile.readSync(doc, 'utf8'); -const tree = unified().use(remarkParse).parse(file); +const tree = unified().use(remarkParse).use(gfm).parse(file); const addons = {}; let id = 0; let currentHeader; diff --git a/tools/doc/checkLinks.js b/tools/doc/checkLinks.js index 19247c9340e75a..cdee7ca600d0c3 100644 --- a/tools/doc/checkLinks.js +++ b/tools/doc/checkLinks.js @@ -45,6 +45,7 @@ function findMarkdownFilesRecursively(dirPath) { function checkFile(path) { const tree = unified() .use(require('remark-parse')) + .use(require('remark-gfm')) .parse(fs.readFileSync(path)); const base = pathToFileURL(path); diff --git a/tools/doc/generate.js b/tools/doc/generate.js index 007a0d48eed347..f49acb6207c9b4 100644 --- 
a/tools/doc/generate.js +++ b/tools/doc/generate.js @@ -25,6 +25,7 @@ const { promises: fs } = require('fs'); const path = require('path'); const unified = require('unified'); const markdown = require('remark-parse'); +const gfm = require('remark-gfm'); const remark2rehype = require('remark-rehype'); const raw = require('rehype-raw'); const htmlStringify = require('rehype-stringify'); @@ -82,6 +83,7 @@ async function main() { const content = await unified() .use(replaceLinks, { filename, linksMapper }) .use(markdown) + .use(gfm) .use(html.preprocessText, { nodeVersion }) .use(json.jsonAPI, { filename }) .use(html.firstHeader) diff --git a/tools/doc/html.js b/tools/doc/html.js index b74b8ac15462c9..53b14e1e0899f7 100644 --- a/tools/doc/html.js +++ b/tools/doc/html.js @@ -27,6 +27,7 @@ const unified = require('unified'); const find = require('unist-util-find'); const visit = require('unist-util-visit'); const markdown = require('remark-parse'); +const gfm = require('remark-gfm'); const remark2rehype = require('remark-rehype'); const raw = require('rehype-raw'); const htmlStringify = require('rehype-stringify'); @@ -56,6 +57,7 @@ const gtocMD = fs.readFileSync(gtocPath, 'utf8') .replace(/^/gms, ''); const gtocHTML = unified() .use(markdown) + .use(gfm) .use(remark2rehype, { allowDangerousHtml: true }) .use(raw) .use(navClasses) @@ -283,6 +285,7 @@ function parseYAML(text) { meta.changes.forEach((change) => { const description = unified() .use(markdown) + .use(gfm) .use(remark2rehype, { allowDangerousHtml: true }) .use(raw) .use(htmlStringify) @@ -381,6 +384,7 @@ function buildToc({ filename, apilinks }) { file.toc = unified() .use(markdown) + .use(gfm) .use(remark2rehype, { allowDangerousHtml: true }) .use(raw) .use(htmlStringify) diff --git a/tools/doc/package-lock.json b/tools/doc/package-lock.json index 36514e5989c7b7..bf8da52d5fc22b 100644 --- a/tools/doc/package-lock.json +++ b/tools/doc/package-lock.json @@ -1,8 +1,1163 @@ { "name": "node-doc-generator", 
"version": "0.0.0", - "lockfileVersion": 1, + "lockfileVersion": 2, "requires": true, + "packages": { + "": { + "name": "node-doc-generator", + "version": "0.0.0", + "bin": { + "node-doc-generator": "generate.js" + }, + "devDependencies": { + "highlight.js": "10.1.0", + "js-yaml": "3.14.0", + "rehype-raw": "4.0.2", + "rehype-stringify": "8.0.0", + "remark-gfm": "^1.0.0", + "remark-html": "12.0.0", + "remark-parse": "^9.0.0", + "remark-rehype": "7.0.0", + "to-vfile": "6.1.0", + "unified": "9.2.0", + "unist-util-find": "1.0.1", + "unist-util-select": "3.0.1", + "unist-util-visit": "2.0.3" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/@types/mdast": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.3.tgz", + "integrity": "sha512-SXPBMnFVQg1s00dlMCc/jCdvPqdE4mXaMMCeRlxLDmTAEoegHT53xKtkDnzDTOcmMHUfcjyf36/YYZ6SxRdnsw==", + "dev": true, + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/unist": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.3.tgz", + "integrity": "sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ==", + "dev": true + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/bail": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz", + "integrity": "sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==", + "dev": true + }, + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=", + "dev": true + }, + "node_modules/ccount": { + 
"version": "1.0.5", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-1.0.5.tgz", + "integrity": "sha512-MOli1W+nfbPLlKEhInaxhRdp7KVLFxLN5ykwzHgLsLI3H3gs5jjFAK4Eoj3OzzcxCtumDaI8onoVDeQyWaNTkw==", + "dev": true + }, + "node_modules/character-entities": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", + "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==", + "dev": true + }, + "node_modules/character-entities-html4": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-1.1.4.tgz", + "integrity": "sha512-HRcDxZuZqMx3/a+qrzxdBKBPUpxWEq9xw2OPZ3a/174ihfrQKVsFhqtthBInFy1zZ9GgZyFXOatNujm8M+El3g==", + "dev": true + }, + "node_modules/character-entities-legacy": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", + "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==", + "dev": true + }, + "node_modules/character-reference-invalid": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", + "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==", + "dev": true + }, + "node_modules/collapse-white-space": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.6.tgz", + "integrity": "sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ==", + "dev": true + }, + "node_modules/comma-separated-tokens": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", + "integrity": 
"sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==", + "dev": true + }, + "node_modules/css-selector-parser": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/css-selector-parser/-/css-selector-parser-1.4.1.tgz", + "integrity": "sha512-HYPSb7y/Z7BNDCOrakL4raGO2zltZkbeXyAd6Tg9obzix6QhzxCotdBl6VT0Dv4vZfJGVz3WL/xaEI9Ly3ul0g==", + "dev": true + }, + "node_modules/debug": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "dev": true + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + } + }, + "node_modules/hast-to-hyperscript": { + "version": "7.0.4", + "resolved": 
"https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-7.0.4.tgz", + "integrity": "sha512-vmwriQ2H0RPS9ho4Kkbf3n3lY436QKLq6VaGA1pzBh36hBi3tm1DO9bR+kaJIbpT10UqaANDkMjxvjVfr+cnOA==", + "dev": true, + "dependencies": { + "comma-separated-tokens": "^1.0.0", + "property-information": "^5.3.0", + "space-separated-tokens": "^1.0.0", + "style-to-object": "^0.2.1", + "unist-util-is": "^3.0.0", + "web-namespaces": "^1.1.2" + } + }, + "node_modules/hast-util-from-parse5": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-5.0.3.tgz", + "integrity": "sha512-gOc8UB99F6eWVWFtM9jUikjN7QkWxB3nY0df5Z0Zq1/Nkwl5V4hAAsl0tmwlgWl/1shlTF8DnNYLO8X6wRV9pA==", + "dev": true, + "dependencies": { + "ccount": "^1.0.3", + "hastscript": "^5.0.0", + "property-information": "^5.0.0", + "web-namespaces": "^1.1.2", + "xtend": "^4.0.1" + } + }, + "node_modules/hast-util-is-element": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-1.1.0.tgz", + "integrity": "sha512-oUmNua0bFbdrD/ELDSSEadRVtWZOf3iF6Lbv81naqsIV99RnSCieTbWuWCY8BAeEfKJTKl0gRdokv+dELutHGQ==", + "dev": true + }, + "node_modules/hast-util-parse-selector": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.4.tgz", + "integrity": "sha512-gW3sxfynIvZApL4L07wryYF4+C9VvH3AUi7LAnVXV4MneGEgwOByXvFo18BgmTWnm7oHAe874jKbIB1YhHSIzA==", + "dev": true + }, + "node_modules/hast-util-raw": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-5.0.2.tgz", + "integrity": "sha512-3ReYQcIHmzSgMq8UrDZHFL0oGlbuVGdLKs8s/Fe8BfHFAyZDrdv1fy/AGn+Fim8ZuvAHcJ61NQhVMtyfHviT/g==", + "dev": true, + "dependencies": { + "hast-util-from-parse5": "^5.0.0", + "hast-util-to-parse5": "^5.0.0", + "html-void-elements": "^1.0.0", + "parse5": "^5.0.0", + "unist-util-position": "^3.0.0", + "web-namespaces": "^1.0.0", + "xtend": "^4.0.0", 
+ "zwitch": "^1.0.0" + } + }, + "node_modules/hast-util-sanitize": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-sanitize/-/hast-util-sanitize-3.0.0.tgz", + "integrity": "sha512-gxsM24ARtuulsrWEj8QtVM6FNeAEHklF/t7TEIWvX1wuQcoAQtJtEUcT8t0os4uxCUqh1epX/gTi8fp8gNKvCA==", + "dev": true, + "dependencies": { + "xtend": "^4.0.0" + } + }, + "node_modules/hast-util-to-html": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-7.1.1.tgz", + "integrity": "sha512-Ujqj0hGuo3dIQKilkbauAv5teOqPvhaSLEgs1lgApFT0812e114KiffV8XfE4ttR8dRPqxNOIJOMu6SKOVOGlg==", + "dev": true, + "dependencies": { + "ccount": "^1.0.0", + "comma-separated-tokens": "^1.0.0", + "hast-util-is-element": "^1.0.0", + "hast-util-whitespace": "^1.0.0", + "html-void-elements": "^1.0.0", + "property-information": "^5.0.0", + "space-separated-tokens": "^1.0.0", + "stringify-entities": "^3.0.1", + "unist-util-is": "^4.0.0", + "xtend": "^4.0.0" + } + }, + "node_modules/hast-util-to-html/node_modules/unist-util-is": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.0.2.tgz", + "integrity": "sha512-Ofx8uf6haexJwI1gxWMGg6I/dLnF2yE+KibhD3/diOqY2TinLcqHXCV6OI5gFVn3xQqDH+u0M625pfKwIwgBKQ==", + "dev": true + }, + "node_modules/hast-util-to-parse5": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-5.1.2.tgz", + "integrity": "sha512-ZgYLJu9lYknMfsBY0rBV4TJn2xiwF1fXFFjbP6EE7S0s5mS8LIKBVWzhA1MeIs1SWW6GnnE4In6c3kPb+CWhog==", + "dev": true, + "dependencies": { + "hast-to-hyperscript": "^7.0.0", + "property-information": "^5.0.0", + "web-namespaces": "^1.0.0", + "xtend": "^4.0.0", + "zwitch": "^1.0.0" + } + }, + "node_modules/hast-util-whitespace": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-1.0.4.tgz", + "integrity": 
"sha512-I5GTdSfhYfAPNztx2xJRQpG8cuDSNt599/7YUn7Gx/WxNMsG+a835k97TDkFgk123cwjfwINaZknkKkphx/f2A==", + "dev": true + }, + "node_modules/hastscript": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-5.1.2.tgz", + "integrity": "sha512-WlztFuK+Lrvi3EggsqOkQ52rKbxkXL3RwB6t5lwoa8QLMemoWfBuL43eDrwOamJyR7uKQKdmKYaBH1NZBiIRrQ==", + "dev": true, + "dependencies": { + "comma-separated-tokens": "^1.0.0", + "hast-util-parse-selector": "^2.0.0", + "property-information": "^5.0.0", + "space-separated-tokens": "^1.0.0" + } + }, + "node_modules/highlight.js": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.1.0.tgz", + "integrity": "sha512-e8aO/LUHDoxW4ntyKQf0/T3OtIZPhsfTr8XRuOq+FW5VdWEg/UDAeArzKF/22BaNZp6hPi/Zu/XQlTLOGLix3Q==", + "dev": true + }, + "node_modules/html-void-elements": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.5.tgz", + "integrity": "sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==", + "dev": true + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/inline-style-parser": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", + "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==", + "dev": true + }, + "node_modules/is-alphabetical": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", + "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==", + "dev": true + }, + "node_modules/is-alphanumerical": { + "version": 
"1.0.4", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", + "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", + "dev": true, + "dependencies": { + "is-alphabetical": "^1.0.0", + "is-decimal": "^1.0.0" + } + }, + "node_modules/is-buffer": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", + "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==", + "dev": true + }, + "node_modules/is-decimal": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", + "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", + "dev": true + }, + "node_modules/is-hexadecimal": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", + "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==", + "dev": true + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.0.tgz", + "integrity": "sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "node_modules/lodash.iteratee": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/lodash.iteratee/-/lodash.iteratee-4.7.0.tgz", + "integrity": "sha1-vkF32yiajMw8CZDx2ya1si/BVUw=", + "dev": true + }, + "node_modules/longest-streak": { + "version": "1.0.0", + 
"resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-1.0.0.tgz", + "integrity": "sha1-0GWXxNTDG1LMsfXY+P5xSOr9aWU=", + "dev": true + }, + "node_modules/markdown-table": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-0.4.0.tgz", + "integrity": "sha1-iQwsGzv+g/sA5BKbjkz+ZFJw+dE=", + "dev": true + }, + "node_modules/mdast-util-definitions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-3.0.1.tgz", + "integrity": "sha512-BAv2iUm/e6IK/b2/t+Fx69EL/AGcq/IG2S+HxHjDJGfLJtd6i9SZUS76aC9cig+IEucsqxKTR0ot3m933R3iuA==", + "dev": true, + "dependencies": { + "unist-util-visit": "^2.0.0" + } + }, + "node_modules/mdast-util-from-markdown": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-0.8.1.tgz", + "integrity": "sha512-qJXNcFcuCSPqUF0Tb0uYcFDIq67qwB3sxo9RPdf9vG8T90ViKnksFqdB/Coq2a7sTnxL/Ify2y7aIQXDkQFH0w==", + "dev": true, + "dependencies": { + "@types/mdast": "^3.0.0", + "mdast-util-to-string": "^1.0.0", + "micromark": "~2.10.0", + "parse-entities": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-0.1.0.tgz", + "integrity": "sha512-HLfygQL6HdhJhFbLta4Ki9hClrzyAxRjyRvpm5caN65QZL+NyHPmqFlnF9vm1Rn58JT2+AbLwNcEDY4MEvkk8Q==", + "dev": true, + "dependencies": { + "mdast-util-gfm-autolink-literal": "^0.1.0", + "mdast-util-gfm-strikethrough": "^0.2.0", + "mdast-util-gfm-table": "^0.1.0", + "mdast-util-gfm-task-list-item": "^0.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "0.1.1", + "resolved": 
"https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-0.1.1.tgz", + "integrity": "sha512-gJ2xSpqKCetSr22GEWpZH3f5ffb4pPn/72m4piY0v7T/S+O7n7rw+sfoPLhb2b4O7WdnERoYdALRcmD68FMtlw==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-strikethrough": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-0.2.2.tgz", + "integrity": "sha512-T37ZbaokJcRbHROXmoVAieWnesPD5N21tv2ifYzaGRLbkh1gknItUGhZzHefUn5Zc/eaO/iTDSAFOBrn/E8kWw==", + "dev": true, + "dependencies": { + "mdast-util-to-markdown": "^0.5.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-table": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-0.1.4.tgz", + "integrity": "sha512-T4xFSON9kUb/IpYA5N+KGWcsdGczAvILvKiXQwUGind6V9fvjPCR9yhZnIeaLdBWXaz3m/Gq77ZtuLMjtFR4IQ==", + "dev": true, + "dependencies": { + "markdown-table": "^2.0.0", + "mdast-util-to-markdown": "^0.5.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-table/node_modules/markdown-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz", + "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==", + "dev": true, + "dependencies": { + "repeat-string": "^1.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-0.1.5.tgz", + "integrity": 
"sha512-6O0bt34r+e7kYjeSwedhjDPYraspKIYKbhvhQEEioL7gSmXDxhN7WQW2KoxhVMpNzjNc03yC7K5KH6NHlz2jOA==", + "dev": true, + "dependencies": { + "mdast-util-to-markdown": "^0.5.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast": { + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-9.1.1.tgz", + "integrity": "sha512-vpMWKFKM2mnle+YbNgDXxx95vv0CoLU0v/l3F5oFAG5DV7qwkZVWA206LsAdOnEVyf5vQcLnb3cWJywu7mUxsQ==", + "dev": true, + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.3", + "mdast-util-definitions": "^3.0.0", + "mdurl": "^1.0.0", + "unist-builder": "^2.0.0", + "unist-util-generated": "^1.0.0", + "unist-util-position": "^3.0.0", + "unist-util-visit": "^2.0.0" + } + }, + "node_modules/mdast-util-to-markdown": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-0.5.3.tgz", + "integrity": "sha512-sr8q7fQJ1xoCqZSXW6dO/MYu2Md+a4Hfk9uO+XHCfiBhVM0EgWtfAV7BuN+ff6otUeu2xDyt1o7vhZGwOG3+BA==", + "dev": true, + "dependencies": { + "@types/unist": "^2.0.0", + "longest-streak": "^2.0.0", + "mdast-util-to-string": "^1.0.0", + "parse-entities": "^2.0.0", + "repeat-string": "^1.0.0", + "zwitch": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown/node_modules/longest-streak": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-2.0.4.tgz", + "integrity": "sha512-vM6rUVCVUJJt33bnmHiZEvr7wPT78ztX7rojL+LW51bHtLh6HTjx84LA5W4+oa6aKEJA7jJu5LR6vQRBpA5DVg==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-to-string": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-1.1.0.tgz", 
+ "integrity": "sha512-jVU0Nr2B9X3MU4tSK7JP1CMkSvOj7X5l/GboG1tKRw52lLF1x2Ju92Ms9tNetCcbfX3hzlM73zYo2NKkWSfF/A==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=", + "dev": true + }, + "node_modules/micromark": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.10.1.tgz", + "integrity": "sha512-fUuVF8sC1X7wsCS29SYQ2ZfIZYbTymp0EYr6sab3idFjigFFjGa5UwoniPlV9tAgntjuapW1t9U+S0yDYeGKHQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" + } + }, + "node_modules/micromark-extension-gfm": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-0.3.1.tgz", + "integrity": "sha512-lJlhcOqzoJdjQg+LMumVHdUQ61LjtqGdmZtrAdfvatRUnJTqZlRwXXHdLQgNDYlFw4mycZ4NSTKlya5QcQXl1A==", + "dev": true, + "dependencies": { + "micromark": "~2.10.0", + "micromark-extension-gfm-autolink-literal": "~0.5.0", + "micromark-extension-gfm-strikethrough": "~0.6.0", + "micromark-extension-gfm-table": "~0.4.0", + "micromark-extension-gfm-tagfilter": "~0.3.0", + "micromark-extension-gfm-task-list-item": "~0.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.1.tgz", + "integrity": "sha512-j30923tDp0faCNDjwqe4cMi+slegbGfc3VEAExEU8d54Q/F6pR6YxCVH+6xV0ItRoj3lCn1XkUWcy6FC3S9BOw==", + 
"dev": true, + "dependencies": { + "micromark": "~2.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-0.6.2.tgz", + "integrity": "sha512-aehEEqtTn3JekJNwZZxa7ZJVfzmuaWp4ew6x6sl3VAKIwdDZdqYeYSQIrNKwNgH7hX0g56fAwnSDLusJggjlCQ==", + "dev": true, + "dependencies": { + "micromark": "~2.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-table": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-0.4.1.tgz", + "integrity": "sha512-xVpqOnfFaa2OtC/Y7rlt4tdVFlUHdoLH3RXAZgb/KP3DDyKsAOx6BRS3UxiiyvmD/p2l6VUpD4bMIniuP4o4JA==", + "dev": true, + "dependencies": { + "micromark": "~2.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-tagfilter": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-0.3.0.tgz", + "integrity": "sha512-9GU0xBatryXifL//FJH+tAZ6i240xQuFrSL7mYi8f4oZSbc+NvXjkrHemeYP0+L4ZUT+Ptz3b95zhUZnMtoi/Q==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-task-list-item": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-0.3.2.tgz", + "integrity": "sha512-cm8lYS10YAqeXE9B27TK3u1Ihumo3H9p/3XumT+jp8vSuSbSpFIJe0bDi2kq4YAAIxtcTzUOxhEH4ko2/NYDkQ==", + "dev": true, + "dependencies": { + "micromark": "~2.10.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/not": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/not/-/not-0.1.0.tgz", + "integrity": "sha1-yWkcF0bFXc++VMvYvU/wQbwrUZ0=", + "dev": true + }, + "node_modules/nth-check": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", + "integrity": "sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==", + "dev": true, + "dependencies": { + "boolbase": "~1.0.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/parse-entities": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", + "integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", + "dev": true, + "dependencies": { + "character-entities": "^1.0.0", + "character-entities-legacy": "^1.0.0", + "character-reference-invalid": "^1.0.0", + "is-alphanumerical": "^1.0.0", + "is-decimal": "^1.0.0", + "is-hexadecimal": "^1.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/parse5": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", + "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==", + "dev": true + }, + "node_modules/property-information": { + "version": "5.5.0", + "resolved": 
"https://registry.npmjs.org/property-information/-/property-information-5.5.0.tgz", + "integrity": "sha512-RgEbCx2HLa1chNgvChcx+rrCWD0ctBmGSE0M7lVm1yyv4UbvbrWoXp/BkVLZefzjrRBGW8/Js6uh/BnlHXFyjA==", + "dev": true, + "dependencies": { + "xtend": "^4.0.0" + } + }, + "node_modules/rehype-raw": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-4.0.2.tgz", + "integrity": "sha512-xQt94oXfDaO7sK9mJBtsZXkjW/jm6kArCoYN+HqKZ51O19AFHlp3Xa5UfZZ2tJkbpAZzKtgVUYvnconk9IsFuA==", + "dev": true, + "dependencies": { + "hast-util-raw": "^5.0.0" + } + }, + "node_modules/rehype-stringify": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-8.0.0.tgz", + "integrity": "sha512-VkIs18G0pj2xklyllrPSvdShAV36Ff3yE5PUO9u36f6+2qJFnn22Z5gKwBOwgXviux4UC7K+/j13AnZfPICi/g==", + "dev": true, + "dependencies": { + "hast-util-to-html": "^7.1.1" + } + }, + "node_modules/remark": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/remark/-/remark-5.1.0.tgz", + "integrity": "sha1-y0Y709vLS5l5STXu4c9x16jjBow=", + "dev": true, + "dependencies": { + "remark-parse": "^1.1.0", + "remark-stringify": "^1.1.0", + "unified": "^4.1.1" + } + }, + "node_modules/remark-gfm": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-1.0.0.tgz", + "integrity": "sha512-KfexHJCiqvrdBZVbQ6RopMZGwaXz6wFJEfByIuEwGf0arvITHjiKKZ1dpXujjH9KZdm1//XJQwgfnJ3lmXaDPA==", + "dev": true, + "dependencies": { + "mdast-util-gfm": "^0.1.0", + "micromark-extension-gfm": "^0.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-html": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-12.0.0.tgz", + "integrity": "sha512-M104NMHs48+uswChJkCDXCdabzxAinpHikpt6kS3gmGMyIvPZ5kn53tB9shFsL2O4HUJ9DIEsah1SX1Ve5FXHA==", + "dev": true, + "dependencies": { + "hast-util-sanitize": "^3.0.0", + 
"hast-util-to-html": "^7.0.0", + "mdast-util-to-hast": "^9.0.0", + "xtend": "^4.0.1" + } + }, + "node_modules/remark-parse": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-9.0.0.tgz", + "integrity": "sha512-geKatMwSzEXKHuzBNU1z676sGcDcFoChMK38TgdHJNAYfFtsfHDQG7MoJAjs6sgYMqyLduCYWDIWZIxiPeafEw==", + "dev": true, + "dependencies": { + "mdast-util-from-markdown": "^0.8.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-7.0.0.tgz", + "integrity": "sha512-uqQ/VbaTdxyu/da6npHAso6hA00cMqhA3a59RziQdOLN2KEIkPykAVy52IcmZEVTuauXO0VtpxkyCey4phtHzQ==", + "dev": true, + "dependencies": { + "mdast-util-to-hast": "^9.1.0" + } + }, + "node_modules/remark-stringify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-1.1.0.tgz", + "integrity": "sha1-pxBeJbnuK/mkm3XSxCPxGwauIJI=", + "dev": true, + "dependencies": { + "ccount": "^1.0.0", + "extend": "^3.0.0", + "longest-streak": "^1.0.0", + "markdown-table": "^0.4.0", + "parse-entities": "^1.0.2", + "repeat-string": "^1.5.4", + "stringify-entities": "^1.0.1", + "unherit": "^1.0.4" + } + }, + "node_modules/remark-stringify/node_modules/parse-entities": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-1.2.2.tgz", + "integrity": "sha512-NzfpbxW/NPrzZ/yYSoQxyqUZMZXIdCfE0OIN4ESsnptHJECoUk3FZktxNuzQf4tjt5UEopnxpYJbvYuxIFDdsg==", + "dev": true, + "dependencies": { + "character-entities": "^1.0.0", + "character-entities-legacy": "^1.0.0", + "character-reference-invalid": "^1.0.0", + "is-alphanumerical": "^1.0.0", + "is-decimal": "^1.0.0", + "is-hexadecimal": "^1.0.0" + } + }, + "node_modules/remark-stringify/node_modules/stringify-entities": { + "version": "1.3.2", + "resolved": 
"https://registry.npmjs.org/stringify-entities/-/stringify-entities-1.3.2.tgz", + "integrity": "sha512-nrBAQClJAPN2p+uGCVJRPIPakKeKWZ9GtBCmormE7pWOSlHat7+x5A8gx85M7HM5Dt0BP3pP5RhVW77WdbJJ3A==", + "dev": true, + "dependencies": { + "character-entities-html4": "^1.0.0", + "character-entities-legacy": "^1.0.0", + "is-alphanumerical": "^1.0.0", + "is-hexadecimal": "^1.0.0" + } + }, + "node_modules/remark/node_modules/parse-entities": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-1.2.2.tgz", + "integrity": "sha512-NzfpbxW/NPrzZ/yYSoQxyqUZMZXIdCfE0OIN4ESsnptHJECoUk3FZktxNuzQf4tjt5UEopnxpYJbvYuxIFDdsg==", + "dev": true, + "dependencies": { + "character-entities": "^1.0.0", + "character-entities-legacy": "^1.0.0", + "character-reference-invalid": "^1.0.0", + "is-alphanumerical": "^1.0.0", + "is-decimal": "^1.0.0", + "is-hexadecimal": "^1.0.0" + } + }, + "node_modules/remark/node_modules/remark-parse": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-1.1.0.tgz", + "integrity": "sha1-w8oQ+ajaBGFcKPCapOMEUQUm7CE=", + "dev": true, + "dependencies": { + "collapse-white-space": "^1.0.0", + "extend": "^3.0.0", + "parse-entities": "^1.0.2", + "repeat-string": "^1.5.4", + "trim": "0.0.1", + "trim-trailing-lines": "^1.0.0", + "unherit": "^1.0.4", + "unist-util-remove-position": "^1.0.0", + "vfile-location": "^2.0.0" + } + }, + "node_modules/remark/node_modules/unified": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/unified/-/unified-4.2.1.tgz", + "integrity": "sha1-dv9Dqo2kMPbn5KVchOusKtLPzS4=", + "dev": true, + "dependencies": { + "bail": "^1.0.0", + "extend": "^3.0.0", + "has": "^1.0.1", + "once": "^1.3.3", + "trough": "^1.0.0", + "vfile": "^1.0.0" + } + }, + "node_modules/remark/node_modules/unist-util-remove-position": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-1.1.4.tgz", + 
"integrity": "sha512-tLqd653ArxJIPnKII6LMZwH+mb5q+n/GtXQZo6S6csPRs5zB0u79Yw8ouR3wTw8wxvdJFhpP6Y7jorWdCgLO0A==", + "dev": true, + "dependencies": { + "unist-util-visit": "^1.1.0" + } + }, + "node_modules/remark/node_modules/unist-util-visit": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-1.4.1.tgz", + "integrity": "sha512-AvGNk7Bb//EmJZyhtRUnNMEpId/AZ5Ph/KUpTI09WHQuDZHKovQ1oEv3mfmKpWKtoMzyMC4GLBm1Zy5k12fjIw==", + "dev": true, + "dependencies": { + "unist-util-visit-parents": "^2.0.0" + } + }, + "node_modules/remark/node_modules/unist-util-visit-parents": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-2.1.2.tgz", + "integrity": "sha512-DyN5vD4NE3aSeB+PXYNKxzGsfocxp6asDc2XXE3b0ekO2BaRUpBicbbUygfSvYfUz1IkmjFR1YF7dPklraMZ2g==", + "dev": true, + "dependencies": { + "unist-util-is": "^3.0.0" + } + }, + "node_modules/remark/node_modules/vfile": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-1.4.0.tgz", + "integrity": "sha1-wP1vpIT43r23cfaMMe112I2pf+c=", + "dev": true + }, + "node_modules/remark/node_modules/vfile-location": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-2.0.6.tgz", + "integrity": "sha512-sSFdyCP3G6Ka0CEmN83A2YCMKIieHx0EDaj5IDP4g1pa5ZJ4FJDvpO0WODLxo4LUX4oe52gmSCK7Jw4SBghqxA==", + "dev": true + }, + "node_modules/repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", + "dev": true + }, + "node_modules/replace-ext": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-1.0.0.tgz", + "integrity": "sha1-3mMSg3P8v3w8z6TeWkgMRaZ5WOs=", + "dev": true + }, + "node_modules/space-separated-tokens": { + "version": "1.1.5", + "resolved": 
"https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz", + "integrity": "sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==", + "dev": true + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "node_modules/stringify-entities": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-3.0.1.tgz", + "integrity": "sha512-Lsk3ISA2++eJYqBMPKcr/8eby1I6L0gP0NlxF8Zja6c05yr/yCYyb2c9PwXjd08Ib3If1vn1rbs1H5ZtVuOfvQ==", + "dev": true, + "dependencies": { + "character-entities-html4": "^1.0.0", + "character-entities-legacy": "^1.0.0", + "is-alphanumerical": "^1.0.0", + "is-decimal": "^1.0.2", + "is-hexadecimal": "^1.0.0" + } + }, + "node_modules/style-to-object": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.2.3.tgz", + "integrity": "sha512-1d/k4EY2N7jVLOqf2j04dTc37TPOv/hHxZmvpg8Pdh8UYydxeu/C1W1U4vD8alzf5V2Gt7rLsmkr4dxAlDm9ng==", + "dev": true, + "dependencies": { + "inline-style-parser": "0.1.1" + } + }, + "node_modules/to-vfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/to-vfile/-/to-vfile-6.1.0.tgz", + "integrity": "sha512-BxX8EkCxOAZe+D/ToHdDsJcVI4HqQfmw0tCkp31zf3dNP/XWIAjU4CmeuSwsSoOzOTqHPOL0KUzyZqJplkD0Qw==", + "dev": true, + "dependencies": { + "is-buffer": "^2.0.0", + "vfile": "^4.0.0" + } + }, + "node_modules/trim": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz", + "integrity": "sha1-WFhUf2spB1fulczMZm+1AITEYN0=", + "dev": true + }, + "node_modules/trim-trailing-lines": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.3.tgz", + "integrity": 
"sha512-4ku0mmjXifQcTVfYDfR5lpgV7zVqPg6zV9rdZmwOPqq0+Zq19xDqEgagqVbc4pOOShbncuAOIs59R3+3gcF3ZA==", + "dev": true + }, + "node_modules/trough": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", + "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==", + "dev": true + }, + "node_modules/unherit": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/unherit/-/unherit-1.1.3.tgz", + "integrity": "sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ==", + "dev": true, + "dependencies": { + "inherits": "^2.0.0", + "xtend": "^4.0.0" + } + }, + "node_modules/unified": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz", + "integrity": "sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg==", + "dev": true, + "dependencies": { + "bail": "^1.0.0", + "extend": "^3.0.0", + "is-buffer": "^2.0.0", + "is-plain-obj": "^2.0.0", + "trough": "^1.0.0", + "vfile": "^4.0.0" + } + }, + "node_modules/unist-builder": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz", + "integrity": "sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw==", + "dev": true + }, + "node_modules/unist-util-find": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/unist-util-find/-/unist-util-find-1.0.1.tgz", + "integrity": "sha1-EGK7tpKMepfGrcibU3RdTEbCIqI=", + "dev": true, + "dependencies": { + "lodash.iteratee": "^4.5.0", + "remark": "^5.0.1", + "unist-util-visit": "^1.1.0" + } + }, + "node_modules/unist-util-find/node_modules/unist-util-visit": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-1.4.1.tgz", + "integrity": 
"sha512-AvGNk7Bb//EmJZyhtRUnNMEpId/AZ5Ph/KUpTI09WHQuDZHKovQ1oEv3mfmKpWKtoMzyMC4GLBm1Zy5k12fjIw==", + "dev": true, + "dependencies": { + "unist-util-visit-parents": "^2.0.0" + } + }, + "node_modules/unist-util-find/node_modules/unist-util-visit-parents": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-2.1.2.tgz", + "integrity": "sha512-DyN5vD4NE3aSeB+PXYNKxzGsfocxp6asDc2XXE3b0ekO2BaRUpBicbbUygfSvYfUz1IkmjFR1YF7dPklraMZ2g==", + "dev": true, + "dependencies": { + "unist-util-is": "^3.0.0" + } + }, + "node_modules/unist-util-generated": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.5.tgz", + "integrity": "sha512-1TC+NxQa4N9pNdayCYA1EGUOCAO0Le3fVp7Jzns6lnua/mYgwHo0tz5WUAfrdpNch1RZLHc61VZ1SDgrtNXLSw==", + "dev": true + }, + "node_modules/unist-util-is": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-3.0.0.tgz", + "integrity": "sha512-sVZZX3+kspVNmLWBPAB6r+7D9ZgAFPNWm66f7YNb420RlQSbn+n8rG8dGZSkrER7ZIXGQYNm5pqC3v3HopH24A==", + "dev": true + }, + "node_modules/unist-util-position": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz", + "integrity": "sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==", + "dev": true + }, + "node_modules/unist-util-select": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/unist-util-select/-/unist-util-select-3.0.1.tgz", + "integrity": "sha512-VQpTuqZVJlRbosQdnLdTPIIqwZeU70YZ5aMBOqtFNGeeCdYn6ORZt/9RiaVlbl06ocuf58SVMoFa7a13CSGPMA==", + "dev": true, + "dependencies": { + "css-selector-parser": "^1.0.0", + "not": "^0.1.0", + "nth-check": "^1.0.0", + "unist-util-is": "^4.0.0", + "zwitch": "^1.0.0" + } + }, + "node_modules/unist-util-select/node_modules/unist-util-is": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.0.2.tgz", + "integrity": "sha512-Ofx8uf6haexJwI1gxWMGg6I/dLnF2yE+KibhD3/diOqY2TinLcqHXCV6OI5gFVn3xQqDH+u0M625pfKwIwgBKQ==", + "dev": true + }, + "node_modules/unist-util-stringify-position": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz", + "integrity": "sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==", + "dev": true, + "dependencies": { + "@types/unist": "^2.0.2" + } + }, + "node_modules/unist-util-visit": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", + "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", + "dev": true, + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^4.0.0", + "unist-util-visit-parents": "^3.0.0" + } + }, + "node_modules/unist-util-visit-parents": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.0.tgz", + "integrity": "sha512-0g4wbluTF93npyPrp/ymd3tCDTMnP0yo2akFD2FIBAYXq/Sga3lwaU1D8OYKbtpioaI6CkDcQ6fsMnmtzt7htw==", + "dev": true, + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^4.0.0" + } + }, + "node_modules/unist-util-visit-parents/node_modules/unist-util-is": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.0.2.tgz", + "integrity": "sha512-Ofx8uf6haexJwI1gxWMGg6I/dLnF2yE+KibhD3/diOqY2TinLcqHXCV6OI5gFVn3xQqDH+u0M625pfKwIwgBKQ==", + "dev": true + }, + "node_modules/unist-util-visit/node_modules/unist-util-is": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.0.2.tgz", + "integrity": "sha512-Ofx8uf6haexJwI1gxWMGg6I/dLnF2yE+KibhD3/diOqY2TinLcqHXCV6OI5gFVn3xQqDH+u0M625pfKwIwgBKQ==", + "dev": true + }, + 
"node_modules/vfile": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.2.0.tgz", + "integrity": "sha512-a/alcwCvtuc8OX92rqqo7PflxiCgXRFjdyoGVuYV+qbgCb0GgZJRvIgCD4+U/Kl1yhaRsaTwksF88xbPyGsgpw==", + "dev": true, + "dependencies": { + "@types/unist": "^2.0.0", + "is-buffer": "^2.0.0", + "replace-ext": "1.0.0", + "unist-util-stringify-position": "^2.0.0", + "vfile-message": "^2.0.0" + } + }, + "node_modules/vfile-message": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz", + "integrity": "sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==", + "dev": true, + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^2.0.0" + } + }, + "node_modules/web-namespaces": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz", + "integrity": "sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==", + "dev": true + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true + }, + "node_modules/zwitch": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz", + "integrity": "sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==", + "dev": true + } + }, "dependencies": { "@types/mdast": { "version": "3.0.3", @@ -88,6 +1243,15 @@ "integrity": "sha512-HYPSb7y/Z7BNDCOrakL4raGO2zltZkbeXyAd6Tg9obzix6QhzxCotdBl6VT0Dv4vZfJGVz3WL/xaEI9Ly3ul0g==", "dev": true }, + "debug": { + "version": 
"4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, "esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", @@ -300,18 +1464,6 @@ "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", "dev": true }, - "is-whitespace-character": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz", - "integrity": "sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w==", - "dev": true - }, - "is-word-character": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz", - "integrity": "sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA==", - "dev": true - }, "js-yaml": { "version": "3.14.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.0.tgz", @@ -334,12 +1486,6 @@ "integrity": "sha1-0GWXxNTDG1LMsfXY+P5xSOr9aWU=", "dev": true }, - "markdown-escapes": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz", - "integrity": "sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg==", - "dev": true - }, "markdown-table": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-0.4.0.tgz", @@ -355,6 +1501,75 @@ "unist-util-visit": "^2.0.0" } }, + "mdast-util-from-markdown": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-0.8.1.tgz", + "integrity": "sha512-qJXNcFcuCSPqUF0Tb0uYcFDIq67qwB3sxo9RPdf9vG8T90ViKnksFqdB/Coq2a7sTnxL/Ify2y7aIQXDkQFH0w==", + "dev": true, + 
"requires": { + "@types/mdast": "^3.0.0", + "mdast-util-to-string": "^1.0.0", + "micromark": "~2.10.0", + "parse-entities": "^2.0.0" + } + }, + "mdast-util-gfm": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-0.1.0.tgz", + "integrity": "sha512-HLfygQL6HdhJhFbLta4Ki9hClrzyAxRjyRvpm5caN65QZL+NyHPmqFlnF9vm1Rn58JT2+AbLwNcEDY4MEvkk8Q==", + "dev": true, + "requires": { + "mdast-util-gfm-autolink-literal": "^0.1.0", + "mdast-util-gfm-strikethrough": "^0.2.0", + "mdast-util-gfm-table": "^0.1.0", + "mdast-util-gfm-task-list-item": "^0.1.0" + } + }, + "mdast-util-gfm-autolink-literal": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-0.1.1.tgz", + "integrity": "sha512-gJ2xSpqKCetSr22GEWpZH3f5ffb4pPn/72m4piY0v7T/S+O7n7rw+sfoPLhb2b4O7WdnERoYdALRcmD68FMtlw==", + "dev": true + }, + "mdast-util-gfm-strikethrough": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-0.2.2.tgz", + "integrity": "sha512-T37ZbaokJcRbHROXmoVAieWnesPD5N21tv2ifYzaGRLbkh1gknItUGhZzHefUn5Zc/eaO/iTDSAFOBrn/E8kWw==", + "dev": true, + "requires": { + "mdast-util-to-markdown": "^0.5.0" + } + }, + "mdast-util-gfm-table": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-0.1.4.tgz", + "integrity": "sha512-T4xFSON9kUb/IpYA5N+KGWcsdGczAvILvKiXQwUGind6V9fvjPCR9yhZnIeaLdBWXaz3m/Gq77ZtuLMjtFR4IQ==", + "dev": true, + "requires": { + "markdown-table": "^2.0.0", + "mdast-util-to-markdown": "^0.5.0" + }, + "dependencies": { + "markdown-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz", + "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==", + "dev": true, + "requires": { + "repeat-string": "^1.0.0" + } + } + } + }, + 
"mdast-util-gfm-task-list-item": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-0.1.5.tgz", + "integrity": "sha512-6O0bt34r+e7kYjeSwedhjDPYraspKIYKbhvhQEEioL7gSmXDxhN7WQW2KoxhVMpNzjNc03yC7K5KH6NHlz2jOA==", + "dev": true, + "requires": { + "mdast-util-to-markdown": "^0.5.0" + } + }, "mdast-util-to-hast": { "version": "9.1.1", "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-9.1.1.tgz", @@ -371,12 +1586,112 @@ "unist-util-visit": "^2.0.0" } }, + "mdast-util-to-markdown": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-0.5.3.tgz", + "integrity": "sha512-sr8q7fQJ1xoCqZSXW6dO/MYu2Md+a4Hfk9uO+XHCfiBhVM0EgWtfAV7BuN+ff6otUeu2xDyt1o7vhZGwOG3+BA==", + "dev": true, + "requires": { + "@types/unist": "^2.0.0", + "longest-streak": "^2.0.0", + "mdast-util-to-string": "^1.0.0", + "parse-entities": "^2.0.0", + "repeat-string": "^1.0.0", + "zwitch": "^1.0.0" + }, + "dependencies": { + "longest-streak": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-2.0.4.tgz", + "integrity": "sha512-vM6rUVCVUJJt33bnmHiZEvr7wPT78ztX7rojL+LW51bHtLh6HTjx84LA5W4+oa6aKEJA7jJu5LR6vQRBpA5DVg==", + "dev": true + } + } + }, + "mdast-util-to-string": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-1.1.0.tgz", + "integrity": "sha512-jVU0Nr2B9X3MU4tSK7JP1CMkSvOj7X5l/GboG1tKRw52lLF1x2Ju92Ms9tNetCcbfX3hzlM73zYo2NKkWSfF/A==", + "dev": true + }, "mdurl": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=", "dev": true }, + "micromark": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.10.1.tgz", + "integrity": 
"sha512-fUuVF8sC1X7wsCS29SYQ2ZfIZYbTymp0EYr6sab3idFjigFFjGa5UwoniPlV9tAgntjuapW1t9U+S0yDYeGKHQ==", + "dev": true, + "requires": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" + } + }, + "micromark-extension-gfm": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-0.3.1.tgz", + "integrity": "sha512-lJlhcOqzoJdjQg+LMumVHdUQ61LjtqGdmZtrAdfvatRUnJTqZlRwXXHdLQgNDYlFw4mycZ4NSTKlya5QcQXl1A==", + "dev": true, + "requires": { + "micromark": "~2.10.0", + "micromark-extension-gfm-autolink-literal": "~0.5.0", + "micromark-extension-gfm-strikethrough": "~0.6.0", + "micromark-extension-gfm-table": "~0.4.0", + "micromark-extension-gfm-tagfilter": "~0.3.0", + "micromark-extension-gfm-task-list-item": "~0.3.0" + } + }, + "micromark-extension-gfm-autolink-literal": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.1.tgz", + "integrity": "sha512-j30923tDp0faCNDjwqe4cMi+slegbGfc3VEAExEU8d54Q/F6pR6YxCVH+6xV0ItRoj3lCn1XkUWcy6FC3S9BOw==", + "dev": true, + "requires": { + "micromark": "~2.10.0" + } + }, + "micromark-extension-gfm-strikethrough": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-0.6.2.tgz", + "integrity": "sha512-aehEEqtTn3JekJNwZZxa7ZJVfzmuaWp4ew6x6sl3VAKIwdDZdqYeYSQIrNKwNgH7hX0g56fAwnSDLusJggjlCQ==", + "dev": true, + "requires": { + "micromark": "~2.10.0" + } + }, + "micromark-extension-gfm-table": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-0.4.1.tgz", + "integrity": "sha512-xVpqOnfFaa2OtC/Y7rlt4tdVFlUHdoLH3RXAZgb/KP3DDyKsAOx6BRS3UxiiyvmD/p2l6VUpD4bMIniuP4o4JA==", + "dev": true, + "requires": { + "micromark": "~2.10.0" + } + }, + "micromark-extension-gfm-tagfilter": { + "version": "0.3.0", + "resolved": 
"https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-0.3.0.tgz", + "integrity": "sha512-9GU0xBatryXifL//FJH+tAZ6i240xQuFrSL7mYi8f4oZSbc+NvXjkrHemeYP0+L4ZUT+Ptz3b95zhUZnMtoi/Q==", + "dev": true + }, + "micromark-extension-gfm-task-list-item": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-0.3.2.tgz", + "integrity": "sha512-cm8lYS10YAqeXE9B27TK3u1Ihumo3H9p/3XumT+jp8vSuSbSpFIJe0bDi2kq4YAAIxtcTzUOxhEH4ko2/NYDkQ==", + "dev": true, + "requires": { + "micromark": "~2.10.0" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, "not": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/not/-/not-0.1.0.tgz", @@ -545,6 +1860,16 @@ } } }, + "remark-gfm": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-1.0.0.tgz", + "integrity": "sha512-KfexHJCiqvrdBZVbQ6RopMZGwaXz6wFJEfByIuEwGf0arvITHjiKKZ1dpXujjH9KZdm1//XJQwgfnJ3lmXaDPA==", + "dev": true, + "requires": { + "mdast-util-gfm": "^0.1.0", + "micromark-extension-gfm": "^0.3.0" + } + }, "remark-html": { "version": "12.0.0", "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-12.0.0.tgz", @@ -558,27 +1883,12 @@ } }, "remark-parse": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-8.0.3.tgz", - "integrity": "sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-9.0.0.tgz", + "integrity": "sha512-geKatMwSzEXKHuzBNU1z676sGcDcFoChMK38TgdHJNAYfFtsfHDQG7MoJAjs6sgYMqyLduCYWDIWZIxiPeafEw==", "dev": true, "requires": { - "ccount": "^1.0.0", - "collapse-white-space": "^1.0.2", - 
"is-alphabetical": "^1.0.0", - "is-decimal": "^1.0.0", - "is-whitespace-character": "^1.0.0", - "is-word-character": "^1.0.0", - "markdown-escapes": "^1.0.0", - "parse-entities": "^2.0.0", - "repeat-string": "^1.5.4", - "state-toggle": "^1.0.0", - "trim": "0.0.1", - "trim-trailing-lines": "^1.0.0", - "unherit": "^1.0.4", - "unist-util-remove-position": "^2.0.0", - "vfile-location": "^3.0.0", - "xtend": "^4.0.1" + "mdast-util-from-markdown": "^0.8.0" } }, "remark-rehype": { @@ -658,12 +1968,6 @@ "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", "dev": true }, - "state-toggle": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.3.tgz", - "integrity": "sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ==", - "dev": true - }, "stringify-entities": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-3.0.1.tgz", @@ -793,15 +2097,6 @@ "integrity": "sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==", "dev": true }, - "unist-util-remove-position": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz", - "integrity": "sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA==", - "dev": true, - "requires": { - "unist-util-visit": "^2.0.0" - } - }, "unist-util-select": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/unist-util-select/-/unist-util-select-3.0.1.tgz", @@ -882,12 +2177,6 @@ "vfile-message": "^2.0.0" } }, - "vfile-location": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-3.1.0.tgz", - "integrity": "sha512-FCZ4AN9xMcjFIG1oGmZKo61PjwJHRVA+0/tPUP2ul4uIwjGGndIxavEMRpWn5p4xwm/ZsdXp9YNygf1ZyE4x8g==", - "dev": true - }, "vfile-message": { "version": "2.0.4", "resolved": 
"https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz", diff --git a/tools/doc/package.json b/tools/doc/package.json index e3e60831a85e57..d396e23248a63c 100644 --- a/tools/doc/package.json +++ b/tools/doc/package.json @@ -11,8 +11,9 @@ "js-yaml": "3.14.0", "rehype-raw": "4.0.2", "rehype-stringify": "8.0.0", + "remark-gfm": "^1.0.0", "remark-html": "12.0.0", - "remark-parse": "8.0.3", + "remark-parse": "^9.0.0", "remark-rehype": "7.0.0", "to-vfile": "6.1.0", "unified": "9.2.0", @@ -20,6 +21,7 @@ "unist-util-select": "3.0.1", "unist-util-visit": "2.0.3" }, - "optionalDependencies": {}, - "bin": "./generate.js" + "bin": { + "node-doc-generator": "generate.js" + } } From 4140f491fdfb0e8c036fdb14b0f9473f0ef0e9f7 Mon Sep 17 00:00:00 2001 From: raisinten Date: Mon, 9 Nov 2020 19:52:20 +0530 Subject: [PATCH 16/98] util: fix to inspect getters that access this Fixes: https://github.com/nodejs/node/issues/36045 Co-authored-by: Antoine du Hamel PR-URL: https://github.com/nodejs/node/pull/36052 Reviewed-By: Anna Henningsen Reviewed-By: Antoine du Hamel Reviewed-By: Rich Trott --- lib/internal/util/inspect.js | 8 +++-- ...est-util-inspect-getters-accessing-this.js | 30 +++++++++++++++++++ 2 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 test/parallel/test-util-inspect-getters-accessing-this.js diff --git a/lib/internal/util/inspect.js b/lib/internal/util/inspect.js index 4e1ed55c81cd98..59c4c8a833e445 100644 --- a/lib/internal/util/inspect.js +++ b/lib/internal/util/inspect.js @@ -46,6 +46,7 @@ const { ObjectPrototypePropertyIsEnumerable, ObjectSeal, ObjectSetPrototypeOf, + ReflectApply, RegExp, RegExpPrototypeToString, Set, @@ -627,7 +628,7 @@ function addPrototypeProperties(ctx, main, obj, recurseTimes, output) { continue; } const value = formatProperty( - ctx, obj, recurseTimes, key, kObjectType, desc); + ctx, obj, recurseTimes, key, kObjectType, desc, main); if (ctx.colors) { // Faint! 
output.push(`\u001b[2m${value}\u001b[22m`); @@ -1677,7 +1678,8 @@ function formatPromise(ctx, value, recurseTimes) { return output; } -function formatProperty(ctx, value, recurseTimes, key, type, desc) { +function formatProperty(ctx, value, recurseTimes, key, type, desc, + original = value) { let name, str; let extra = ' '; desc = desc || ObjectGetOwnPropertyDescriptor(value, key) || @@ -1698,7 +1700,7 @@ function formatProperty(ctx, value, recurseTimes, key, type, desc) { (ctx.getters === 'get' && desc.set === undefined) || (ctx.getters === 'set' && desc.set !== undefined))) { try { - const tmp = value[key]; + const tmp = ReflectApply(desc.get, original, []); ctx.indentationLvl += 2; if (tmp === null) { str = `${s(`[${label}:`, sp)} ${s('null', 'null')}${s(']', sp)}`; diff --git a/test/parallel/test-util-inspect-getters-accessing-this.js b/test/parallel/test-util-inspect-getters-accessing-this.js new file mode 100644 index 00000000000000..3d185b134e852d --- /dev/null +++ b/test/parallel/test-util-inspect-getters-accessing-this.js @@ -0,0 +1,30 @@ +'use strict'; + +require('../common'); + +// This test ensures that util.inspect logs getters +// which access this. + +const assert = require('assert'); + +const util = require('util'); + +class X { + constructor() { + this._y = 123; + } + + get y() { + return this._y; + } +} + +const result = util.inspect(new X(), { + getters: true, + showHidden: true +}); + +assert.strictEqual( + result, + 'X { _y: 123, [y]: [Getter: 123] }' +); From d2c757ab19adebbb08db915086e9a55e3e410270 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ole=20Andr=C3=A9=20Vadla=20Ravn=C3=A5s?= Date: Fri, 6 Nov 2020 02:46:09 +0100 Subject: [PATCH 17/98] deps: V8: cherry-pick 27e1ac1a79ff Original commit message: [wasm][mac] Support w^x codespaces for Apple Silicon Apple's upcoming arm64 devices will prevent rwx access to memory, but in turn provide a new per-thread way to switch between write and execute permissions. 
This patch puts that system to use for the WebAssembly subsystem. The approach relies on CodeSpaceWriteScope objects for now. That isn't optimal for background threads (which could stay in "write" mode permanently instead of toggling), but its simplicity makes it a good first step. Background: https://developer.apple.com/documentation/apple_silicon/porting_just-in-time_compilers_to_apple_silicon Bug: chromium:1117591 Change-Id: I3b60f0efd34c0fed924dfc71ee2c7805801c5d42 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2378307 Commit-Queue: Jakob Kummerow Reviewed-by: Michael Lippautz Reviewed-by: Thibaud Michaud Cr-Commit-Position: refs/heads/master@{#69791} PR-URL: https://github.com/nodejs/node/pull/35986 Reviewed-By: Anna Henningsen Reviewed-By: Richard Lau Reviewed-By: Michael Dawson Reviewed-By: Rich Trott Reviewed-By: Beth Griggs Reviewed-By: Jiawen Geng --- common.gypi | 2 +- deps/v8/BUILD.gn | 1 + deps/v8/src/base/platform/platform-posix.cc | 8 +++ deps/v8/src/wasm/code-space-access.h | 69 +++++++++++++++++++ deps/v8/src/wasm/wasm-code-manager.cc | 15 ++++ deps/v8/src/wasm/wasm-serialization.cc | 2 + deps/v8/test/cctest/cctest.status | 7 ++ deps/v8/test/cctest/test-assembler-arm64.cc | 4 +- .../test/cctest/test-code-stub-assembler.cc | 5 +- deps/v8/test/cctest/test-icache.cc | 5 ++ .../cctest/wasm/test-jump-table-assembler.cc | 9 +++ deps/v8/test/unittests/unittests.status | 21 ++++++ 12 files changed, 143 insertions(+), 5 deletions(-) create mode 100644 deps/v8/src/wasm/code-space-access.h diff --git a/common.gypi b/common.gypi index 4745bb5ac77639..e610650a01d4ab 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.17', + 'v8_embedder_string': '-node.18', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/BUILD.gn b/deps/v8/BUILD.gn index bda33c185fe683..219837ff45e9e3 100644 --- a/deps/v8/BUILD.gn +++ b/deps/v8/BUILD.gn @@ -3237,6 +3237,7 @@ v8_source_set("v8_base_without_compiler") { "src/wasm/baseline/liftoff-compiler.cc", "src/wasm/baseline/liftoff-compiler.h", "src/wasm/baseline/liftoff-register.h", + "src/wasm/code-space-access.h", "src/wasm/compilation-environment.h", "src/wasm/decoder.h", "src/wasm/function-body-decoder-impl.h", diff --git a/deps/v8/src/base/platform/platform-posix.cc b/deps/v8/src/base/platform/platform-posix.cc index 14294019d90dd0..b765ad1897a3cf 100644 --- a/deps/v8/src/base/platform/platform-posix.cc +++ b/deps/v8/src/base/platform/platform-posix.cc @@ -151,6 +151,14 @@ int GetFlagsForMemoryPermission(OS::MemoryPermission access, #if V8_OS_QNX flags |= MAP_LAZY; #endif // V8_OS_QNX +#if V8_OS_MACOSX && V8_HOST_ARCH_ARM64 && defined(MAP_JIT) && \ + !defined(V8_OS_IOS) + // TODO(jkummerow): using the V8_OS_IOS define is a crude approximation + // of the fact that we don't want to set the MAP_JIT flag when + // FLAG_jitless == true, as src/base/ doesn't know any flags. + // TODO(crbug.com/1117591): This is only needed for code spaces. + flags |= MAP_JIT; +#endif } return flags; } diff --git a/deps/v8/src/wasm/code-space-access.h b/deps/v8/src/wasm/code-space-access.h new file mode 100644 index 00000000000000..5eeb980e17eddc --- /dev/null +++ b/deps/v8/src/wasm/code-space-access.h @@ -0,0 +1,69 @@ +// Copyright 2020 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef V8_WASM_CODE_SPACE_ACCESS_H_ +#define V8_WASM_CODE_SPACE_ACCESS_H_ + +#include "src/base/build_config.h" +#include "src/base/macros.h" +#include "src/common/globals.h" + +namespace v8 { +namespace internal { + +#if defined(V8_OS_MACOSX) && defined(V8_HOST_ARCH_ARM64) + +// Ignoring this warning is considered better than relying on +// __builtin_available. +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wunguarded-availability-new" +inline void SwitchMemoryPermissionsToWritable() { + pthread_jit_write_protect_np(0); +} +inline void SwitchMemoryPermissionsToExecutable() { + pthread_jit_write_protect_np(1); +} +#pragma clang diagnostic pop + +namespace wasm { + +class CodeSpaceWriteScope { + public: + // TODO(jkummerow): Background threads could permanently stay in + // writable mode; only the main thread has to switch back and forth. + CodeSpaceWriteScope() { + if (code_space_write_nesting_level_ == 0) { + SwitchMemoryPermissionsToWritable(); + } + code_space_write_nesting_level_++; + } + ~CodeSpaceWriteScope() { + code_space_write_nesting_level_--; + if (code_space_write_nesting_level_ == 0) { + SwitchMemoryPermissionsToExecutable(); + } + } + + private: + static thread_local int code_space_write_nesting_level_; +}; + +#define CODE_SPACE_WRITE_SCOPE CodeSpaceWriteScope _write_access_; + +} // namespace wasm + +#else // Not Mac-on-arm64. + +// Nothing to do, we map code memory with rwx permissions. 
+inline void SwitchMemoryPermissionsToWritable() {} +inline void SwitchMemoryPermissionsToExecutable() {} + +#define CODE_SPACE_WRITE_SCOPE + +#endif // V8_OS_MACOSX && V8_HOST_ARCH_ARM64 + +} // namespace internal +} // namespace v8 + +#endif // V8_WASM_CODE_SPACE_ACCESS_H_ diff --git a/deps/v8/src/wasm/wasm-code-manager.cc b/deps/v8/src/wasm/wasm-code-manager.cc index 2f5a3f479b2475..8bc6a353405831 100644 --- a/deps/v8/src/wasm/wasm-code-manager.cc +++ b/deps/v8/src/wasm/wasm-code-manager.cc @@ -6,6 +6,7 @@ #include +#include "src/base/build_config.h" #include "src/base/iterator.h" #include "src/base/macros.h" #include "src/base/platform/platform.h" @@ -21,6 +22,7 @@ #include "src/snapshot/embedded/embedded-data.h" #include "src/utils/ostreams.h" #include "src/utils/vector.h" +#include "src/wasm/code-space-access.h" #include "src/wasm/compilation-environment.h" #include "src/wasm/function-compiler.h" #include "src/wasm/jump-table-assembler.h" @@ -47,6 +49,10 @@ namespace wasm { using trap_handler::ProtectedInstructionData; +#if defined(V8_OS_MACOSX) && defined(V8_HOST_ARCH_ARM64) +thread_local int CodeSpaceWriteScope::code_space_write_nesting_level_ = 0; +#endif + base::AddressRegion DisjointAllocationPool::Merge( base::AddressRegion new_region) { // Find the possible insertion position by identifying the first region whose @@ -731,6 +737,7 @@ void WasmCodeAllocator::FreeCode(Vector codes) { // Zap code area and collect freed code regions. 
DisjointAllocationPool freed_regions; size_t code_size = 0; + CODE_SPACE_WRITE_SCOPE for (WasmCode* code : codes) { ZapCode(code->instruction_start(), code->instructions().size()); FlushInstructionCache(code->instruction_start(), @@ -847,6 +854,7 @@ CompilationEnv NativeModule::CreateCompilationEnv() const { } WasmCode* NativeModule::AddCodeForTesting(Handle code) { + CODE_SPACE_WRITE_SCOPE // For off-heap builtins, we create a copy of the off-heap instruction stream // instead of the on-heap code object containing the trampoline. Ensure that // we do not apply the on-heap reloc info to the off-heap instructions. @@ -942,6 +950,7 @@ void NativeModule::UseLazyStub(uint32_t func_index) { if (!lazy_compile_table_) { uint32_t num_slots = module_->num_declared_functions; WasmCodeRefScope code_ref_scope; + CODE_SPACE_WRITE_SCOPE base::AddressRegion single_code_space_region; { base::MutexGuard guard(&allocation_mutex_); @@ -1003,6 +1012,7 @@ std::unique_ptr NativeModule::AddCodeWithCodeSpace( const int code_comments_offset = desc.code_comments_offset; const int instr_size = desc.instr_size; + CODE_SPACE_WRITE_SCOPE memcpy(dst_code_bytes.begin(), desc.buffer, static_cast(desc.instr_size)); @@ -1138,6 +1148,7 @@ WasmCode* NativeModule::AddDeserializedCode( Vector protected_instructions_data, Vector reloc_info, Vector source_position_table, WasmCode::Kind kind, ExecutionTier tier) { + // CodeSpaceWriteScope is provided by the caller. Vector dst_code_bytes = code_allocator_.AllocateForCode(this, instructions.size()); memcpy(dst_code_bytes.begin(), instructions.begin(), instructions.size()); @@ -1196,6 +1207,7 @@ WasmCode* NativeModule::CreateEmptyJumpTableInRegion( Vector code_space = code_allocator_.AllocateForCodeInRegion( this, jump_table_size, region, allocator_lock); DCHECK(!code_space.empty()); + CODE_SPACE_WRITE_SCOPE ZapCode(reinterpret_cast
(code_space.begin()), code_space.size()); std::unique_ptr code{ new WasmCode{this, // native_module @@ -1221,6 +1233,7 @@ void NativeModule::PatchJumpTablesLocked(uint32_t slot_index, Address target) { // The caller must hold the {allocation_mutex_}, thus we fail to lock it here. DCHECK(!allocation_mutex_.TryLock()); + CODE_SPACE_WRITE_SCOPE for (auto& code_space_data : code_space_data_) { DCHECK_IMPLIES(code_space_data.jump_table, code_space_data.far_jump_table); if (!code_space_data.jump_table) continue; @@ -1283,6 +1296,7 @@ void NativeModule::AddCodeSpace( #endif // V8_OS_WIN64 WasmCodeRefScope code_ref_scope; + CODE_SPACE_WRITE_SCOPE WasmCode* jump_table = nullptr; WasmCode* far_jump_table = nullptr; const uint32_t num_wasm_functions = module_->num_declared_functions; @@ -1843,6 +1857,7 @@ std::vector> NativeModule::AddCompiledCode( generated_code.reserve(results.size()); // Now copy the generated code into the code space and relocate it. + CODE_SPACE_WRITE_SCOPE for (auto& result : results) { DCHECK_EQ(result.code_desc.buffer, result.instr_buffer.get()); size_t code_size = RoundUp(result.code_desc.instr_size); diff --git a/deps/v8/src/wasm/wasm-serialization.cc b/deps/v8/src/wasm/wasm-serialization.cc index e5bab7e2cdc57c..f4f5f992682a06 100644 --- a/deps/v8/src/wasm/wasm-serialization.cc +++ b/deps/v8/src/wasm/wasm-serialization.cc @@ -13,6 +13,7 @@ #include "src/utils/ostreams.h" #include "src/utils/utils.h" #include "src/utils/version.h" +#include "src/wasm/code-space-access.h" #include "src/wasm/function-compiler.h" #include "src/wasm/module-compiler.h" #include "src/wasm/module-decoder.h" @@ -534,6 +535,7 @@ void NativeModuleDeserializer::ReadCode(int fn_index, Reader* reader) { auto protected_instructions = reader->ReadVector(protected_instructions_size); + CODE_SPACE_WRITE_SCOPE WasmCode* code = native_module_->AddDeserializedCode( fn_index, code_buffer, stack_slot_count, tagged_parameter_slots, safepoint_table_offset, handler_table_offset, 
constant_pool_offset, diff --git a/deps/v8/test/cctest/cctest.status b/deps/v8/test/cctest/cctest.status index 21db27c5d3d4e4..40580c08cd3157 100644 --- a/deps/v8/test/cctest/cctest.status +++ b/deps/v8/test/cctest/cctest.status @@ -176,6 +176,13 @@ 'test-debug/DebugBreakStackTrace': [PASS, SLOW], }], # 'arch == arm64 and simulator_run' +['arch == arm64 and system == macos and not simulator_run', { + # printf, being a variadic function, has a different, stack-based ABI on + # Apple silicon. See: + # https://developer.apple.com/library/archive/documentation/Xcode/Conceptual/iPhoneOSABIReference/Articles/ARM64FunctionCallingConventions.html + 'test-assembler-arm64/printf_no_preserve': [SKIP], +}], # arch == arm64 and system == macos and not simulator_run + ############################################################################## ['variant == nooptimization and (arch == arm or arch == arm64) and simulator_run', { # Slow tests: https://crbug.com/v8/7783 diff --git a/deps/v8/test/cctest/test-assembler-arm64.cc b/deps/v8/test/cctest/test-assembler-arm64.cc index 19da59e1727f80..52aaf3162b1991 100644 --- a/deps/v8/test/cctest/test-assembler-arm64.cc +++ b/deps/v8/test/cctest/test-assembler-arm64.cc @@ -11720,9 +11720,9 @@ TEST(system_msr) { const uint64_t fpcr_core = 0x07C00000; // All FPCR fields (including fields which may be read-as-zero): - // Stride, Len + // Stride, FZ16, Len // IDE, IXE, UFE, OFE, DZE, IOE - const uint64_t fpcr_all = fpcr_core | 0x00379F00; + const uint64_t fpcr_all = fpcr_core | 0x003F9F00; SETUP(); diff --git a/deps/v8/test/cctest/test-code-stub-assembler.cc b/deps/v8/test/cctest/test-code-stub-assembler.cc index 263951b573fd04..f79b848dc1b581 100644 --- a/deps/v8/test/cctest/test-code-stub-assembler.cc +++ b/deps/v8/test/cctest/test-code-stub-assembler.cc @@ -41,8 +41,9 @@ template using TVariable = TypedCodeAssemblerVariable; using PromiseResolvingFunctions = TorqueStructPromiseResolvingFunctions; -int sum10(int a0, int a1, int a2, int a3, 
int a4, int a5, int a6, int a7, - int a8, int a9) { +intptr_t sum10(intptr_t a0, intptr_t a1, intptr_t a2, intptr_t a3, intptr_t a4, + intptr_t a5, intptr_t a6, intptr_t a7, intptr_t a8, + intptr_t a9) { return a0 + a1 + a2 + a3 + a4 + a5 + a6 + a7 + a8 + a9; } diff --git a/deps/v8/test/cctest/test-icache.cc b/deps/v8/test/cctest/test-icache.cc index e8c89b7232b3b0..82baa9fe96212d 100644 --- a/deps/v8/test/cctest/test-icache.cc +++ b/deps/v8/test/cctest/test-icache.cc @@ -6,6 +6,7 @@ #include "src/codegen/macro-assembler-inl.h" #include "src/execution/simulator.h" #include "src/handles/handles-inl.h" +#include "src/wasm/code-space-access.h" #include "test/cctest/cctest.h" #include "test/common/assembler-tester.h" @@ -179,11 +180,15 @@ TEST(TestFlushICacheOfWritableAndExecutable) { CHECK(SetPermissions(GetPlatformPageAllocator(), buffer->start(), buffer->size(), v8::PageAllocator::kReadWriteExecute)); + SwitchMemoryPermissionsToWritable(); FloodWithInc(isolate, buffer.get()); FlushInstructionCache(buffer->start(), buffer->size()); + SwitchMemoryPermissionsToExecutable(); CHECK_EQ(23 + kNumInstr, f.Call(23)); // Call into generated code. + SwitchMemoryPermissionsToWritable(); FloodWithNop(isolate, buffer.get()); FlushInstructionCache(buffer->start(), buffer->size()); + SwitchMemoryPermissionsToExecutable(); CHECK_EQ(23, f.Call(23)); // Call into generated code. 
} } diff --git a/deps/v8/test/cctest/wasm/test-jump-table-assembler.cc b/deps/v8/test/cctest/wasm/test-jump-table-assembler.cc index 99ec7d25ab457c..a0dd4cc33be301 100644 --- a/deps/v8/test/cctest/wasm/test-jump-table-assembler.cc +++ b/deps/v8/test/cctest/wasm/test-jump-table-assembler.cc @@ -8,6 +8,7 @@ #include "src/codegen/macro-assembler-inl.h" #include "src/execution/simulator.h" #include "src/utils/utils.h" +#include "src/wasm/code-space-access.h" #include "src/wasm/jump-table-assembler.h" #include "test/cctest/cctest.h" #include "test/common/assembler-tester.h" @@ -33,7 +34,12 @@ constexpr uint32_t kJumpTableSize = JumpTableAssembler::SizeForNumberOfSlots(kJumpTableSlotCount); // Must be a safe commit page size. +#if V8_OS_MACOSX && V8_HOST_ARCH_ARM64 +// See kAppleArmPageSize in platform-posix.cc. +constexpr size_t kThunkBufferSize = 1 << 14; +#else constexpr size_t kThunkBufferSize = 4 * KB; +#endif #if V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_X64 constexpr uint32_t kAvailableBufferSlots = @@ -154,6 +160,7 @@ class JumpTableRunner : public v8::base::Thread { void Run() override { TRACE("Runner #%d is starting ...\n", runner_id_); + SwitchMemoryPermissionsToExecutable(); GeneratedCode::FromAddress(CcTest::i_isolate(), slot_address_).Call(); TRACE("Runner #%d is stopping ...\n", runner_id_); USE(runner_id_); @@ -176,6 +183,7 @@ class JumpTablePatcher : public v8::base::Thread { void Run() override { TRACE("Patcher %p is starting ...\n", this); + SwitchMemoryPermissionsToWritable(); Address slot_address = slot_start_ + JumpTableAssembler::JumpSlotIndexToOffset(slot_index_); // First, emit code to the two thunks. @@ -235,6 +243,7 @@ TEST(JumpTablePatchingStress) { std::bitset used_thunk_slots; buffer->MakeWritableAndExecutable(); + SwitchMemoryPermissionsToWritable(); // Iterate through jump-table slots to hammer at different alignments within // the jump-table, thereby increasing stress for variable-length ISAs. 
diff --git a/deps/v8/test/unittests/unittests.status b/deps/v8/test/unittests/unittests.status index 20406242778652..96dd893db20568 100644 --- a/deps/v8/test/unittests/unittests.status +++ b/deps/v8/test/unittests/unittests.status @@ -17,6 +17,27 @@ 'RandomNumberGenerator.NextSampleSlowInvalidParam2': [SKIP], }], # system == macos and asan +['system == macos and arch == arm64 and not simulator_run', { + # Throwing C++ exceptions doesn't work; probably because the unittests + # binary is built with -fno-exceptions? + 'LanguageServerJson.LexerError': [SKIP], + 'LanguageServerJson.ParserError': [SKIP], + 'Torque.DoubleUnderScorePrefixIllegalForIdentifiers': [SKIP], + 'Torque.Enums': [SKIP], + 'Torque.ImportNonExistentFile': [SKIP], + + # Test uses fancy signal handling. Needs investigation. + 'MemoryAllocationPermissionsTest.DoTest': [SKIP], + + # cppgc::internal::kGuardPageSize is smaller than kAppleArmPageSize. + 'PageMemoryRegionTest.PlatformUsesGuardPages': [FAIL], + + # Time tick resolution appears to be ~42 microseconds. Tests expect 1 us. + 'TimeTicks.NowResolution': [FAIL], + 'RuntimeCallStatsTest.BasicJavaScript': [SKIP], + 'RuntimeCallStatsTest.FunctionLengthGetter': [SKIP], +}], # system == macos and arch == arm64 and not simulator_run + ############################################################################## ['lite_mode or variant == jitless', { # TODO(v8:7777): Re-enable once wasm is supported in jitless mode. From 098a5b12985415ed93d7afa261f196d2c4c20c5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ole=20Andr=C3=A9=20Vadla=20Ravn=C3=A5s?= Date: Fri, 6 Nov 2020 02:46:17 +0100 Subject: [PATCH 18/98] deps: V8: cherry-pick 086eecbd96b6 Original commit message: [platform] Add Permission::kNoAccessWillJitLater enum value This value is unused for now. This CL is part 1 of a 3-step dance. Part 2 will be teaching Chrome's Platform implementation to accept the new value. Part 3 will then actually use it in V8. 
Bug: chromium:1117591 Change-Id: Ie3aed20d4cc58f3def3be2a3a03bba4c3a37bf44 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2450056 Commit-Queue: Jakob Kummerow Reviewed-by: Michael Lippautz Cr-Commit-Position: refs/heads/master@{#70335} PR-URL: https://github.com/nodejs/node/pull/35986 Reviewed-By: Anna Henningsen Reviewed-By: Richard Lau Reviewed-By: Michael Dawson Reviewed-By: Rich Trott Reviewed-By: Beth Griggs Reviewed-By: Jiawen Geng --- common.gypi | 2 +- deps/v8/include/v8-platform.h | 8 +++++++- deps/v8/test/unittests/heap/unmapper-unittest.cc | 1 + 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/common.gypi b/common.gypi index e610650a01d4ab..cae6662364984c 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.18', + 'v8_embedder_string': '-node.19', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/include/v8-platform.h b/deps/v8/include/v8-platform.h index aae381b080617f..1bf75a1d42fc78 100644 --- a/deps/v8/include/v8-platform.h +++ b/deps/v8/include/v8-platform.h @@ -384,7 +384,13 @@ class PageAllocator { kReadWrite, // TODO(hpayer): Remove this flag. Memory should never be rwx. kReadWriteExecute, - kReadExecute + kReadExecute, + // Set this when reserving memory that will later require kReadWriteExecute + // permissions. The resulting behavior is platform-specific, currently + // this is used to set the MAP_JIT flag on Apple Silicon. + // TODO(jkummerow): Remove this when Wasm has a platform-independent + // w^x implementation. 
+ kNoAccessWillJitLater }; /** diff --git a/deps/v8/test/unittests/heap/unmapper-unittest.cc b/deps/v8/test/unittests/heap/unmapper-unittest.cc index bd476cd1ec1682..a919945d3f4ee7 100644 --- a/deps/v8/test/unittests/heap/unmapper-unittest.cc +++ b/deps/v8/test/unittests/heap/unmapper-unittest.cc @@ -170,6 +170,7 @@ class TrackingPageAllocator : public ::v8::PageAllocator { os << " page: [" << start << ", " << end << "), access: "; switch (access) { case PageAllocator::kNoAccess: + case PageAllocator::kNoAccessWillJitLater: os << "--"; break; case PageAllocator::kRead: From 4b7ba11d675a2309e63ba9d0e046aea362b88e65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ole=20Andr=C3=A9=20Vadla=20Ravn=C3=A5s?= Date: Fri, 6 Nov 2020 02:46:21 +0100 Subject: [PATCH 19/98] deps: V8: cherry-pick 4e077ff0444a Original commit message: [mac] Set MAP_JIT only when necessary This is a "minimal" change to achieve the required goal: seeing that there is only one place where we need to indicate that memory should be reserved with MAP_JIT, we can add a value to the Permissions enum instead of adding a second, orthogonal parameter. That way we avoid changing public API functions, which makes this CL easier to undo once we have platform-independent w^x in Wasm. 
Bug: chromium:1117591 Change-Id: I6333d69ab29d5900c689f08dcc892a5f1c1159b8 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2435365 Commit-Queue: Jakob Kummerow Reviewed-by: Michael Lippautz Reviewed-by: Clemens Backes Cr-Commit-Position: refs/heads/master@{#70379} PR-URL: https://github.com/nodejs/node/pull/35986 Reviewed-By: Anna Henningsen Reviewed-By: Richard Lau Reviewed-By: Michael Dawson Reviewed-By: Rich Trott Reviewed-By: Beth Griggs Reviewed-By: Jiawen Geng --- common.gypi | 2 +- deps/v8/src/base/page-allocator.cc | 10 ++++++++++ deps/v8/src/base/platform/platform-cygwin.cc | 1 + deps/v8/src/base/platform/platform-fuchsia.cc | 1 + deps/v8/src/base/platform/platform-posix.cc | 12 +++++------- deps/v8/src/base/platform/platform-win32.cc | 1 + deps/v8/src/base/platform/platform.h | 5 ++++- deps/v8/src/utils/allocation.cc | 10 ++++++---- deps/v8/src/utils/allocation.h | 6 ++++-- deps/v8/src/wasm/wasm-code-manager.cc | 6 +++++- deps/v8/test/cctest/cctest.status | 1 + 11 files changed, 39 insertions(+), 16 deletions(-) diff --git a/common.gypi b/common.gypi index cae6662364984c..154ba97d9f46cf 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.19', + 'v8_embedder_string': '-node.20', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/src/base/page-allocator.cc b/deps/v8/src/base/page-allocator.cc index 98b2c690960336..62dfc94a83be07 100644 --- a/deps/v8/src/base/page-allocator.cc +++ b/deps/v8/src/base/page-allocator.cc @@ -21,6 +21,8 @@ STATIC_ASSERT_ENUM(PageAllocator::kReadWriteExecute, base::OS::MemoryPermission::kReadWriteExecute); STATIC_ASSERT_ENUM(PageAllocator::kReadExecute, base::OS::MemoryPermission::kReadExecute); +STATIC_ASSERT_ENUM(PageAllocator::kNoAccessWillJitLater, + base::OS::MemoryPermission::kNoAccessWillJitLater); #undef STATIC_ASSERT_ENUM @@ -38,6 +40,14 @@ void* PageAllocator::GetRandomMmapAddr() { void* PageAllocator::AllocatePages(void* hint, size_t size, size_t alignment, PageAllocator::Permission access) { +#if !(V8_OS_MACOSX && V8_HOST_ARCH_ARM64 && defined(MAP_JIT)) + // kNoAccessWillJitLater is only used on Apple Silicon. Map it to regular + // kNoAccess on other platforms, so code doesn't have to handle both enum + // values. 
+ if (access == PageAllocator::kNoAccessWillJitLater) { + access = PageAllocator::kNoAccess; + } +#endif return base::OS::Allocate(hint, size, alignment, static_cast(access)); } diff --git a/deps/v8/src/base/platform/platform-cygwin.cc b/deps/v8/src/base/platform/platform-cygwin.cc index 92a5fbe490f4c3..b9da2f1cd592db 100644 --- a/deps/v8/src/base/platform/platform-cygwin.cc +++ b/deps/v8/src/base/platform/platform-cygwin.cc @@ -33,6 +33,7 @@ namespace { DWORD GetProtectionFromMemoryPermission(OS::MemoryPermission access) { switch (access) { case OS::MemoryPermission::kNoAccess: + case OS::MemoryPermission::kNoAccessWillJitLater: return PAGE_NOACCESS; case OS::MemoryPermission::kRead: return PAGE_READONLY; diff --git a/deps/v8/src/base/platform/platform-fuchsia.cc b/deps/v8/src/base/platform/platform-fuchsia.cc index fa175c39177aea..35a508a140ebd7 100644 --- a/deps/v8/src/base/platform/platform-fuchsia.cc +++ b/deps/v8/src/base/platform/platform-fuchsia.cc @@ -18,6 +18,7 @@ namespace { uint32_t GetProtectionFromMemoryPermission(OS::MemoryPermission access) { switch (access) { case OS::MemoryPermission::kNoAccess: + case OS::MemoryPermission::kNoAccessWillJitLater: return 0; // no permissions case OS::MemoryPermission::kRead: return ZX_VM_PERM_READ; diff --git a/deps/v8/src/base/platform/platform-posix.cc b/deps/v8/src/base/platform/platform-posix.cc index b765ad1897a3cf..4b49968baa053f 100644 --- a/deps/v8/src/base/platform/platform-posix.cc +++ b/deps/v8/src/base/platform/platform-posix.cc @@ -125,6 +125,7 @@ const int kMmapFdOffset = 0; int GetProtectionFromMemoryPermission(OS::MemoryPermission access) { switch (access) { case OS::MemoryPermission::kNoAccess: + case OS::MemoryPermission::kNoAccessWillJitLater: return PROT_NONE; case OS::MemoryPermission::kRead: return PROT_READ; @@ -151,15 +152,12 @@ int GetFlagsForMemoryPermission(OS::MemoryPermission access, #if V8_OS_QNX flags |= MAP_LAZY; #endif // V8_OS_QNX -#if V8_OS_MACOSX && V8_HOST_ARCH_ARM64 && 
defined(MAP_JIT) && \ - !defined(V8_OS_IOS) - // TODO(jkummerow): using the V8_OS_IOS define is a crude approximation - // of the fact that we don't want to set the MAP_JIT flag when - // FLAG_jitless == true, as src/base/ doesn't know any flags. - // TODO(crbug.com/1117591): This is only needed for code spaces. + } +#if V8_OS_MACOSX && V8_HOST_ARCH_ARM64 && defined(MAP_JIT) + if (access == OS::MemoryPermission::kNoAccessWillJitLater) { flags |= MAP_JIT; -#endif } +#endif return flags; } diff --git a/deps/v8/src/base/platform/platform-win32.cc b/deps/v8/src/base/platform/platform-win32.cc index 5db3e343103dd0..6be63dee137a81 100644 --- a/deps/v8/src/base/platform/platform-win32.cc +++ b/deps/v8/src/base/platform/platform-win32.cc @@ -753,6 +753,7 @@ namespace { DWORD GetProtectionFromMemoryPermission(OS::MemoryPermission access) { switch (access) { case OS::MemoryPermission::kNoAccess: + case OS::MemoryPermission::kNoAccessWillJitLater: return PAGE_NOACCESS; case OS::MemoryPermission::kRead: return PAGE_READONLY; diff --git a/deps/v8/src/base/platform/platform.h b/deps/v8/src/base/platform/platform.h index d5f59d1d7a8d8a..c4895a5b274374 100644 --- a/deps/v8/src/base/platform/platform.h +++ b/deps/v8/src/base/platform/platform.h @@ -167,7 +167,10 @@ class V8_BASE_EXPORT OS { kReadWrite, // TODO(hpayer): Remove this flag. Memory should never be rwx. kReadWriteExecute, - kReadExecute + kReadExecute, + // TODO(jkummerow): Remove this when Wasm has a platform-independent + // w^x implementation. 
+ kNoAccessWillJitLater }; static bool HasLazyCommits(); diff --git a/deps/v8/src/utils/allocation.cc b/deps/v8/src/utils/allocation.cc index 6169acbfd6687a..022ac82ea6fa28 100644 --- a/deps/v8/src/utils/allocation.cc +++ b/deps/v8/src/utils/allocation.cc @@ -213,15 +213,17 @@ bool OnCriticalMemoryPressure(size_t length) { VirtualMemory::VirtualMemory() = default; VirtualMemory::VirtualMemory(v8::PageAllocator* page_allocator, size_t size, - void* hint, size_t alignment) + void* hint, size_t alignment, JitPermission jit) : page_allocator_(page_allocator) { DCHECK_NOT_NULL(page_allocator); DCHECK(IsAligned(size, page_allocator_->CommitPageSize())); size_t page_size = page_allocator_->AllocatePageSize(); alignment = RoundUp(alignment, page_size); - Address address = reinterpret_cast
( - AllocatePages(page_allocator_, hint, RoundUp(size, page_size), alignment, - PageAllocator::kNoAccess)); + PageAllocator::Permission permissions = + jit == kMapAsJittable ? PageAllocator::kNoAccessWillJitLater + : PageAllocator::kNoAccess; + Address address = reinterpret_cast
(AllocatePages( + page_allocator_, hint, RoundUp(size, page_size), alignment, permissions)); if (address != kNullAddress) { DCHECK(IsAligned(address, alignment)); region_ = base::AddressRegion(address, size); diff --git a/deps/v8/src/utils/allocation.h b/deps/v8/src/utils/allocation.h index 7106b1c749a893..a82012310b8efe 100644 --- a/deps/v8/src/utils/allocation.h +++ b/deps/v8/src/utils/allocation.h @@ -156,6 +156,8 @@ V8_EXPORT_PRIVATE bool OnCriticalMemoryPressure(size_t length); // Represents and controls an area of reserved memory. class VirtualMemory final { public: + enum JitPermission { kNoJit, kMapAsJittable }; + // Empty VirtualMemory object, controlling no reserved memory. V8_EXPORT_PRIVATE VirtualMemory(); @@ -164,8 +166,8 @@ class VirtualMemory final { // size. The |size| must be aligned with |page_allocator|'s commit page size. // This may not be at the position returned by address(). V8_EXPORT_PRIVATE VirtualMemory(v8::PageAllocator* page_allocator, - size_t size, void* hint, - size_t alignment = 1); + size_t size, void* hint, size_t alignment = 1, + JitPermission jit = kNoJit); // Construct a virtual memory by assigning it some already mapped address // and size. diff --git a/deps/v8/src/wasm/wasm-code-manager.cc b/deps/v8/src/wasm/wasm-code-manager.cc index 8bc6a353405831..fc657d634dba7c 100644 --- a/deps/v8/src/wasm/wasm-code-manager.cc +++ b/deps/v8/src/wasm/wasm-code-manager.cc @@ -1610,7 +1610,11 @@ VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) { if (!BackingStore::ReserveAddressSpace(size)) return {}; if (hint == nullptr) hint = page_allocator->GetRandomMmapAddr(); - VirtualMemory mem(page_allocator, size, hint, allocate_page_size); + // When we start exposing Wasm in jitless mode, then the jitless flag + // will have to determine whether we set kMapAsJittable or not. 
+ DCHECK(!FLAG_jitless); + VirtualMemory mem(page_allocator, size, hint, allocate_page_size, + VirtualMemory::kMapAsJittable); if (!mem.IsReserved()) { BackingStore::ReleaseReservation(size); return {}; diff --git a/deps/v8/test/cctest/cctest.status b/deps/v8/test/cctest/cctest.status index 40580c08cd3157..1f93ec28c299ff 100644 --- a/deps/v8/test/cctest/cctest.status +++ b/deps/v8/test/cctest/cctest.status @@ -496,6 +496,7 @@ 'test-jump-table-assembler/*': [SKIP], 'test-gc/*': [SKIP], 'test-grow-memory/*': [SKIP], + 'test-liftoff-inspection/*': [SKIP], 'test-run-wasm-64/*': [SKIP], 'test-run-wasm-asmjs/*': [SKIP], 'test-run-wasm-atomics64/*': [SKIP], From 8ae3ffe2bea6f05b01e2256235ddadbd96899a8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ole=20Andr=C3=A9=20Vadla=20Ravn=C3=A5s?= Date: Fri, 6 Nov 2020 02:46:24 +0100 Subject: [PATCH 20/98] deps: V8: cherry-pick 1d0f426311d4 Original commit message: [mac-arm64] Fix missing #include For an "#if defined(MAP_JIT)" test to work as expected, must be included in the compilation unit. Bug: chromium:1144200 Change-Id: Ia0bf35ec1872c02457f1fbc0ee6689c7f7d27d4a Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2517689 Reviewed-by: Clemens Backes Reviewed-by: Igor Sheludko Reviewed-by: Nico Weber Commit-Queue: Jakob Kummerow Cr-Commit-Position: refs/heads/master@{#70986} PR-URL: https://github.com/nodejs/node/pull/35986 Reviewed-By: Anna Henningsen Reviewed-By: Richard Lau Reviewed-By: Michael Dawson Reviewed-By: Rich Trott Reviewed-By: Beth Griggs Reviewed-By: Jiawen Geng --- common.gypi | 2 +- deps/v8/src/base/page-allocator.cc | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/common.gypi b/common.gypi index 154ba97d9f46cf..a3f155fde2861a 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.20', + 'v8_embedder_string': '-node.21', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/src/base/page-allocator.cc b/deps/v8/src/base/page-allocator.cc index 62dfc94a83be07..9f48ee79fe5424 100644 --- a/deps/v8/src/base/page-allocator.cc +++ b/deps/v8/src/base/page-allocator.cc @@ -6,6 +6,10 @@ #include "src/base/platform/platform.h" +#if V8_OS_MACOSX +#include // For MAP_JIT. +#endif + namespace v8 { namespace base { From 387d92fd0e468232579f0f863ca532242f9943bc Mon Sep 17 00:00:00 2001 From: rickyes <0x19951125@gmail.com> Date: Wed, 28 Oct 2020 15:14:48 +0800 Subject: [PATCH 21/98] http: onFinish will not be triggered again when finished PR-URL: https://github.com/nodejs/node/pull/35845 Fixes: https://github.com/nodejs/node/issues/35833 Reviewed-By: Robert Nagy Reviewed-By: Matteo Collina Reviewed-By: Rich Trott --- lib/_http_outgoing.js | 6 ++++++ test/parallel/test-http-outgoing-end-multiple.js | 10 +++++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index 14219857e5f899..1ff2de2aab868c 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -814,6 +814,12 @@ OutgoingMessage.prototype.end = function end(chunk, encoding, callback) { } if (chunk) { + if (this.finished) { + onError(this, + new ERR_STREAM_WRITE_AFTER_END(), + typeof callback !== 'function' ? 
nop : callback); + return this; + } write_(this, chunk, encoding, null, true); } else if (this.finished) { if (typeof callback === 'function') { diff --git a/test/parallel/test-http-outgoing-end-multiple.js b/test/parallel/test-http-outgoing-end-multiple.js index 7c43e1f59d5849..ed42c913375e84 100644 --- a/test/parallel/test-http-outgoing-end-multiple.js +++ b/test/parallel/test-http-outgoing-end-multiple.js @@ -3,9 +3,17 @@ const common = require('../common'); const assert = require('assert'); const http = require('http'); +const onWriteAfterEndError = common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); +}, 2); + const server = http.createServer(common.mustCall(function(req, res) { res.end('testing ended state', common.mustCall()); - res.end(common.mustCall()); + res.end(common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED'); + })); + res.end('end', onWriteAfterEndError); + res.on('error', onWriteAfterEndError); res.on('finish', common.mustCall(() => { res.end(common.mustCall((err) => { assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED'); From 56f83e687637a5aa291637eb7e9a5e97ea28069d Mon Sep 17 00:00:00 2001 From: raisinten Date: Thu, 22 Oct 2020 18:57:38 +0530 Subject: [PATCH 22/98] build: refactor configure.py to use argparse MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://github.com/nodejs/node/pull/26725 Fixes: https://github.com/nodejs/node/issues/29813 Refs: https://github.com/nodejs/node/pull/29814 PR-URL: https://github.com/nodejs/node/pull/35755 Reviewed-By: Anna Henningsen Reviewed-By: Tobias Nießen Reviewed-By: Rich Trott Reviewed-By: Christian Clauss --- configure.py | 350 +++++++++++++++++++++++++++++---------------------- 1 file changed, 202 insertions(+), 148 deletions(-) diff --git a/configure.py b/configure.py index eb78598d9d32f0..50994c8c2772e2 100755 --- 
a/configure.py +++ b/configure.py @@ -3,7 +3,7 @@ import json import sys import errno -import optparse +import argparse import os import pipes import pprint @@ -41,7 +41,7 @@ from gyp_node import run_gyp # parse our options -parser = optparse.OptionParser() +parser = argparse.ArgumentParser() valid_os = ('win', 'mac', 'solaris', 'freebsd', 'openbsd', 'linux', 'android', 'aix', 'cloudabi') @@ -57,113 +57,125 @@ icu_versions = json.load(f) # create option groups -shared_optgroup = optparse.OptionGroup(parser, "Shared libraries", +shared_optgroup = parser.add_argument_group("Shared libraries", "Flags that allows you to control whether you want to build against " "built-in dependencies or its shared representations. If necessary, " "provide multiple libraries with comma.") -intl_optgroup = optparse.OptionGroup(parser, "Internationalization", +intl_optgroup = parser.add_argument_group("Internationalization", "Flags that lets you enable i18n features in Node.js as well as which " "library you want to build against.") -http2_optgroup = optparse.OptionGroup(parser, "HTTP2", +http2_optgroup = parser.add_argument_group("HTTP2", "Flags that allows you to control HTTP2 features in Node.js") # Options should be in alphabetical order but keep --prefix at the top, # that's arguably the one people will be looking for most. 
-parser.add_option('--prefix', +parser.add_argument('--prefix', action='store', dest='prefix', default='/usr/local', - help='select the install prefix [default: %default]') + help='select the install prefix [default: %(default)s]') -parser.add_option('--coverage', +parser.add_argument('--coverage', action='store_true', dest='coverage', + default=None, help='Build node with code coverage enabled') -parser.add_option('--debug', +parser.add_argument('--debug', action='store_true', dest='debug', + default=None, help='also build debug build') -parser.add_option('--debug-node', +parser.add_argument('--debug-node', action='store_true', dest='debug_node', + default=None, help='build the Node.js part of the binary with debugging symbols') -parser.add_option('--dest-cpu', +parser.add_argument('--dest-cpu', action='store', dest='dest_cpu', choices=valid_arch, help='CPU architecture to build for ({0})'.format(', '.join(valid_arch))) -parser.add_option('--cross-compiling', +parser.add_argument('--cross-compiling', action='store_true', dest='cross_compiling', default=None, help='force build to be considered as cross compiled') -parser.add_option('--no-cross-compiling', +parser.add_argument('--no-cross-compiling', action='store_false', dest='cross_compiling', default=None, help='force build to be considered as NOT cross compiled') -parser.add_option('--dest-os', +parser.add_argument('--dest-os', action='store', dest='dest_os', choices=valid_os, help='operating system to build for ({0})'.format(', '.join(valid_os))) -parser.add_option('--error-on-warn', +parser.add_argument('--error-on-warn', action='store_true', dest='error_on_warn', + default=None, help='Turn compiler warnings into errors for node core sources.') -parser.add_option('--experimental-quic', +parser.add_argument('--experimental-quic', action='store_true', dest='experimental_quic', + default=None, help='enable experimental quic support') -parser.add_option('--gdb', +parser.add_argument('--gdb', action='store_true', 
dest='gdb', + default=None, help='add gdb support') -parser.add_option('--no-ifaddrs', +parser.add_argument('--no-ifaddrs', action='store_true', dest='no_ifaddrs', + default=None, help='use on deprecated SunOS systems that do not support ifaddrs.h') -parser.add_option("--fully-static", +parser.add_argument("--fully-static", action="store_true", dest="fully_static", + default=None, help="Generate an executable without external dynamic libraries. This " "will not work on OSX when using the default compilation environment") -parser.add_option("--partly-static", +parser.add_argument("--partly-static", action="store_true", dest="partly_static", + default=None, help="Generate an executable with libgcc and libstdc++ libraries. This " "will not work on OSX when using the default compilation environment") -parser.add_option("--enable-pgo-generate", +parser.add_argument("--enable-pgo-generate", action="store_true", dest="enable_pgo_generate", + default=None, help="Enable profiling with pgo of a binary. This feature is only available " "on linux with gcc and g++ 5.4.1 or newer.") -parser.add_option("--enable-pgo-use", +parser.add_argument("--enable-pgo-use", action="store_true", dest="enable_pgo_use", + default=None, help="Enable use of the profile generated with --enable-pgo-generate. This " "feature is only available on linux with gcc and g++ 5.4.1 or newer.") -parser.add_option("--enable-lto", +parser.add_argument("--enable-lto", action="store_true", dest="enable_lto", + default=None, help="Enable compiling with lto of a binary. This feature is only available " "on linux with gcc and g++ 5.4.1 or newer.") -parser.add_option("--link-module", +parser.add_argument("--link-module", action="append", dest="linked_module", help="Path to a JS file to be bundled in the binary as a builtin. " @@ -171,334 +183,355 @@ "e.g. /root/x/y.js will be referenced via require('root/x/y'). 
" "Can be used multiple times") -parser.add_option('--openssl-default-cipher-list', +parser.add_argument('--openssl-default-cipher-list', action='store', dest='openssl_default_cipher_list', help='Use the specified cipher list as the default cipher list') -parser.add_option("--openssl-no-asm", +parser.add_argument("--openssl-no-asm", action="store_true", dest="openssl_no_asm", + default=None, help="Do not build optimized assembly for OpenSSL") -parser.add_option('--openssl-fips', +parser.add_argument('--openssl-fips', action='store', dest='openssl_fips', help='Build OpenSSL using FIPS canister .o file in supplied folder') -parser.add_option('--openssl-is-fips', +parser.add_argument('--openssl-is-fips', action='store_true', dest='openssl_is_fips', + default=None, help='specifies that the OpenSSL library is FIPS compatible') -parser.add_option('--openssl-use-def-ca-store', +parser.add_argument('--openssl-use-def-ca-store', action='store_true', dest='use_openssl_ca_store', + default=None, help='Use OpenSSL supplied CA store instead of compiled-in Mozilla CA copy.') -parser.add_option('--openssl-system-ca-path', +parser.add_argument('--openssl-system-ca-path', action='store', dest='openssl_system_ca_path', help='Use the specified path to system CA (PEM format) in addition to ' 'the OpenSSL supplied CA store or compiled-in Mozilla CA copy.') -parser.add_option('--experimental-http-parser', +parser.add_argument('--experimental-http-parser', action='store_true', dest='experimental_http_parser', + default=None, help='(no-op)') -shared_optgroup.add_option('--shared-http-parser', +shared_optgroup.add_argument('--shared-http-parser', action='store_true', dest='shared_http_parser', + default=None, help='link to a shared http_parser DLL instead of static linking') -shared_optgroup.add_option('--shared-http-parser-includes', +shared_optgroup.add_argument('--shared-http-parser-includes', action='store', dest='shared_http_parser_includes', help='directory containing http_parser 
header files') -shared_optgroup.add_option('--shared-http-parser-libname', +shared_optgroup.add_argument('--shared-http-parser-libname', action='store', dest='shared_http_parser_libname', default='http_parser', - help='alternative lib name to link to [default: %default]') + help='alternative lib name to link to [default: %(default)s]') -shared_optgroup.add_option('--shared-http-parser-libpath', +shared_optgroup.add_argument('--shared-http-parser-libpath', action='store', dest='shared_http_parser_libpath', help='a directory to search for the shared http_parser DLL') -shared_optgroup.add_option('--shared-libuv', +shared_optgroup.add_argument('--shared-libuv', action='store_true', dest='shared_libuv', + default=None, help='link to a shared libuv DLL instead of static linking') -shared_optgroup.add_option('--shared-libuv-includes', +shared_optgroup.add_argument('--shared-libuv-includes', action='store', dest='shared_libuv_includes', help='directory containing libuv header files') -shared_optgroup.add_option('--shared-libuv-libname', +shared_optgroup.add_argument('--shared-libuv-libname', action='store', dest='shared_libuv_libname', default='uv', - help='alternative lib name to link to [default: %default]') + help='alternative lib name to link to [default: %(default)s]') -shared_optgroup.add_option('--shared-libuv-libpath', +shared_optgroup.add_argument('--shared-libuv-libpath', action='store', dest='shared_libuv_libpath', help='a directory to search for the shared libuv DLL') -shared_optgroup.add_option('--shared-nghttp2', +shared_optgroup.add_argument('--shared-nghttp2', action='store_true', dest='shared_nghttp2', + default=None, help='link to a shared nghttp2 DLL instead of static linking') -shared_optgroup.add_option('--shared-nghttp2-includes', +shared_optgroup.add_argument('--shared-nghttp2-includes', action='store', dest='shared_nghttp2_includes', help='directory containing nghttp2 header files') -shared_optgroup.add_option('--shared-nghttp2-libname', 
+shared_optgroup.add_argument('--shared-nghttp2-libname', action='store', dest='shared_nghttp2_libname', default='nghttp2', - help='alternative lib name to link to [default: %default]') + help='alternative lib name to link to [default: %(default)s]') -shared_optgroup.add_option('--shared-nghttp2-libpath', +shared_optgroup.add_argument('--shared-nghttp2-libpath', action='store', dest='shared_nghttp2_libpath', help='a directory to search for the shared nghttp2 DLLs') -shared_optgroup.add_option('--shared-ngtcp2', +shared_optgroup.add_argument('--shared-ngtcp2', action='store_true', dest='shared_ngtcp2', + default=None, help='link to a shared ngtcp2 DLL instead of static linking') -shared_optgroup.add_option('--shared-ngtcp2-includes', +shared_optgroup.add_argument('--shared-ngtcp2-includes', action='store', dest='shared_ngtcp2_includes', help='directory containing ngtcp2 header files') -shared_optgroup.add_option('--shared-ngtcp2-libname', +shared_optgroup.add_argument('--shared-ngtcp2-libname', action='store', dest='shared_ngtcp2_libname', default='ngtcp2', - help='alternative lib name to link to [default: %default]') + help='alternative lib name to link to [default: %(default)s]') -shared_optgroup.add_option('--shared-ngtcp2-libpath', +shared_optgroup.add_argument('--shared-ngtcp2-libpath', action='store', dest='shared_ngtcp2_libpath', help='a directory to search for the shared ngtcp2 DLLs') -shared_optgroup.add_option('--shared-nghttp3', +shared_optgroup.add_argument('--shared-nghttp3', action='store_true', dest='shared_nghttp3', + default=None, help='link to a shared nghttp3 DLL instead of static linking') -shared_optgroup.add_option('--shared-nghttp3-includes', +shared_optgroup.add_argument('--shared-nghttp3-includes', action='store', dest='shared_nghttp3_includes', help='directory containing nghttp3 header files') -shared_optgroup.add_option('--shared-nghttp3-libname', +shared_optgroup.add_argument('--shared-nghttp3-libname', action='store', 
dest='shared_nghttp3_libname', default='nghttp3', - help='alternative lib name to link to [default: %default]') + help='alternative lib name to link to [default: %(default)s]') -shared_optgroup.add_option('--shared-nghttp3-libpath', +shared_optgroup.add_argument('--shared-nghttp3-libpath', action='store', dest='shared_nghttp3_libpath', help='a directory to search for the shared nghttp3 DLLs') -shared_optgroup.add_option('--shared-openssl', +shared_optgroup.add_argument('--shared-openssl', action='store_true', dest='shared_openssl', + default=None, help='link to a shared OpenSSl DLL instead of static linking') -shared_optgroup.add_option('--shared-openssl-includes', +shared_optgroup.add_argument('--shared-openssl-includes', action='store', dest='shared_openssl_includes', help='directory containing OpenSSL header files') -shared_optgroup.add_option('--shared-openssl-libname', +shared_optgroup.add_argument('--shared-openssl-libname', action='store', dest='shared_openssl_libname', default='crypto,ssl', - help='alternative lib name to link to [default: %default]') + help='alternative lib name to link to [default: %(default)s]') -shared_optgroup.add_option('--shared-openssl-libpath', +shared_optgroup.add_argument('--shared-openssl-libpath', action='store', dest='shared_openssl_libpath', help='a directory to search for the shared OpenSSL DLLs') -shared_optgroup.add_option('--shared-zlib', +shared_optgroup.add_argument('--shared-zlib', action='store_true', dest='shared_zlib', + default=None, help='link to a shared zlib DLL instead of static linking') -shared_optgroup.add_option('--shared-zlib-includes', +shared_optgroup.add_argument('--shared-zlib-includes', action='store', dest='shared_zlib_includes', help='directory containing zlib header files') -shared_optgroup.add_option('--shared-zlib-libname', +shared_optgroup.add_argument('--shared-zlib-libname', action='store', dest='shared_zlib_libname', default='z', - help='alternative lib name to link to [default: %default]') + 
help='alternative lib name to link to [default: %(default)s]') -shared_optgroup.add_option('--shared-zlib-libpath', +shared_optgroup.add_argument('--shared-zlib-libpath', action='store', dest='shared_zlib_libpath', help='a directory to search for the shared zlib DLL') -shared_optgroup.add_option('--shared-brotli', +shared_optgroup.add_argument('--shared-brotli', action='store_true', dest='shared_brotli', + default=None, help='link to a shared brotli DLL instead of static linking') -shared_optgroup.add_option('--shared-brotli-includes', +shared_optgroup.add_argument('--shared-brotli-includes', action='store', dest='shared_brotli_includes', help='directory containing brotli header files') -shared_optgroup.add_option('--shared-brotli-libname', +shared_optgroup.add_argument('--shared-brotli-libname', action='store', dest='shared_brotli_libname', default='brotlidec,brotlienc', - help='alternative lib name to link to [default: %default]') + help='alternative lib name to link to [default: %(default)s]') -shared_optgroup.add_option('--shared-brotli-libpath', +shared_optgroup.add_argument('--shared-brotli-libpath', action='store', dest='shared_brotli_libpath', help='a directory to search for the shared brotli DLL') -shared_optgroup.add_option('--shared-cares', +shared_optgroup.add_argument('--shared-cares', action='store_true', dest='shared_cares', + default=None, help='link to a shared cares DLL instead of static linking') -shared_optgroup.add_option('--shared-cares-includes', +shared_optgroup.add_argument('--shared-cares-includes', action='store', dest='shared_cares_includes', help='directory containing cares header files') -shared_optgroup.add_option('--shared-cares-libname', +shared_optgroup.add_argument('--shared-cares-libname', action='store', dest='shared_cares_libname', default='cares', - help='alternative lib name to link to [default: %default]') + help='alternative lib name to link to [default: %(default)s]') -shared_optgroup.add_option('--shared-cares-libpath', 
+shared_optgroup.add_argument('--shared-cares-libpath', action='store', dest='shared_cares_libpath', help='a directory to search for the shared cares DLL') -parser.add_option_group(shared_optgroup) +parser.add_argument_group(shared_optgroup) -parser.add_option('--systemtap-includes', +parser.add_argument('--systemtap-includes', action='store', dest='systemtap_includes', help='directory containing systemtap header files') -parser.add_option('--tag', +parser.add_argument('--tag', action='store', dest='tag', help='custom build tag') -parser.add_option('--release-urlbase', +parser.add_argument('--release-urlbase', action='store', dest='release_urlbase', help='Provide a custom URL prefix for the `process.release` properties ' '`sourceUrl` and `headersUrl`. When compiling a release build, this ' 'will default to https://nodejs.org/download/release/') -parser.add_option('--enable-d8', +parser.add_argument('--enable-d8', action='store_true', dest='enable_d8', - help=optparse.SUPPRESS_HELP) # Unsupported, undocumented. + default=None, + help=argparse.SUPPRESS) # Unsupported, undocumented. -parser.add_option('--enable-trace-maps', +parser.add_argument('--enable-trace-maps', action='store_true', dest='trace_maps', + default=None, help='Enable the --trace-maps flag in V8 (use at your own risk)') -parser.add_option('--experimental-enable-pointer-compression', +parser.add_argument('--experimental-enable-pointer-compression', action='store_true', dest='enable_pointer_compression', + default=None, help='[Experimental] Enable V8 pointer compression (limits max heap to 4GB and breaks ABI compatibility)') -parser.add_option('--v8-options', +parser.add_argument('--v8-options', action='store', dest='v8_options', help='v8 options to pass, see `node --v8-options` for examples.') -parser.add_option('--with-ossfuzz', +parser.add_argument('--with-ossfuzz', action='store_true', dest='ossfuzz', + default=None, help='Enables building of fuzzers. 
This command should be run in an OSS-Fuzz Docker image.') -parser.add_option('--with-arm-float-abi', +parser.add_argument('--with-arm-float-abi', action='store', dest='arm_float_abi', choices=valid_arm_float_abi, help='specifies which floating-point ABI to use ({0}).'.format( ', '.join(valid_arm_float_abi))) -parser.add_option('--with-arm-fpu', +parser.add_argument('--with-arm-fpu', action='store', dest='arm_fpu', choices=valid_arm_fpu, - help='ARM FPU mode ({0}) [default: %default]'.format( + help='ARM FPU mode ({0}) [default: %(default)s]'.format( ', '.join(valid_arm_fpu))) -parser.add_option('--with-mips-arch-variant', +parser.add_argument('--with-mips-arch-variant', action='store', dest='mips_arch_variant', default='r2', choices=valid_mips_arch, - help='MIPS arch variant ({0}) [default: %default]'.format( + help='MIPS arch variant ({0}) [default: %(default)s]'.format( ', '.join(valid_mips_arch))) -parser.add_option('--with-mips-fpu-mode', +parser.add_argument('--with-mips-fpu-mode', action='store', dest='mips_fpu_mode', default='fp32', choices=valid_mips_fpu, - help='MIPS FPU mode ({0}) [default: %default]'.format( + help='MIPS FPU mode ({0}) [default: %(default)s]'.format( ', '.join(valid_mips_fpu))) -parser.add_option('--with-mips-float-abi', +parser.add_argument('--with-mips-float-abi', action='store', dest='mips_float_abi', default='hard', choices=valid_mips_float_abi, - help='MIPS floating-point ABI ({0}) [default: %default]'.format( + help='MIPS floating-point ABI ({0}) [default: %(default)s]'.format( ', '.join(valid_mips_float_abi))) -parser.add_option('--with-dtrace', +parser.add_argument('--with-dtrace', action='store_true', dest='with_dtrace', + default=None, help='build with DTrace (default is true on sunos and darwin)') -parser.add_option('--with-etw', +parser.add_argument('--with-etw', action='store_true', dest='with_etw', + default=None, help='build with ETW (default is true on Windows)') -parser.add_option('--use-largepages', 
+parser.add_argument('--use-largepages', action='store_true', dest='node_use_large_pages', + default=None, help='This option has no effect. --use-largepages is now a runtime option.') -parser.add_option('--use-largepages-script-lld', +parser.add_argument('--use-largepages-script-lld', action='store_true', dest='node_use_large_pages_script_lld', + default=None, help='This option has no effect. --use-largepages is now a runtime option.') -parser.add_option('--use-section-ordering-file', +parser.add_argument('--use-section-ordering-file', action='store', dest='node_section_ordering_info', default='', @@ -506,42 +539,42 @@ 'Node.js be linked using the gold linker. The gold linker must have ' + 'version 1.2 or greater.') -intl_optgroup.add_option('--with-intl', +intl_optgroup.add_argument('--with-intl', action='store', dest='with_intl', default='full-icu', choices=valid_intl_modes, - help='Intl mode (valid choices: {0}) [default: %default]'.format( + help='Intl mode (valid choices: {0}) [default: %(default)s]'.format( ', '.join(valid_intl_modes))) -intl_optgroup.add_option('--without-intl', +intl_optgroup.add_argument('--without-intl', action='store_const', dest='with_intl', const='none', help='Disable Intl, same as --with-intl=none (disables inspector)') -intl_optgroup.add_option('--with-icu-path', +intl_optgroup.add_argument('--with-icu-path', action='store', dest='with_icu_path', help='Path to icu.gyp (ICU i18n, Chromium version only.)') icu_default_locales='root,en' -intl_optgroup.add_option('--with-icu-locales', +intl_optgroup.add_argument('--with-icu-locales', action='store', dest='with_icu_locales', default=icu_default_locales, help='Comma-separated list of locales for "small-icu". "root" is assumed. 
' - '[default: %default]') + '[default: %(default)s]') -intl_optgroup.add_option('--with-icu-source', +intl_optgroup.add_argument('--with-icu-source', action='store', dest='with_icu_source', help='Intl mode: optional local path to icu/ dir, or path/URL of ' 'the icu4c source archive. ' 'v%d.x or later recommended.' % icu_versions['minimum_icu']) -intl_optgroup.add_option('--with-icu-default-data-dir', +intl_optgroup.add_argument('--with-icu-default-data-dir', action='store', dest='with_icu_default_data_dir', help='Path to the icuXXdt{lb}.dat file. If unspecified, ICU data will ' @@ -549,159 +582,179 @@ '--icu-data-dir runtime argument is used. This option has effect ' 'only when Node.js is built with --with-intl=small-icu.') -parser.add_option('--with-ltcg', +parser.add_argument('--with-ltcg', action='store_true', dest='with_ltcg', + default=None, help='Use Link Time Code Generation. This feature is only available on Windows.') -parser.add_option('--without-node-snapshot', +parser.add_argument('--without-node-snapshot', action='store_true', dest='without_node_snapshot', + default=None, help='Turn off V8 snapshot integration. 
Currently experimental.') -parser.add_option('--without-node-code-cache', +parser.add_argument('--without-node-code-cache', action='store_true', dest='without_node_code_cache', + default=None, help='Turn off V8 Code cache integration.') -intl_optgroup.add_option('--download', +intl_optgroup.add_argument('--download', action='store', dest='download_list', help=nodedownload.help()) -intl_optgroup.add_option('--download-path', +intl_optgroup.add_argument('--download-path', action='store', dest='download_path', default='deps', - help='Download directory [default: %default]') + help='Download directory [default: %(default)s]') -parser.add_option_group(intl_optgroup) +parser.add_argument_group(intl_optgroup) -parser.add_option('--debug-lib', +parser.add_argument('--debug-lib', action='store_true', dest='node_debug_lib', + default=None, help='build lib with DCHECK macros') -http2_optgroup.add_option('--debug-nghttp2', +http2_optgroup.add_argument('--debug-nghttp2', action='store_true', dest='debug_nghttp2', + default=None, help='build nghttp2 with DEBUGBUILD (default is false)') -parser.add_option_group(http2_optgroup) +parser.add_argument_group(http2_optgroup) -parser.add_option('--without-dtrace', +parser.add_argument('--without-dtrace', action='store_true', dest='without_dtrace', + default=None, help='build without DTrace') -parser.add_option('--without-etw', +parser.add_argument('--without-etw', action='store_true', dest='without_etw', + default=None, help='build without ETW') -parser.add_option('--without-npm', +parser.add_argument('--without-npm', action='store_true', dest='without_npm', + default=None, help='do not install the bundled npm (package manager)') # Dummy option for backwards compatibility -parser.add_option('--without-report', +parser.add_argument('--without-report', action='store_true', dest='unused_without_report', - help=optparse.SUPPRESS_HELP) + default=None, + help=argparse.SUPPRESS) -parser.add_option('--with-snapshot', 
+parser.add_argument('--with-snapshot', action='store_true', dest='unused_with_snapshot', - help=optparse.SUPPRESS_HELP) + default=None, + help=argparse.SUPPRESS) -parser.add_option('--without-snapshot', +parser.add_argument('--without-snapshot', action='store_true', dest='unused_without_snapshot', - help=optparse.SUPPRESS_HELP) + default=None, + help=argparse.SUPPRESS) -parser.add_option('--without-siphash', +parser.add_argument('--without-siphash', action='store_true', dest='without_siphash', - help=optparse.SUPPRESS_HELP) + default=None, + help=argparse.SUPPRESS) # End dummy list. -parser.add_option('--without-ssl', +parser.add_argument('--without-ssl', action='store_true', dest='without_ssl', + default=None, help='build without SSL (disables crypto, https, inspector, etc.)') -parser.add_option('--without-node-options', +parser.add_argument('--without-node-options', action='store_true', dest='without_node_options', + default=None, help='build without NODE_OPTIONS support') -parser.add_option('--ninja', +parser.add_argument('--ninja', action='store_true', dest='use_ninja', + default=None, help='generate build files for use with Ninja') -parser.add_option('--enable-asan', +parser.add_argument('--enable-asan', action='store_true', dest='enable_asan', + default=None, help='compile for Address Sanitizer to find memory bugs') -parser.add_option('--enable-static', +parser.add_argument('--enable-static', action='store_true', dest='enable_static', + default=None, help='build as static library') -parser.add_option('--no-browser-globals', +parser.add_argument('--no-browser-globals', action='store_true', dest='no_browser_globals', + default=None, help='do not export browser globals like setTimeout, console, etc. 
' + '(This mode is not officially supported for regular applications)') -parser.add_option('--without-inspector', +parser.add_argument('--without-inspector', action='store_true', dest='without_inspector', + default=None, help='disable the V8 inspector protocol') -parser.add_option('--shared', +parser.add_argument('--shared', action='store_true', dest='shared', + default=None, help='compile shared library for embedding node in another project. ' + '(This mode is not officially supported for regular applications)') -parser.add_option('--without-v8-platform', +parser.add_argument('--without-v8-platform', action='store_true', dest='without_v8_platform', default=False, help='do not initialize v8 platform during node.js startup. ' + '(This mode is not officially supported for regular applications)') -parser.add_option('--without-bundled-v8', +parser.add_argument('--without-bundled-v8', action='store_true', dest='without_bundled_v8', default=False, help='do not use V8 includes from the bundled deps folder. 
' + '(This mode is not officially supported for regular applications)') -parser.add_option('--verbose', +parser.add_argument('--verbose', action='store_true', dest='verbose', default=False, help='get more output from this script') -parser.add_option('--v8-non-optimized-debug', +parser.add_argument('--v8-non-optimized-debug', action='store_true', dest='v8_non_optimized_debug', default=False, help='compile V8 with minimal optimizations and with runtime checks') -parser.add_option('--v8-with-dchecks', +parser.add_argument('--v8-with-dchecks', action='store_true', dest='v8_with_dchecks', default=False, help='compile V8 with debug checks and runtime debugging features enabled') -parser.add_option('--v8-lite-mode', +parser.add_argument('--v8-lite-mode', action='store_true', dest='v8_lite_mode', default=False, @@ -709,25 +762,26 @@ 'memory footprint, but also implies no just-in-time compilation ' + 'support, thus much slower execution)') -parser.add_option('--v8-enable-object-print', +parser.add_argument('--v8-enable-object-print', action='store_true', dest='v8_enable_object_print', default=True, help='compile V8 with auxiliar functions for native debuggers') -parser.add_option('--node-builtin-modules-path', +parser.add_argument('--node-builtin-modules-path', action='store', dest='node_builtin_modules_path', default=False, help='node will load builtin modules from disk instead of from binary') # Create compile_commands.json in out/Debug and out/Release. -parser.add_option('-C', +parser.add_argument('-C', action='store_true', dest='compile_commands_json', - help=optparse.SUPPRESS_HELP) + default=None, + help=argparse.SUPPRESS) -(options, args) = parser.parse_args() +(options, args) = parser.parse_known_args() # Expand ~ in the install prefix now, it gets written to multiple files. 
options.prefix = os.path.expanduser(options.prefix or '') From 5729478509a11f53afe65fa1b569d920e86d1803 Mon Sep 17 00:00:00 2001 From: Gireesh Punathil Date: Mon, 2 Nov 2020 23:45:09 -0500 Subject: [PATCH 23/98] src: add loop idle time in diagnostic report Add libuv's cumulative idle time in the diagnostic report. Add the data under the libuv's loop section Refs: https://github.com/nodejs/node/pull/34938 PR-URL: https://github.com/nodejs/node/pull/35940 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Rich Trott Reviewed-By: Richard Lau --- doc/api/report.md | 3 ++- src/node_report.cc | 4 ++++ test/report/test-report-uv-handles.js | 3 +++ 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/doc/api/report.md b/doc/api/report.md index 9ee65280a8aa41..accf3ca21044fd 100644 --- a/doc/api/report.md +++ b/doc/api/report.md @@ -292,7 +292,8 @@ is provided below for reference. { "type": "loop", "is_active": true, - "address": "0x000055fc7b2cb180" + "address": "0x000055fc7b2cb180", + "loopIdleTimeSeconds": 22644.8 } ], "workers": [], diff --git a/src/node_report.cc b/src/node_report.cc index 3b97bb705b6985..13f87b1e52e5d6 100644 --- a/src/node_report.cc +++ b/src/node_report.cc @@ -294,6 +294,10 @@ static void WriteNodeReport(Isolate* isolate, static_cast(uv_loop_alive(env->event_loop()))); writer.json_keyvalue("address", ValueToHexString(reinterpret_cast(env->event_loop()))); + + // Report Event loop idle time + uint64_t idle_time = uv_metrics_idle_time(env->event_loop()); + writer.json_keyvalue("loopIdleTimeSeconds", 1.0 * idle_time / 1e9); writer.json_end(); } diff --git a/test/report/test-report-uv-handles.js b/test/report/test-report-uv-handles.js index 3a6a34a8573fe7..d2dd630c46d25f 100644 --- a/test/report/test-report-uv-handles.js +++ b/test/report/test-report-uv-handles.js @@ -128,6 +128,9 @@ if (process.argv[2] === 'child') { assert.strictEqual(handle.filename, expected_filename); assert(handle.is_referenced); 
}), + loop: common.mustCall(function loop_validator(handle) { + assert.strictEqual(typeof handle.loopIdleTimeSeconds, 'number'); + }), pipe: common.mustCallAtLeast(function pipe_validator(handle) { assert(handle.is_referenced); }), From 841a2812d0c241619ee4aa2517b2b46691eaed61 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 10 Nov 2020 06:09:08 -0800 Subject: [PATCH 24/98] doc: fix typo in debugger.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Use apostrophe for possessive. PR-URL: https://github.com/nodejs/node/pull/36066 Reviewed-By: Antoine du Hamel Reviewed-By: Benjamin Gruenbaum Reviewed-By: Gireesh Punathil Reviewed-By: Luigi Pinca Reviewed-By: Juan José Arboleda --- doc/api/debugger.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/debugger.md b/doc/api/debugger.md index b6dd45c7000b46..f17750ac116302 100644 --- a/doc/api/debugger.md +++ b/doc/api/debugger.md @@ -115,7 +115,7 @@ To begin watching an expression, type `watch('my_expression')`. The command * `setBreakpoint()`, `sb()`: Set breakpoint on current line * `setBreakpoint(line)`, `sb(line)`: Set breakpoint on specific line * `setBreakpoint('fn()')`, `sb(...)`: Set breakpoint on a first statement in - functions body + function's body * `setBreakpoint('script.js', 1)`, `sb(...)`: Set breakpoint on first line of `script.js` * `setBreakpoint('script.js', 1, 'num < 4')`, `sb(...)`: Set conditional From 6a4cc43028053da4aff2090b6b9c02807a22c255 Mon Sep 17 00:00:00 2001 From: Aleksandr Krutko Date: Mon, 9 Nov 2020 22:05:00 +0200 Subject: [PATCH 25/98] test: replace var with const Replace `var` with `const` in vm context for test-util-inspect.js. 
PR-URL: https://github.com/nodejs/node/pull/36069 Reviewed-By: Benjamin Gruenbaum Reviewed-By: Daijiro Wachi Reviewed-By: Rich Trott Reviewed-By: Gireesh Punathil --- test/parallel/test-util-inspect.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/parallel/test-util-inspect.js b/test/parallel/test-util-inspect.js index 70f2ff64328b64..4d7232b9fe51b0 100644 --- a/test/parallel/test-util-inspect.js +++ b/test/parallel/test-util-inspect.js @@ -583,9 +583,9 @@ assert.strictEqual(util.inspect(-5e-324), '-5e-324'); { let obj = vm.runInNewContext('(function(){return {}})()', {}); assert.strictEqual(util.inspect(obj), '{}'); - obj = vm.runInNewContext('var m=new Map();m.set(1,2);m', {}); + obj = vm.runInNewContext('const m=new Map();m.set(1,2);m', {}); assert.strictEqual(util.inspect(obj), 'Map(1) { 1 => 2 }'); - obj = vm.runInNewContext('var s=new Set();s.add(1);s.add(2);s', {}); + obj = vm.runInNewContext('const s=new Set();s.add(1);s.add(2);s', {}); assert.strictEqual(util.inspect(obj), 'Set(2) { 1, 2 }'); obj = vm.runInNewContext('fn=function(){};new Promise(fn,fn)', {}); assert.strictEqual(util.inspect(obj), 'Promise { }'); From fbe210b2a1be6635917fcf5d66e5d92b15e22e17 Mon Sep 17 00:00:00 2001 From: Brian Ingenito <28159742+bingenito@users.noreply.github.com> Date: Fri, 6 Nov 2020 16:18:36 -0500 Subject: [PATCH 26/98] build: conditionally clear vcinstalldir For scenario where target env is explicitly specified as vs2019, do not clear VCINSTALLDIR which was being cleared to handle fallback to vs2017 block when attempting to find a matching available VS. 
Fixes: https://github.com/nodejs/node/issues/35856 PR-URL: https://github.com/nodejs/node/pull/36009 Reviewed-By: Richard Lau Reviewed-By: Rich Trott --- vcbuild.bat | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vcbuild.bat b/vcbuild.bat index 86a986b3dec2f3..179da60d2646a2 100644 --- a/vcbuild.bat +++ b/vcbuild.bat @@ -249,7 +249,7 @@ echo Looking for Visual Studio 2019 @rem VCINSTALLDIR may be set if run from a VS Command Prompt and needs to be @rem cleared first as vswhere_usability_wrapper.cmd doesn't when it fails to @rem detect the version searched for -set "VCINSTALLDIR=" +if not defined target_env set "VCINSTALLDIR=" call tools\msvs\vswhere_usability_wrapper.cmd "[16.0,17.0)" if "_%VCINSTALLDIR%_" == "__" goto msbuild-not-found set "WIXSDKDIR=%WIX%\SDK\VS2017" From 122797e87fe0535fb7e682ac3dc69088cce621c7 Mon Sep 17 00:00:00 2001 From: Yash Ladha Date: Wed, 9 Sep 2020 21:08:20 +0530 Subject: [PATCH 27/98] src: remove duplicate logic for getting buffer We were fetching the buffer from the float array to send out the response in js land, however that logic is being duplicated in node_process.h. Now they will be using an inline to fetch the array buffers and making it more generic. 
PR-URL: https://github.com/nodejs/node/pull/34553 Reviewed-By: Anna Henningsen Reviewed-By: Joyee Cheung --- src/node_process_methods.cc | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/src/node_process_methods.cc b/src/node_process_methods.cc index 6e7b1c929468c9..4a6d767f9b758c 100644 --- a/src/node_process_methods.cc +++ b/src/node_process_methods.cc @@ -89,6 +89,16 @@ static void Chdir(const FunctionCallbackInfo& args) { } } +inline Local get_fields_array_buffer( + const FunctionCallbackInfo& args, + size_t index, + size_t array_length) { + CHECK(args[index]->IsFloat64Array()); + Local arr = args[index].As(); + CHECK_EQ(arr->Length(), array_length); + return arr->Buffer(); +} + // CPUUsage use libuv's uv_getrusage() this-process resource usage accessor, // to access ru_utime (user CPU time used) and ru_stime (system CPU time used), // which are uv_timeval_t structs (long tv_sec, long tv_usec). @@ -104,10 +114,7 @@ static void CPUUsage(const FunctionCallbackInfo& args) { return env->ThrowUVException(err, "uv_getrusage"); // Get the double array pointer from the Float64Array argument. - CHECK(args[0]->IsFloat64Array()); - Local array = args[0].As(); - CHECK_EQ(array->Length(), 2); - Local ab = array->Buffer(); + Local ab = get_fields_array_buffer(args, 0, 2); double* fields = static_cast(ab->GetBackingStore()->Data()); // Set the Float64Array elements to be user / system values in microseconds. @@ -174,10 +181,7 @@ static void MemoryUsage(const FunctionCallbackInfo& args) { env->isolate_data()->node_allocator(); // Get the double array pointer from the Float64Array argument. 
- CHECK(args[0]->IsFloat64Array()); - Local array = args[0].As(); - CHECK_EQ(array->Length(), 5); - Local ab = array->Buffer(); + Local ab = get_fields_array_buffer(args, 0, 5); double* fields = static_cast(ab->GetBackingStore()->Data()); fields[0] = rss; @@ -263,10 +267,7 @@ static void ResourceUsage(const FunctionCallbackInfo& args) { if (err) return env->ThrowUVException(err, "uv_getrusage"); - CHECK(args[0]->IsFloat64Array()); - Local array = args[0].As(); - CHECK_EQ(array->Length(), 16); - Local ab = array->Buffer(); + Local ab = get_fields_array_buffer(args, 0, 16); double* fields = static_cast(ab->GetBackingStore()->Data()); fields[0] = MICROS_PER_SEC * rusage.ru_utime.tv_sec + rusage.ru_utime.tv_usec; From 34aa0c868ef65dbcf2990b125c9931910f98d93f Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Fri, 6 Nov 2020 15:20:06 +0100 Subject: [PATCH 28/98] assert: refactor to use more primordials PR-URL: https://github.com/nodejs/node/pull/35998 Reviewed-By: Rich Trott --- lib/internal/assert/assertion_error.js | 52 +++++++++++++++----------- lib/internal/assert/calltracker.js | 9 +++-- 2 files changed, 36 insertions(+), 25 deletions(-) diff --git a/lib/internal/assert/assertion_error.js b/lib/internal/assert/assertion_error.js index ff2d2dbc617557..c13a9d1ca1c3d8 100644 --- a/lib/internal/assert/assertion_error.js +++ b/lib/internal/assert/assertion_error.js @@ -1,6 +1,8 @@ 'use strict'; const { + ArrayPrototypeJoin, + ArrayPrototypePop, Error, ErrorCaptureStackTrace, MathMax, @@ -9,6 +11,10 @@ const { ObjectGetPrototypeOf, ObjectKeys, String, + StringPrototypeEndsWith, + StringPrototypeRepeat, + StringPrototypeSlice, + StringPrototypeSplit, } = primordials; const { inspect } = require('internal/util/inspect'); @@ -79,8 +85,8 @@ function createErrDiff(actual, expected, operator) { let end = ''; let skipped = false; const actualInspected = inspectValue(actual); - const actualLines = actualInspected.split('\n'); - const expectedLines = 
inspectValue(expected).split('\n'); + const actualLines = StringPrototypeSplit(actualInspected, '\n'); + const expectedLines = StringPrototypeSplit(inspectValue(expected), '\n'); let i = 0; let indicator = ''; @@ -127,7 +133,7 @@ function createErrDiff(actual, expected, operator) { if (i > 2) { // Add position indicator for the first mismatch in case it is a // single line and the input length is less than the column length. - indicator = `\n ${' '.repeat(i)}^`; + indicator = `\n ${StringPrototypeRepeat(' ', i)}^`; i = 0; } } @@ -144,8 +150,8 @@ function createErrDiff(actual, expected, operator) { } else { other = a; } - actualLines.pop(); - expectedLines.pop(); + ArrayPrototypePop(actualLines); + ArrayPrototypePop(expectedLines); if (actualLines.length === 0 || expectedLines.length === 0) break; a = actualLines[actualLines.length - 1]; @@ -157,18 +163,19 @@ function createErrDiff(actual, expected, operator) { // E.g., assert.deepStrictEqual({ a: Symbol() }, { a: Symbol() }) if (maxLines === 0) { // We have to get the result again. The lines were all removed before. - const actualLines = actualInspected.split('\n'); + const actualLines = StringPrototypeSplit(actualInspected, '\n'); // Only remove lines in case it makes sense to collapse those. // TODO: Accept env to always show the full error. if (actualLines.length > 50) { actualLines[46] = `${blue}...${white}`; while (actualLines.length > 47) { - actualLines.pop(); + ArrayPrototypePop(actualLines); } } - return `${kReadableOperator.notIdentical}\n\n${actualLines.join('\n')}\n`; + return `${kReadableOperator.notIdentical}\n\n` + + `${ArrayPrototypeJoin(actualLines, '\n')}\n`; } // There were at least five identical lines at the end. Mark a couple of @@ -235,9 +242,10 @@ function createErrDiff(actual, expected, operator) { // If the lines diverge, specifically check for lines that only diverge by // a trailing comma. In that case it is actually identical and we should // mark it as such. 
- let divergingLines = actualLine !== expectedLine && - (!actualLine.endsWith(',') || - actualLine.slice(0, -1) !== expectedLine); + let divergingLines = + actualLine !== expectedLine && + (!StringPrototypeEndsWith(actualLine, ',') || + StringPrototypeSlice(actualLine, 0, -1) !== expectedLine); // If the expected line has a trailing comma but is otherwise identical, // add a comma at the end of the actual line. Otherwise the output could // look weird as in: @@ -248,8 +256,8 @@ function createErrDiff(actual, expected, operator) { // ] // if (divergingLines && - expectedLine.endsWith(',') && - expectedLine.slice(0, -1) === actualLine) { + StringPrototypeEndsWith(expectedLine, ',') && + StringPrototypeSlice(expectedLine, 0, -1) === actualLine) { divergingLines = false; actualLine += ','; } @@ -362,7 +370,7 @@ class AssertionError extends Error { // In case the objects are equal but the operator requires unequal, show // the first object and say A equals B let base = kReadableOperator[operator]; - const res = inspectValue(actual).split('\n'); + const res = StringPrototypeSplit(inspectValue(actual), '\n'); // In case "actual" is an object or a function, it should not be // reference equal. @@ -377,7 +385,7 @@ class AssertionError extends Error { if (res.length > 50) { res[46] = `${blue}...${white}`; while (res.length > 47) { - res.pop(); + ArrayPrototypePop(res); } } @@ -385,7 +393,7 @@ class AssertionError extends Error { if (res.length === 1) { super(`${base}${res[0].length > 5 ? 
'\n\n' : ' '}${res[0]}`); } else { - super(`${base}\n\n${res.join('\n')}\n`); + super(`${base}\n\n${ArrayPrototypeJoin(res, '\n')}\n`); } } else { let res = inspectValue(actual); @@ -394,15 +402,15 @@ class AssertionError extends Error { if (operator === 'notDeepEqual' && res === other) { res = `${knownOperator}\n\n${res}`; if (res.length > 1024) { - res = `${res.slice(0, 1021)}...`; + res = `${StringPrototypeSlice(res, 0, 1021)}...`; } super(res); } else { if (res.length > 512) { - res = `${res.slice(0, 509)}...`; + res = `${StringPrototypeSlice(res, 0, 509)}...`; } if (other.length > 512) { - other = `${other.slice(0, 509)}...`; + other = `${StringPrototypeSlice(other, 0, 509)}...`; } if (operator === 'deepEqual') { res = `${knownOperator}\n\n${res}\n\nshould loosely deep-equal\n\n`; @@ -463,12 +471,12 @@ class AssertionError extends Error { for (const name of ['actual', 'expected']) { if (typeof this[name] === 'string') { - const lines = this[name].split('\n'); + const lines = StringPrototypeSplit(this[name], '\n'); if (lines.length > 10) { lines.length = 10; - this[name] = `${lines.join('\n')}\n...`; + this[name] = `${ArrayPrototypeJoin(lines, '\n')}\n...`; } else if (this[name].length > 512) { - this[name] = `${this[name].slice(512)}...`; + this[name] = `${StringPrototypeSlice(this[name], 512)}...`; } } } diff --git a/lib/internal/assert/calltracker.js b/lib/internal/assert/calltracker.js index 74f517f3f9e99b..d45fb67d611e8b 100644 --- a/lib/internal/assert/calltracker.js +++ b/lib/internal/assert/calltracker.js @@ -1,7 +1,10 @@ 'use strict'; const { + ArrayPrototypePush, Error, + FunctionPrototype, + ReflectApply, SafeSet, } = primordials; @@ -15,7 +18,7 @@ const { validateUint32, } = require('internal/validators'); -const noop = () => {}; +const noop = FunctionPrototype; class CallTracker { @@ -55,7 +58,7 @@ class CallTracker { if (context.actual === context.exact + 1) { callChecks.add(context); } - return fn.apply(this, arguments); + return ReflectApply(fn, 
this, arguments); }; } @@ -67,7 +70,7 @@ class CallTracker { const message = `Expected the ${context.name} function to be ` + `executed ${context.exact} time(s) but was ` + `executed ${context.actual} time(s).`; - errors.push({ + ArrayPrototypePush(errors, { message, actual: context.actual, expected: context.exact, From 0b7082246159ec2b2f601469dfc9d1daa899424f Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Fri, 6 Nov 2020 18:49:07 +0100 Subject: [PATCH 29/98] child_process: refactor to use more primordials PR-URL: https://github.com/nodejs/node/pull/36003 Reviewed-By: Rich Trott --- lib/internal/child_process/serialization.js | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/lib/internal/child_process/serialization.js b/lib/internal/child_process/serialization.js index df8a6ca67236c5..51c8efc1f21f45 100644 --- a/lib/internal/child_process/serialization.js +++ b/lib/internal/child_process/serialization.js @@ -3,7 +3,9 @@ const { JSONParse, JSONStringify, + StringPrototypeSplit, Symbol, + TypedArrayPrototypeSubarray, } = primordials; const { Buffer } = require('buffer'); const { StringDecoder } = require('string_decoder'); @@ -63,8 +65,8 @@ const advanced = { } const deserializer = new ChildProcessDeserializer( - messageBuffer.subarray(4, 4 + size)); - messageBuffer = messageBuffer.subarray(4 + size); + TypedArrayPrototypeSubarray(messageBuffer, 4, 4 + size)); + messageBuffer = TypedArrayPrototypeSubarray(messageBuffer, 4 + size); deserializer.readHeader(); yield deserializer.readValue(); @@ -98,7 +100,8 @@ const json = { if (channel[kStringDecoder] === undefined) channel[kStringDecoder] = new StringDecoder('utf8'); - const chunks = channel[kStringDecoder].write(readData).split('\n'); + const chunks = + StringPrototypeSplit(channel[kStringDecoder].write(readData), '\n'); const numCompleteChunks = chunks.length - 1; // Last line does not have trailing linebreak const incompleteChunk = chunks[numCompleteChunks]; From 
63a138e02f3581b457c483e4f2f0651065f3037c Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Wed, 11 Nov 2020 18:55:03 +0100 Subject: [PATCH 30/98] crypto: fix passing TypedArray to webcrypto AES methods MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://www.w3.org/TR/WebCryptoAPI/#subtlecrypto-interface Fixes: https://github.com/nodejs/node/issues/36083 PR-URL: https://github.com/nodejs/node/pull/36087 Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Rich Trott Reviewed-By: Tobias Nießen --- lib/internal/crypto/aes.js | 8 +++- .../test-webcrypto-encrypt-decrypt-aes.js | 40 ++++++++++++++++++- 2 files changed, 45 insertions(+), 3 deletions(-) diff --git a/lib/internal/crypto/aes.js b/lib/internal/crypto/aes.js index f0814c725469e6..ab3ee099cd0d69 100644 --- a/lib/internal/crypto/aes.js +++ b/lib/internal/crypto/aes.js @@ -1,6 +1,7 @@ 'use strict'; const { + ArrayBufferIsView, ArrayBufferPrototypeSlice, ArrayFrom, ArrayPrototypeIncludes, @@ -8,6 +9,7 @@ const { MathFloor, Promise, SafeSet, + TypedArrayPrototypeSlice, } = primordials; const { @@ -183,8 +185,10 @@ function asyncAesGcmCipher( let tag; switch (mode) { case kWebCryptoCipherDecrypt: - tag = ArrayBufferPrototypeSlice(data, -tagByteLength); - data = ArrayBufferPrototypeSlice(data, 0, -tagByteLength); + const slice = ArrayBufferIsView(data) ? 
+ TypedArrayPrototypeSlice : ArrayBufferPrototypeSlice; + tag = slice(data, -tagByteLength); + data = slice(data, 0, -tagByteLength); break; case kWebCryptoCipherEncrypt: tag = tagByteLength; diff --git a/test/parallel/test-webcrypto-encrypt-decrypt-aes.js b/test/parallel/test-webcrypto-encrypt-decrypt-aes.js index ec1635c991ce13..38d2b70bcb0567 100644 --- a/test/parallel/test-webcrypto-encrypt-decrypt-aes.js +++ b/test/parallel/test-webcrypto-encrypt-decrypt-aes.js @@ -6,7 +6,7 @@ if (!common.hasCrypto) common.skip('missing crypto'); const assert = require('assert'); -const { subtle } = require('crypto').webcrypto; +const { getRandomValues, subtle } = require('crypto').webcrypto; async function testEncrypt({ keyBuffer, algorithm, plaintext, result }) { const key = await subtle.importKey( @@ -196,3 +196,41 @@ async function testDecrypt({ keyBuffer, algorithm, result }) { await Promise.all(variations); })().then(common.mustCall()); } + +{ + (async function() { + const secretKey = await subtle.generateKey( + { + name: 'AES-GCM', + length: 256, + }, + false, + ['encrypt', 'decrypt'], + ); + + const iv = getRandomValues(new Uint8Array(12)); + const aad = getRandomValues(new Uint8Array(32)); + + const encrypted = await subtle.encrypt( + { + name: 'AES-GCM', + iv, + additionalData: aad, + tagLength: 128 + }, + secretKey, + getRandomValues(new Uint8Array(32)) + ); + + await subtle.decrypt( + { + name: 'AES-GCM', + iv, + additionalData: aad, + tagLength: 128, + }, + secretKey, + new Uint8Array(encrypted), + ); + })().then(common.mustCall()); +} From e54108f2e4a5e1c37628ae4dda5f5d8fec55d079 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Fri, 6 Nov 2020 23:37:39 +0100 Subject: [PATCH 31/98] cluster: refactor to use more primordials PR-URL: https://github.com/nodejs/node/pull/36011 Reviewed-By: Rich Trott --- lib/internal/cluster/child.js | 30 +++++++++++++--------- lib/internal/cluster/master.js | 24 +++++++++++------ 
lib/internal/cluster/round_robin_handle.js | 12 +++++---- lib/internal/cluster/shared_handle.js | 4 +-- lib/internal/cluster/utils.js | 7 ++--- lib/internal/cluster/worker.js | 7 ++--- 6 files changed, 51 insertions(+), 33 deletions(-) diff --git a/lib/internal/cluster/child.js b/lib/internal/cluster/child.js index 74f30c0d2ece90..90dce42fa6fa70 100644 --- a/lib/internal/cluster/child.js +++ b/lib/internal/cluster/child.js @@ -1,8 +1,11 @@ 'use strict'; const { - Map, + ArrayPrototypeJoin, + FunctionPrototype, ObjectAssign, + ReflectApply, + SafeMap, } = primordials; const assert = require('internal/assert'); @@ -12,9 +15,9 @@ const { owner_symbol } = require('internal/async_hooks').symbols; const Worker = require('internal/cluster/worker'); const { internal, sendHelper } = require('internal/cluster/utils'); const cluster = new EventEmitter(); -const handles = new Map(); -const indexes = new Map(); -const noop = () => {}; +const handles = new SafeMap(); +const indexes = new SafeMap(); +const noop = FunctionPrototype; module.exports = cluster; @@ -49,7 +52,7 @@ cluster._setupWorker = function() { if (message.act === 'newconn') onconnection(message, handle); else if (message.act === 'disconnect') - _disconnect.call(worker, true); + ReflectApply(_disconnect, worker, [true]); } }; @@ -62,10 +65,13 @@ cluster._getServer = function(obj, options, cb) { process.platform !== 'win32') address = path.resolve(address); - const indexesKey = [address, - options.port, - options.addressType, - options.fd ].join(':'); + const indexesKey = ArrayPrototypeJoin( + [ + address, + options.port, + options.addressType, + options.fd, + ], ':'); let index = indexes.get(indexesKey); @@ -119,7 +125,7 @@ function shared(message, handle, indexesKey, cb) { send({ act: 'close', key }); handles.delete(key); indexes.delete(indexesKey); - return close.apply(handle, arguments); + return ReflectApply(close, handle, arguments); }; assert(handles.has(key) === false); handles.set(key, handle); @@ -228,9 
+234,9 @@ function _disconnect(masterInitiated) { // Extend generic Worker with methods specific to worker processes. Worker.prototype.disconnect = function() { - if (![ 'disconnecting', 'destroying' ].includes(this.state)) { + if (this.state !== 'disconnecting' && this.state !== 'destroying') { this.state = 'disconnecting'; - _disconnect.call(this); + ReflectApply(_disconnect, this, []); } return this; diff --git a/lib/internal/cluster/master.js b/lib/internal/cluster/master.js index 9e2c7cbecb9963..2ae18165695be7 100644 --- a/lib/internal/cluster/master.js +++ b/lib/internal/cluster/master.js @@ -1,9 +1,14 @@ 'use strict'; const { - Map, + ArrayPrototypePush, + ArrayPrototypeSlice, + ArrayPrototypeSome, ObjectKeys, ObjectValues, + RegExpPrototypeTest, + SafeMap, + StringPrototypeStartsWith, } = primordials; const assert = require('internal/assert'); @@ -23,7 +28,7 @@ const { validatePort } = require('internal/validators'); module.exports = cluster; -const handles = new Map(); +const handles = new SafeMap(); cluster.isWorker = false; cluster.isMaster = true; cluster.Worker = Worker; @@ -53,7 +58,7 @@ cluster.schedulingPolicy = schedulingPolicy; cluster.setupMaster = function(options) { const settings = { - args: process.argv.slice(2), + args: ArrayPrototypeSlice(process.argv, 2), exec: process.argv[1], execArgv: process.execArgv, silent: false, @@ -65,8 +70,10 @@ cluster.setupMaster = function(options) { // Without --logfile=v8-%p.log, everything ends up in a single, unusable // file. (Unusable because what V8 logs are memory addresses and each // process has its own memory mappings.) 
- if (settings.execArgv.some((s) => s.startsWith('--prof')) && - !settings.execArgv.some((s) => s.startsWith('--logfile='))) { + if (ArrayPrototypeSome(settings.execArgv, + (s) => StringPrototypeStartsWith(s, '--prof')) && + !ArrayPrototypeSome(settings.execArgv, + (s) => StringPrototypeStartsWith(s, '--logfile='))) { settings.execArgv = [...settings.execArgv, '--logfile=v8-%p.log']; } @@ -109,8 +116,9 @@ function createWorkerProcess(id, env) { const nodeOptions = process.env.NODE_OPTIONS ? process.env.NODE_OPTIONS : ''; - if (execArgv.some((arg) => arg.match(debugArgRegex)) || - nodeOptions.match(debugArgRegex)) { + if (ArrayPrototypeSome(execArgv, + (arg) => RegExpPrototypeTest(debugArgRegex, arg)) || + RegExpPrototypeTest(debugArgRegex, nodeOptions)) { let inspectPort; if ('inspectPort' in cluster.settings) { if (typeof cluster.settings.inspectPort === 'function') @@ -126,7 +134,7 @@ function createWorkerProcess(id, env) { debugPortOffset++; } - execArgv.push(`--inspect-port=${inspectPort}`); + ArrayPrototypePush(execArgv, `--inspect-port=${inspectPort}`); } return fork(cluster.settings.exec, cluster.settings.args, { diff --git a/lib/internal/cluster/round_robin_handle.js b/lib/internal/cluster/round_robin_handle.js index 492fd725c82f1d..5dc53ef78b1a6e 100644 --- a/lib/internal/cluster/round_robin_handle.js +++ b/lib/internal/cluster/round_robin_handle.js @@ -2,8 +2,10 @@ const { ArrayIsArray, + ArrayPrototypePush, + ArrayPrototypeShift, Boolean, - Map, + SafeMap, } = primordials; const assert = require('internal/assert'); @@ -15,8 +17,8 @@ module.exports = RoundRobinHandle; function RoundRobinHandle(key, address, { port, fd, flags }) { this.key = key; - this.all = new Map(); - this.free = new Map(); + this.all = new SafeMap(); + this.free = new SafeMap(); this.handles = []; this.handle = null; this.server = net.createServer(assert.fail); @@ -90,7 +92,7 @@ RoundRobinHandle.prototype.remove = function(worker) { }; RoundRobinHandle.prototype.distribute = 
function(err, handle) { - this.handles.push(handle); + ArrayPrototypePush(this.handles, handle); const [ workerEntry ] = this.free; if (ArrayIsArray(workerEntry)) { @@ -105,7 +107,7 @@ RoundRobinHandle.prototype.handoff = function(worker) { return; // Worker is closing (or has closed) the server. } - const handle = this.handles.shift(); + const handle = ArrayPrototypeShift(this.handles); if (handle === undefined) { this.free.set(worker.id, worker); // Add to ready queue again. diff --git a/lib/internal/cluster/shared_handle.js b/lib/internal/cluster/shared_handle.js index 656b1292988948..87b83df20081b1 100644 --- a/lib/internal/cluster/shared_handle.js +++ b/lib/internal/cluster/shared_handle.js @@ -1,5 +1,5 @@ 'use strict'; -const { Map } = primordials; +const { SafeMap } = primordials; const assert = require('internal/assert'); const dgram = require('internal/dgram'); const net = require('net'); @@ -8,7 +8,7 @@ module.exports = SharedHandle; function SharedHandle(key, address, { port, addressType, fd, flags }) { this.key = key; - this.workers = new Map(); + this.workers = new SafeMap(); this.handle = null; this.errno = 0; diff --git a/lib/internal/cluster/utils.js b/lib/internal/cluster/utils.js index 9e7a1186ffc2bf..732d33e46936a8 100644 --- a/lib/internal/cluster/utils.js +++ b/lib/internal/cluster/utils.js @@ -1,7 +1,8 @@ 'use strict'; const { - Map, + ReflectApply, + SafeMap, } = primordials; module.exports = { @@ -9,7 +10,7 @@ module.exports = { internal }; -const callbacks = new Map(); +const callbacks = new SafeMap(); let seq = 0; function sendHelper(proc, message, handle, cb) { @@ -44,6 +45,6 @@ function internal(worker, cb) { } } - fn.apply(worker, arguments); + ReflectApply(fn, worker, arguments); }; } diff --git a/lib/internal/cluster/worker.js b/lib/internal/cluster/worker.js index 516b7a3b73d787..cdc1cd97ce06c8 100644 --- a/lib/internal/cluster/worker.js +++ b/lib/internal/cluster/worker.js @@ -2,6 +2,7 @@ const { ObjectSetPrototypeOf, + 
ReflectApply, } = primordials; const EventEmitter = require('events'); @@ -13,7 +14,7 @@ function Worker(options) { if (!(this instanceof Worker)) return new Worker(options); - EventEmitter.call(this); + ReflectApply(EventEmitter, this, []); if (options === null || typeof options !== 'object') options = {}; @@ -38,11 +39,11 @@ ObjectSetPrototypeOf(Worker.prototype, EventEmitter.prototype); ObjectSetPrototypeOf(Worker, EventEmitter); Worker.prototype.kill = function() { - this.destroy.apply(this, arguments); + ReflectApply(this.destroy, this, arguments); }; Worker.prototype.send = function() { - return this.process.send.apply(this.process, arguments); + return ReflectApply(this.process.send, this.process, arguments); }; Worker.prototype.isDead = function() { From 3be5e86c578b0aece90161f6af7c9cfb9c74d38e Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 11 Nov 2020 08:36:21 -0800 Subject: [PATCH 32/98] test: add util.inspect test for null maxStringLength Add test case to cover currently-uncovered code. Refs: https://coverage.nodejs.org/coverage-39a7f7663e8f70fc/lib/internal/util/inspect.js.html#L333 PR-URL: https://github.com/nodejs/node/pull/36086 Reviewed-By: Antoine du Hamel Reviewed-By: Luigi Pinca Reviewed-By: Daijiro Wachi --- test/parallel/test-util-inspect.js | 1 + 1 file changed, 1 insertion(+) diff --git a/test/parallel/test-util-inspect.js b/test/parallel/test-util-inspect.js index 4d7232b9fe51b0..634f27690e5a59 100644 --- a/test/parallel/test-util-inspect.js +++ b/test/parallel/test-util-inspect.js @@ -2906,6 +2906,7 @@ assert.strictEqual( util.inspect(x, { maxStringLength: 4 }), "'aaaa'... 
999996 more characters" ); + assert.match(util.inspect(x, { maxStringLength: null }), /a'$/); } { From 75707f45eb448608740b295f37fbfc1d70e961eb Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sat, 7 Nov 2020 17:23:14 +0100 Subject: [PATCH 33/98] lib,tools: enforce access to prototype from primordials PR-URL: https://github.com/nodejs/node/pull/36025 Reviewed-By: Shingo Inoue Reviewed-By: Rich Trott --- lib/internal/freeze_intrinsics.js | 138 +++++++++++------- lib/internal/process/warning.js | 7 +- .../test-eslint-prefer-primordials.js | 8 + tools/eslint-rules/prefer-primordials.js | 1 - 4 files changed, 100 insertions(+), 54 deletions(-) diff --git a/lib/internal/freeze_intrinsics.js b/lib/internal/freeze_intrinsics.js index 5e99af2d9ad0bb..50ae655206479a 100644 --- a/lib/internal/freeze_intrinsics.js +++ b/lib/internal/freeze_intrinsics.js @@ -25,25 +25,42 @@ const { Array, ArrayBuffer, + ArrayBufferPrototype, + ArrayPrototype, ArrayPrototypeForEach, + ArrayPrototypePush, BigInt, BigInt64Array, + BigInt64ArrayPrototype, + BigIntPrototype, BigUint64Array, + BigUint64ArrayPrototype, Boolean, + BooleanPrototype, DataView, + DataViewPrototype, Date, + DatePrototype, Error, + ErrorPrototype, EvalError, + EvalErrorPrototype, Float32Array, + Float32ArrayPrototype, Float64Array, + Float64ArrayPrototype, Function, + FunctionPrototype, Int16Array, + Int16ArrayPrototype, Int32Array, + Int32ArrayPrototype, Int8Array, - JSON, + Int8ArrayPrototype, Map, - Math, + MapPrototype, Number, + NumberPrototype, Object, ObjectDefineProperty, ObjectFreeze, @@ -52,28 +69,44 @@ const { ObjectGetOwnPropertyNames, ObjectGetOwnPropertySymbols, ObjectGetPrototypeOf, + ObjectPrototype, ObjectPrototypeHasOwnProperty, Promise, + PromisePrototype, RangeError, + RangeErrorPrototype, ReferenceError, - Reflect, + ReferenceErrorPrototype, ReflectOwnKeys, RegExp, + RegExpPrototype, + SafeSet, Set, + SetPrototype, String, + StringPrototype, Symbol, SymbolIterator, SyntaxError, + 
SyntaxErrorPrototype, TypeError, + TypeErrorPrototype, TypedArray, TypedArrayPrototype, Uint16Array, + Uint16ArrayPrototype, Uint32Array, + Uint32ArrayPrototype, Uint8Array, + Uint8ArrayPrototype, Uint8ClampedArray, + Uint8ClampedArrayPrototype, URIError, + URIErrorPrototype, WeakMap, + WeakMapPrototype, WeakSet, + WeakSetPrototype, } = primordials; module.exports = function() { @@ -110,55 +143,55 @@ module.exports = function() { TypedArrayPrototype, // 19 Fundamental Objects - Object.prototype, // 19.1 - Function.prototype, // 19.2 - Boolean.prototype, // 19.3 - - Error.prototype, // 19.5 - EvalError.prototype, - RangeError.prototype, - ReferenceError.prototype, - SyntaxError.prototype, - TypeError.prototype, - URIError.prototype, + ObjectPrototype, // 19.1 + FunctionPrototype, // 19.2 + BooleanPrototype, // 19.3 + + ErrorPrototype, // 19.5 + EvalErrorPrototype, + RangeErrorPrototype, + ReferenceErrorPrototype, + SyntaxErrorPrototype, + TypeErrorPrototype, + URIErrorPrototype, // 20 Numbers and Dates - Number.prototype, // 20.1 - Date.prototype, // 20.3 + NumberPrototype, // 20.1 + DatePrototype, // 20.3 // 21 Text Processing - String.prototype, // 21.1 - RegExp.prototype, // 21.2 + StringPrototype, // 21.1 + RegExpPrototype, // 21.2 // 22 Indexed Collections - Array.prototype, // 22.1 - - Int8Array.prototype, - Uint8Array.prototype, - Uint8ClampedArray.prototype, - Int16Array.prototype, - Uint16Array.prototype, - Int32Array.prototype, - Uint32Array.prototype, - Float32Array.prototype, - Float64Array.prototype, - BigInt64Array.prototype, - BigUint64Array.prototype, + ArrayPrototype, // 22.1 + + Int8ArrayPrototype, + Uint8ArrayPrototype, + Uint8ClampedArrayPrototype, + Int16ArrayPrototype, + Uint16ArrayPrototype, + Int32ArrayPrototype, + Uint32ArrayPrototype, + Float32ArrayPrototype, + Float64ArrayPrototype, + BigInt64ArrayPrototype, + BigUint64ArrayPrototype, // 23 Keyed Collections - Map.prototype, // 23.1 - Set.prototype, // 23.2 - WeakMap.prototype, // 23.3 - 
WeakSet.prototype, // 23.4 + MapPrototype, // 23.1 + SetPrototype, // 23.2 + WeakMapPrototype, // 23.3 + WeakSetPrototype, // 23.4 // 24 Structured Data - ArrayBuffer.prototype, // 24.1 - DataView.prototype, // 24.3 - Promise.prototype, // 25.4 + ArrayBufferPrototype, // 24.1 + DataViewPrototype, // 24.3 + PromisePrototype, // 25.4 // Other APIs / Web Compatibility console.Console.prototype, - BigInt.prototype, + BigIntPrototype, WebAssembly.Module.prototype, WebAssembly.Instance.prototype, WebAssembly.Table.prototype, @@ -171,7 +204,7 @@ module.exports = function() { const intrinsics = [ // Anonymous Intrinsics // ThrowTypeError - ObjectGetOwnPropertyDescriptor(Function.prototype, 'caller').get, + ObjectGetOwnPropertyDescriptor(FunctionPrototype, 'caller').get, // IteratorPrototype ObjectGetPrototypeOf( ObjectGetPrototypeOf(new Array()[SymbolIterator]()) @@ -224,6 +257,7 @@ module.exports = function() { // 20 Numbers and Dates Number, // 20.1 + // eslint-disable-next-line node-core/prefer-primordials Math, // 20.2 Date, // 20.3 @@ -255,10 +289,12 @@ module.exports = function() { // 24 Structured Data ArrayBuffer, // 24.1 DataView, // 24.3 + // eslint-disable-next-line node-core/prefer-primordials JSON, // 24.5 Promise, // 25.4 // 26 Reflection + // eslint-disable-next-line node-core/prefer-primordials Reflect, // 26.1 Proxy, // 26.2 @@ -281,19 +317,21 @@ module.exports = function() { ]; if (typeof Intl !== 'undefined') { - intrinsicPrototypes.push(Intl.Collator.prototype); - intrinsicPrototypes.push(Intl.DateTimeFormat.prototype); - intrinsicPrototypes.push(Intl.ListFormat.prototype); - intrinsicPrototypes.push(Intl.NumberFormat.prototype); - intrinsicPrototypes.push(Intl.PluralRules.prototype); - intrinsicPrototypes.push(Intl.RelativeTimeFormat.prototype); - intrinsics.push(Intl); + ArrayPrototypePush(intrinsicPrototypes, + Intl.Collator.prototype, + Intl.DateTimeFormat.prototype, + Intl.ListFormat.prototype, + Intl.NumberFormat.prototype, + 
Intl.PluralRules.prototype, + Intl.RelativeTimeFormat.prototype, + ); + ArrayPrototypePush(intrinsics, Intl); } - intrinsicPrototypes.forEach(enableDerivedOverrides); + ArrayPrototypeForEach(intrinsicPrototypes, enableDerivedOverrides); const frozenSet = new WeakSet(); - intrinsics.forEach(deepFreeze); + ArrayPrototypeForEach(intrinsics, deepFreeze); // Objects that are deeply frozen. function deepFreeze(root) { @@ -306,7 +344,7 @@ module.exports = function() { */ function innerDeepFreeze(node) { // Objects that we have frozen in this round. - const freezingSet = new Set(); + const freezingSet = new SafeSet(); // If val is something we should be freezing but aren't yet, // add it to freezingSet. diff --git a/lib/internal/process/warning.js b/lib/internal/process/warning.js index 3182988fe2ba09..eafe8d2203ee6f 100644 --- a/lib/internal/process/warning.js +++ b/lib/internal/process/warning.js @@ -3,6 +3,7 @@ const { ArrayIsArray, Error, + ErrorPrototypeToString, ErrorCaptureStackTrace, String, } = primordials; @@ -81,10 +82,10 @@ function onWarning(warning) { if (trace && warning.stack) { msg += `${warning.stack}`; } else { - const toString = + msg += typeof warning.toString === 'function' ? 
- warning.toString : Error.prototype.toString; - msg += `${toString.apply(warning)}`; + `${warning.toString()}` : + ErrorPrototypeToString(warning); } if (typeof warning.detail === 'string') { msg += `\n${warning.detail}`; diff --git a/test/parallel/test-eslint-prefer-primordials.js b/test/parallel/test-eslint-prefer-primordials.js index 6a28f541e4de84..2d04f9a1081c56 100644 --- a/test/parallel/test-eslint-prefer-primordials.js +++ b/test/parallel/test-eslint-prefer-primordials.js @@ -163,5 +163,13 @@ new RuleTester({ options: [{ name: 'Map', into: 'Safe' }], errors: [{ message: /const { SafeMap } = primordials/ }] }, + { + code: ` + const { Function } = primordials; + const noop = Function.prototype; + `, + options: [{ name: 'Function' }], + errors: [{ message: /const { FunctionPrototype } = primordials/ }] + }, ] }); diff --git a/tools/eslint-rules/prefer-primordials.js b/tools/eslint-rules/prefer-primordials.js index ffbb1e6e308c95..51fb6ab8c2ad44 100644 --- a/tools/eslint-rules/prefer-primordials.js +++ b/tools/eslint-rules/prefer-primordials.js @@ -75,7 +75,6 @@ module.exports = { acc.set( option.name, (option.ignore || []) - .concat(['prototype']) .reduce((acc, name) => acc.set(name, { ignored: true }), new Map()) From 66788970ac1dd79d948fc0fa5f9e2d1e4127b7b4 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sat, 7 Nov 2020 10:19:54 +0100 Subject: [PATCH 34/98] esm: refactor to use more primordials PR-URL: https://github.com/nodejs/node/pull/36019 Reviewed-By: Rich Trott --- .../modules/esm/create_dynamic_module.js | 4 ++-- lib/internal/modules/esm/get_format.js | 10 ++++++-- lib/internal/modules/esm/get_source.js | 12 ++++++---- lib/internal/modules/esm/loader.js | 5 ++-- lib/internal/modules/esm/module_job.js | 24 ++++++++++++------- lib/internal/modules/esm/resolve.js | 3 ++- lib/internal/modules/esm/translators.js | 16 +++++++++---- test/parallel/test-bootstrap-modules.js | 2 ++ 8 files changed, 50 insertions(+), 26 deletions(-) diff 
--git a/lib/internal/modules/esm/create_dynamic_module.js b/lib/internal/modules/esm/create_dynamic_module.js index e831db8daaf476..f7c20083b6c918 100644 --- a/lib/internal/modules/esm/create_dynamic_module.js +++ b/lib/internal/modules/esm/create_dynamic_module.js @@ -5,7 +5,7 @@ const { ArrayPrototypeMap, JSONStringify, ObjectCreate, - Set, + SafeSet, } = primordials; let debug = require('internal/util/debuglog').debuglog('esm', (fn) => { @@ -38,7 +38,7 @@ import.meta.done(); const { ModuleWrap, callbackMap } = internalBinding('module_wrap'); const m = new ModuleWrap(`${url}`, undefined, source, 0, 0); - const readyfns = new Set(); + const readyfns = new SafeSet(); const reflect = { exports: ObjectCreate(null), onReady: (cb) => { readyfns.add(cb); }, diff --git a/lib/internal/modules/esm/get_format.js b/lib/internal/modules/esm/get_format.js index 16e2ad5e2d5c3e..51b207ea75d131 100644 --- a/lib/internal/modules/esm/get_format.js +++ b/lib/internal/modules/esm/get_format.js @@ -1,5 +1,8 @@ 'use strict'; -const { StringPrototypeStartsWith } = primordials; +const { + RegExpPrototypeExec, + StringPrototypeStartsWith, +} = primordials; const { extname } = require('path'); const { getOptionValue } = require('internal/options'); @@ -39,7 +42,10 @@ function defaultGetFormat(url, context, defaultGetFormatUnused) { } const parsed = new URL(url); if (parsed.protocol === 'data:') { - const [ , mime ] = /^([^/]+\/[^;,]+)(?:[^,]*?)(;base64)?,/.exec(parsed.pathname) || [ null, null, null ]; + const [ , mime ] = RegExpPrototypeExec( + /^([^/]+\/[^;,]+)(?:[^,]*?)(;base64)?,/, + parsed.pathname, + ) || [ null, null, null ]; const format = ({ '__proto__': null, 'text/javascript': 'module', diff --git a/lib/internal/modules/esm/get_source.js b/lib/internal/modules/esm/get_source.js index 54dbd029fcfc22..b2cf0c3bd28aa2 100644 --- a/lib/internal/modules/esm/get_source.js +++ b/lib/internal/modules/esm/get_source.js @@ -1,5 +1,8 @@ 'use strict'; +const { + RegExpPrototypeExec, +} = 
primordials; const { getOptionValue } = require('internal/options'); // Do not eagerly grab .manifest, it may be in TDZ const policy = getOptionValue('--experimental-policy') ? @@ -8,14 +11,13 @@ const policy = getOptionValue('--experimental-policy') ? const { Buffer } = require('buffer'); -const fs = require('fs'); -const { URL } = require('url'); -const { promisify } = require('internal/util'); +const fs = require('internal/fs/promises').exports; +const { URL } = require('internal/url'); const { ERR_INVALID_URL, ERR_INVALID_URL_SCHEME, } = require('internal/errors').codes; -const readFileAsync = promisify(fs.readFile); +const readFileAsync = fs.readFile; const DATA_URL_PATTERN = /^[^/]+\/[^,;]+(?:[^,]*?)(;base64)?,([\s\S]*)$/; @@ -25,7 +27,7 @@ async function defaultGetSource(url, { format } = {}, defaultGetSource) { if (parsed.protocol === 'file:') { source = await readFileAsync(parsed); } else if (parsed.protocol === 'data:') { - const match = DATA_URL_PATTERN.exec(parsed.pathname); + const match = RegExpPrototypeExec(DATA_URL_PATTERN, parsed.pathname); if (!match) { throw new ERR_INVALID_URL(url); } diff --git a/lib/internal/modules/esm/loader.js b/lib/internal/modules/esm/loader.js index 110464cbdb1da3..232177698c73f6 100644 --- a/lib/internal/modules/esm/loader.js +++ b/lib/internal/modules/esm/loader.js @@ -7,6 +7,7 @@ const { FunctionPrototypeBind, ObjectSetPrototypeOf, SafeWeakMap, + StringPrototypeStartsWith, } = primordials; const { @@ -126,8 +127,8 @@ class Loader { } if (this._resolve === defaultResolve && - !url.startsWith('file:') && - !url.startsWith('data:') + !StringPrototypeStartsWith(url, 'file:') && + !StringPrototypeStartsWith(url, 'data:') ) { throw new ERR_INVALID_RETURN_PROPERTY( 'file: or data: url', 'loader resolve', 'url', url diff --git a/lib/internal/modules/esm/module_job.js b/lib/internal/modules/esm/module_job.js index 7b8f146771c530..549d43cc20e119 100644 --- a/lib/internal/modules/esm/module_job.js +++ 
b/lib/internal/modules/esm/module_job.js @@ -2,10 +2,15 @@ const { ArrayPrototypeJoin, + ArrayPrototypeMap, + ArrayPrototypePush, + FunctionPrototype, ObjectSetPrototypeOf, PromiseAll, + PromiseResolve, + PromisePrototypeCatch, + ReflectApply, SafeSet, - SafePromise, StringPrototypeIncludes, StringPrototypeMatch, StringPrototypeReplace, @@ -16,9 +21,9 @@ const { ModuleWrap } = internalBinding('module_wrap'); const { decorateErrorStack } = require('internal/util'); const assert = require('internal/assert'); -const resolvedPromise = SafePromise.resolve(); +const resolvedPromise = PromiseResolve(); -function noop() {} +const noop = FunctionPrototype; let hasPausedEntry = false; @@ -35,7 +40,7 @@ class ModuleJob { this.module = undefined; // Expose the promise to the ModuleWrap directly for linking below. // `this.module` is also filled in below. - this.modulePromise = moduleProvider.call(loader, url, isMain); + this.modulePromise = ReflectApply(moduleProvider, loader, [url, isMain]); // Wait for the ModuleWrap instance being linked with all dependencies. const link = async () => { @@ -49,21 +54,21 @@ class ModuleJob { const dependencyJobs = []; const promises = this.module.link(async (specifier) => { const jobPromise = this.loader.getModuleJob(specifier, url); - dependencyJobs.push(jobPromise); + ArrayPrototypePush(dependencyJobs, jobPromise); const job = await jobPromise; return job.modulePromise; }); if (promises !== undefined) - await SafePromise.all(promises); + await PromiseAll(promises); - return SafePromise.all(dependencyJobs); + return PromiseAll(dependencyJobs); }; // Promise for the list of all dependencyJobs. this.linked = link(); // This promise is awaited later anyway, so silence // 'unhandled rejection' warnings. - this.linked.catch(noop); + PromisePrototypeCatch(this.linked, noop); // instantiated == deep dependency jobs wrappers are instantiated, // and module wrapper is instantiated. 
@@ -85,7 +90,8 @@ class ModuleJob { } jobsInGraph.add(moduleJob); const dependencyJobs = await moduleJob.linked; - return PromiseAll(dependencyJobs.map(addJobsToDependencyGraph)); + return PromiseAll( + ArrayPrototypeMap(dependencyJobs, addJobsToDependencyGraph)); }; await addJobsToDependencyGraph(this); diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js index 1c1598d6251047..8a5bc841ada22a 100644 --- a/lib/internal/modules/esm/resolve.js +++ b/lib/internal/modules/esm/resolve.js @@ -880,7 +880,8 @@ function defaultResolve(specifier, context = {}, defaultResolveUnused) { [internalFS.realpathCacheKey]: realpathCache }); const old = url; - url = pathToFileURL(real + (urlPath.endsWith(sep) ? '/' : '')); + url = pathToFileURL( + real + (StringPrototypeEndsWith(urlPath, sep) ? '/' : '')); url.search = old.search; url.hash = old.hash; } diff --git a/lib/internal/modules/esm/translators.js b/lib/internal/modules/esm/translators.js index 9ebe8586ba9270..66636d9ce092b5 100644 --- a/lib/internal/modules/esm/translators.js +++ b/lib/internal/modules/esm/translators.js @@ -3,6 +3,7 @@ /* global WebAssembly */ const { + ArrayPrototypeMap, Boolean, JSONParse, ObjectGetPrototypeOf, @@ -14,6 +15,7 @@ const { SafeMap, SafeSet, StringPrototypeReplace, + StringPrototypeSlice, StringPrototypeSplit, StringPrototypeStartsWith, SyntaxErrorPrototype, @@ -277,9 +279,9 @@ function cjsPreparseModuleExports(filename) { translators.set('builtin', async function builtinStrategy(url) { debug(`Translating BuiltinModule ${url}`); // Slice 'node:' scheme - const id = url.slice(5); + const id = StringPrototypeSlice(url, 5); const module = loadNativeModule(id, url, true); - if (!url.startsWith('node:') || !module) { + if (!StringPrototypeStartsWith(url, 'node:') || !module) { throw new ERR_UNKNOWN_BUILTIN_MODULE(url); } debug(`Loading BuiltinModule ${url}`); @@ -291,7 +293,8 @@ translators.set('json', async function jsonStrategy(url) { 
emitExperimentalWarning('Importing JSON modules'); debug(`Translating JSONModule ${url}`); debug(`Loading JSONModule ${url}`); - const pathname = url.startsWith('file:') ? fileURLToPath(url) : null; + const pathname = StringPrototypeStartsWith(url, 'file:') ? + fileURLToPath(url) : null; let modulePath; let module; if (pathname) { @@ -365,8 +368,11 @@ translators.set('wasm', async function(url) { } const imports = - WebAssembly.Module.imports(compiled).map(({ module }) => module); - const exports = WebAssembly.Module.exports(compiled).map(({ name }) => name); + ArrayPrototypeMap(WebAssembly.Module.imports(compiled), + ({ module }) => module); + const exports = + ArrayPrototypeMap(WebAssembly.Module.exports(compiled), + ({ name }) => name); return createDynamicModule(imports, exports, url, (reflect) => { const { exports } = new WebAssembly.Instance(compiled, reflect.imports); diff --git a/test/parallel/test-bootstrap-modules.js b/test/parallel/test-bootstrap-modules.js index b9844dc4caf683..a6f2c15c19699e 100644 --- a/test/parallel/test-bootstrap-modules.js +++ b/test/parallel/test-bootstrap-modules.js @@ -49,6 +49,8 @@ const expectedModules = new Set([ 'NativeModule internal/fixed_queue', 'NativeModule internal/fs/dir', 'NativeModule internal/fs/utils', + 'NativeModule internal/fs/promises', + 'NativeModule internal/fs/rimraf', 'NativeModule internal/idna', 'NativeModule internal/linkedlist', 'NativeModule internal/modules/run_main', From f7b2fce1c134688ca616513b10597327c957b37e Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sat, 7 Nov 2020 10:27:46 +0100 Subject: [PATCH 35/98] vm: refactor to use more primordials PR-URL: https://github.com/nodejs/node/pull/36023 Reviewed-By: Rich Trott --- lib/internal/vm/module.js | 17 ++++++++++------- lib/vm.js | 5 +++-- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/lib/internal/vm/module.js b/lib/internal/vm/module.js index ce37312d652943..30ce655bd0d8b9 100644 --- 
a/lib/internal/vm/module.js +++ b/lib/internal/vm/module.js @@ -3,15 +3,18 @@ const assert = require('internal/assert'); const { ArrayIsArray, + ArrayPrototypeForEach, + ArrayPrototypeIndexOf, + ArrayPrototypeSome, ObjectCreate, ObjectDefineProperty, ObjectGetPrototypeOf, ObjectSetPrototypeOf, - SafePromise, + PromiseAll, + SafeWeakMap, Symbol, SymbolToStringTag, TypeError, - WeakMap, } = primordials; const { isContext } = internalBinding('contextify'); @@ -62,7 +65,7 @@ const STATUS_MAP = { let globalModuleId = 0; const defaultModuleName = 'vm:module'; -const wrapToModuleMap = new WeakMap(); +const wrapToModuleMap = new SafeWeakMap(); const kWrap = Symbol('kWrap'); const kContext = Symbol('kContext'); @@ -332,7 +335,7 @@ class SourceTextModule extends Module { try { if (promises !== undefined) { - await SafePromise.all(promises); + await PromiseAll(promises); } } catch (e) { this.#error = e; @@ -392,13 +395,13 @@ class SourceTextModule extends Module { class SyntheticModule extends Module { constructor(exportNames, evaluateCallback, options = {}) { if (!ArrayIsArray(exportNames) || - exportNames.some((e) => typeof e !== 'string')) { + ArrayPrototypeSome(exportNames, (e) => typeof e !== 'string')) { throw new ERR_INVALID_ARG_TYPE('exportNames', 'Array of unique strings', exportNames); } else { - exportNames.forEach((name, i) => { - if (exportNames.indexOf(name, i + 1) !== -1) { + ArrayPrototypeForEach(exportNames, (name, i) => { + if (ArrayPrototypeIndexOf(exportNames, name, i + 1) !== -1) { throw new ERR_INVALID_ARG_VALUE(`exportNames.${name}`, name, 'is duplicated'); diff --git a/lib/vm.js b/lib/vm.js index 45a2edf0bb20b3..33893845084141 100644 --- a/lib/vm.js +++ b/lib/vm.js @@ -24,7 +24,8 @@ const { ArrayPrototypeForEach, Symbol, - PromiseReject + PromiseReject, + ReflectApply, } = primordials; const { @@ -269,7 +270,7 @@ function sigintHandlersWrap(fn, thisArg, argsArray) { process.removeAllListeners('SIGINT'); try { - return fn.apply(thisArg, argsArray); + 
return ReflectApply(fn, thisArg, argsArray); } finally { // Add using the public methods so that the `newListener` handler of // process can re-attach the listeners. From 1550073dbc2f908bd9f4bed198e3fa2f30893ea0 Mon Sep 17 00:00:00 2001 From: raisinten Date: Thu, 12 Nov 2020 19:56:06 +0530 Subject: [PATCH 36/98] events: disabled manual construction AbortSignal Fixes: https://github.com/nodejs/node/issues/36064 PR-URL: https://github.com/nodejs/node/pull/36094 Reviewed-By: James M Snell Reviewed-By: Benjamin Gruenbaum Reviewed-By: Luigi Pinca Reviewed-By: Zeyu Yang Reviewed-By: Andrey Pechkurov Reviewed-By: Rich Trott --- lib/internal/abort_controller.js | 16 +++++++++++++++- test/parallel/test-abortcontroller.js | 11 ++++++++++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/lib/internal/abort_controller.js b/lib/internal/abort_controller.js index b387e5f4e2cea0..37001dc733982b 100644 --- a/lib/internal/abort_controller.js +++ b/lib/internal/abort_controller.js @@ -6,7 +6,9 @@ const { ObjectAssign, ObjectDefineProperties, + ObjectSetPrototypeOf, Symbol, + TypeError, } = primordials; const { @@ -35,6 +37,11 @@ function customInspect(self, obj, depth, options) { } class AbortSignal extends EventTarget { + constructor() { + // eslint-disable-next-line no-restricted-syntax + throw new TypeError('Illegal constructor'); + } + get aborted() { return !!this[kAborted]; } [customInspectSymbol](depth, options) { @@ -50,6 +57,13 @@ ObjectDefineProperties(AbortSignal.prototype, { defineEventHandler(AbortSignal.prototype, 'abort'); +function createAbortSignal() { + const signal = new EventTarget(); + ObjectSetPrototypeOf(signal, AbortSignal.prototype); + signal[kAborted] = false; + return signal; +} + function abortSignal(signal) { if (signal[kAborted]) return; signal[kAborted] = true; @@ -65,7 +79,7 @@ function abortSignal(signal) { const kSignal = Symbol('signal'); class AbortController { constructor() { - this[kSignal] = new 
AbortSignal(); + this[kSignal] = createAbortSignal(); emitExperimentalWarning('AbortController'); } diff --git a/test/parallel/test-abortcontroller.js b/test/parallel/test-abortcontroller.js index 8910ac7155762b..673a774ef148c3 100644 --- a/test/parallel/test-abortcontroller.js +++ b/test/parallel/test-abortcontroller.js @@ -3,7 +3,7 @@ const common = require('../common'); -const { ok, strictEqual } = require('assert'); +const { ok, strictEqual, throws } = require('assert'); { // Tests that abort is fired with the correct event type on AbortControllers @@ -51,3 +51,12 @@ const { ok, strictEqual } = require('assert'); strictEqual(firstTrusted, secondTrusted); strictEqual(untrusted, firstTrusted); } + +{ + // Tests that AbortSignal is impossible to construct manually + const ac = new AbortController(); + throws( + () => new ac.signal.constructor(), + /^TypeError: Illegal constructor$/ + ); +} From 9405cddbeef44ed14ec0d6c48f003dddec2fe5df Mon Sep 17 00:00:00 2001 From: Luigi Pinca Date: Mon, 9 Nov 2020 21:23:50 +0100 Subject: [PATCH 37/98] test: improve test-stream-duplex-readable-end - Remove unneeded listener for the `'error'` event. - Use `common.mustCall()`. - Verify that the `src` stream gets paused. 
PR-URL: https://github.com/nodejs/node/pull/36056 Refs: https://github.com/nodejs/node/pull/35941 Reviewed-By: Daijiro Wachi Reviewed-By: Rich Trott Reviewed-By: Benjamin Gruenbaum Reviewed-By: Ricky Zhou <0x19951125@gmail.com> --- test/parallel/test-stream-duplex-readable-end.js | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/test/parallel/test-stream-duplex-readable-end.js b/test/parallel/test-stream-duplex-readable-end.js index ca3ccf63c49474..0e3e62aacb14bc 100644 --- a/test/parallel/test-stream-duplex-readable-end.js +++ b/test/parallel/test-stream-duplex-readable-end.js @@ -1,6 +1,6 @@ 'use strict'; // https://github.com/nodejs/node/issues/35926 -require('../common'); +const common = require('../common'); const assert = require('assert'); const stream = require('stream'); @@ -22,11 +22,8 @@ const dst = new stream.Transform({ src.pipe(dst); -function parser_end() { - assert.ok(loops > 0); - dst.removeAllListeners(); -} - dst.on('data', () => { }); -dst.on('end', parser_end); -dst.on('error', parser_end); +dst.on('end', common.mustCall(() => { + assert.strictEqual(loops, 3); + assert.ok(src.isPaused()); +})); From 6349b1d6731584e70bb4fc49e4cd0cce69ee6996 Mon Sep 17 00:00:00 2001 From: Szymon Marczak <36894700+szmarczak@users.noreply.github.com> Date: Wed, 11 Nov 2020 22:22:41 +0100 Subject: [PATCH 38/98] dns: add a cancel() method to the promise Resolver MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/33099 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Juan José Arboleda Reviewed-By: Antoine du Hamel --- doc/api/dns.md | 8 +++ lib/internal/dns/promises.js | 1 + .../test-dns-channel-cancel-promise.js | 65 +++++++++++++++++++ test/parallel/test-dns-channel-cancel.js | 49 ++++++++++---- 4 files changed, 110 insertions(+), 13 
deletions(-) create mode 100644 test/parallel/test-dns-channel-cancel-promise.js diff --git a/doc/api/dns.md b/doc/api/dns.md index 613bd2465b9464..727b5dc9b2dde8 100644 --- a/doc/api/dns.md +++ b/doc/api/dns.md @@ -730,6 +730,14 @@ The following methods from the `dnsPromises` API are available: * [`resolver.reverse()`][`dnsPromises.reverse()`] * [`resolver.setServers()`][`dnsPromises.setServers()`] +### `resolver.cancel()` + + +Cancel all outstanding DNS queries made by this resolver. The corresponding +promises will be rejected with an error with code `ECANCELLED`. + ### `dnsPromises.getServers()` -Specify the `module` of a custom [experimental ECMAScript Module loader][]. +Specify the `module` of a custom experimental [ECMAScript Module loader][]. `module` may be either a path to a file, or an ECMAScript Module name. ### `--experimental-modules` @@ -1642,6 +1642,7 @@ $ node --max-old-space-size=1536 index.js ``` [Chrome DevTools Protocol]: https://chromedevtools.github.io/devtools-protocol/ +[ECMAScript Module loader]: esm.md#esm_loaders [REPL]: repl.md [ScriptCoverage]: https://chromedevtools.github.io/devtools-protocol/tot/Profiler#type-ScriptCoverage [Source Map]: https://sourcemaps.info/spec.html @@ -1662,7 +1663,6 @@ $ node --max-old-space-size=1536 index.js [debugger]: debugger.md [debugging security implications]: https://nodejs.org/en/docs/guides/debugging-getting-started/#security-implications [emit_warning]: process.md#process_process_emitwarning_warning_type_code_ctor -[experimental ECMAScript Module loader]: esm.md#esm_experimental_loaders [jitless]: https://v8.dev/blog/jitless [libuv threadpool documentation]: https://docs.libuv.org/en/latest/threadpool.html [remote code execution]: https://www.owasp.org/index.php/Code_Injection diff --git a/doc/api/esm.md b/doc/api/esm.md index 7770297ad7af94..e631f6c3926162 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -71,9 +71,9 @@ Expect major changes in the implementation including interoperability 
support, specifier resolution, and default behavior. - - - + + + ## Enabling @@ -114,44 +114,65 @@ The _specifier_ of an `import` statement is the string after the `from` keyword, e.g. `'path'` in `import { sep } from 'path'`. Specifiers are also used in `export from` statements, and as the argument to an `import()` expression. -There are four types of specifiers: - -* _Bare specifiers_ like `'some-package'`. They refer to an entry point of a - package by the package name. - -* _Deep import specifiers_ like `'some-package/lib/shuffle.mjs'`. They refer to - a path within a package prefixed by the package name. +There are three types of specifiers: * _Relative specifiers_ like `'./startup.js'` or `'../config.mjs'`. They refer - to a path relative to the location of the importing file. + to a path relative to the location of the importing file. _The file extension + is always necessary for these._ + +* _Bare specifiers_ like `'some-package'` or `'some-package/shuffle'`. They can + refer to the main entry point of a package by the package name, or a + specific feature module within a package prefixed by the package name as per + the examples respectively. _Including the file extension is only necessary + for packages without an [`"exports"`][] field._ * _Absolute specifiers_ like `'file:///opt/nodejs/config.js'`. They refer directly and explicitly to a full path. -Bare specifiers, and the bare specifier portion of deep import specifiers, are -strings; but everything else in a specifier is a URL. +Bare specifier resolutions are handled by the [Node.js module resolution +algorithm][]. All other specifier resolutions are always only resolved with +the standard relative [URL][] resolution semantics. -`file:`, `node:`, and `data:` URLs are supported. A specifier like -`'https://example.com/app.js'` may be supported by browsers but it is not -supported in Node.js. 
+Like in CommonJS, module files within packages can be accessed by appending a +path to the package name unless the package’s [`package.json`][] contains an +[`"exports"`][] field, in which case files within packages can only be accessed +via the paths defined in [`"exports"`][]. -Specifiers may not begin with `/` or `//`. These are reserved for potential -future use. The root of the current volume may be referenced via `file:///`. +For details on these package resolution rules that apply to bare specifiers in +the Node.js module resolution, see the [packages documentation](packages.md). -#### `node:` Imports +### Mandatory file extensions - +A file extension must be provided when using the `import` keyword to resolve +relative or absolute specifiers. Directory indexes (e.g. `'./startup/index.js'`) +must also be fully specified. + +This behavior matches how `import` behaves in browser environments, assuming a +typically configured server. -`node:` URLs are supported as a means to load Node.js builtin modules. This -URL scheme allows for builtin modules to be referenced by valid absolute URL -strings. +### URLs + +ES modules are resolved and cached as URLs. This means that files containing +special characters such as `#` and `?` need to be escaped. + +`file:`, `node:`, and `data:` URL schemes are supported. A specifier like +`'https://example.com/app.js'` is not supported natively in Node.js unless using +a [custom HTTPS loader][]. + +#### `file:` URLs + +Modules are loaded multiple times if the `import` specifier used to resolve +them has a different query or fragment. ```js -import fs from 'node:fs/promises'; +import './foo.mjs?query=1'; // loads ./foo.mjs with query of "?query=1" +import './foo.mjs?query=2'; // loads ./foo.mjs with query of "?query=2" ``` +The volume root may be referenced via `/`, `//` or `file:///`. 
Given the +differences between [URL][] and path resolution (such as percent encoding +details), it is recommended to use [url.pathToFileURL][] when importing a path. + #### `data:` Imports -The `import.meta` metaproperty is an `Object` that contains the following -property: +`node:` URLs are supported as an alternative means to load Node.js builtin +modules. This URL scheme allows for builtin modules to be referenced by valid +absolute URL strings. -* `url` {string} The absolute `file:` URL of the module. +```js +import fs from 'node:fs/promises'; +``` -## Differences between ES modules and CommonJS +## Builtin modules -### Mandatory file extensions +[Core modules][] provide named exports of their public API. A +default export is also provided which is the value of the CommonJS exports. +The default export can be used for, among other things, modifying the named +exports. Named exports of builtin modules are updated only by calling +[`module.syncBuiltinESMExports()`][]. -A file extension must be provided when using the `import` keyword. Directory -indexes (e.g. `'./startup/index.js'`) must also be fully specified. +```js +import EventEmitter from 'events'; +const e = new EventEmitter(); +``` -This behavior matches how `import` behaves in browser environments, assuming a -typically configured server. +```js +import { readFile } from 'fs'; +readFile('./foo.txt', (err, source) => { + if (err) { + console.error(err); + } else { + console.log(source); + } +}); +``` -### No `NODE_PATH` +```js +import fs, { readFileSync } from 'fs'; +import { syncBuiltinESMExports } from 'module'; -`NODE_PATH` is not part of resolving `import` specifiers. Please use symlinks -if this behavior is desired. +fs.readFileSync = () => Buffer.from('Hello, ESM'); +syncBuiltinESMExports(); -### No `require`, `exports`, `module.exports`, `__filename`, `__dirname` +fs.readFileSync === readFileSync; +``` -These CommonJS variables are not available in ES modules. 
+## `import()` expressions -`require` can be imported into an ES module using [`module.createRequire()`][]. +[Dynamic `import()`][] is supported in both CommonJS and ES modules. In CommonJS +modules it can be used to load ES modules. -Equivalents of `__filename` and `__dirname` can be created inside of each file -via [`import.meta.url`][]. +## `import.meta` -```js -import { fileURLToPath } from 'url'; -import { dirname } from 'path'; +* {Object} -const __filename = fileURLToPath(import.meta.url); -const __dirname = dirname(__filename); -``` +The `import.meta` meta property is an `Object` that contains the following +properties. -### No `require.resolve` +### `import.meta.url` -Former use cases relying on `require.resolve` to determine the resolved path -of a module can be supported via `import.meta.resolve`, which is experimental -and supported via the `--experimental-import-meta-resolve` flag: +* {string} The absolute `file:` URL of the module. -```js -(async () => { - const dependencyAsset = await import.meta.resolve('component-lib/asset.css'); -})(); -``` +This is defined exactly the same as it is in browsers providing the URL of the +current module file. -`import.meta.resolve` also accepts a second argument which is the parent module -from which to resolve from: +This enables useful patterns such as relative file loading: ```js -(async () => { - // Equivalent to import.meta.resolve('./dep') - await import.meta.resolve('./dep', import.meta.url); -})(); +import { readFileSync } from 'fs'; +const buffer = readFileSync(new URL('./data.proto', import.meta.url)); ``` -This function is asynchronous because the ES module resolver in Node.js is -asynchronous. With the introduction of [Top-Level Await][], these use cases -will be easier as they won't require an async function wrapper. - -### No `require.extensions` - -`require.extensions` is not used by `import`. The expectation is that loader -hooks can provide this workflow in the future. 
+### `import.meta.resolve(specifier[, parent])` -### No `require.cache` +> Stability: 1 - Experimental -`require.cache` is not used by `import`. It has a separate cache. +* `specifier` {string} The module specifier to resolve relative to `parent`. +* `parent` {string|URL} The absolute parent module URL to resolve from. If none + is specified, the value of `import.meta.url` is used as the default. +* Returns: {Promise} -### URL-based paths +Provides a module-relative resolution function scoped to each module, returning +the URL string. -ES modules are resolved and cached based upon -[URL](https://url.spec.whatwg.org/) semantics. This means that files containing -special characters such as `#` and `?` need to be escaped. + +```js +const dependencyAsset = await import.meta.resolve('component-lib/asset.css'); +``` -Modules are loaded multiple times if the `import` specifier used to resolve -them has a different query or fragment. +`import.meta.resolve` also accepts a second argument which is the parent module +from which to resolve from: + ```js -import './foo.mjs?query=1'; // loads ./foo.mjs with query of "?query=1" -import './foo.mjs?query=2'; // loads ./foo.mjs with query of "?query=2" +await import.meta.resolve('./dep', import.meta.url); ``` -For now, only modules using the `file:` protocol can be loaded. +This function is asynchronous because the ES module resolver in Node.js is +allowed to be asynchronous. ## Interoperability with CommonJS -### `require` - -`require` always treats the files it references as CommonJS. This applies -whether `require` is used the traditional way within a CommonJS environment, or -in an ES module environment using [`module.createRequire()`][]. - -To include an ES module into CommonJS, use [`import()`][]. - ### `import` statements An `import` statement can reference an ES module or a CommonJS module. -`import` statements are permitted only in ES modules. For similar functionality -in CommonJS, see [`import()`][]. 
+`import` statements are permitted only in ES modules, but dynamic [`import()`][] +expressions are supported in CommonJS for loading ES modules. When importing [CommonJS modules](#esm_commonjs_namespaces), the `module.exports` object is provided as the default export. Named exports may be available, provided by static analysis as a convenience for better ecosystem compatibility. -Additional experimental flags are available for importing -[Wasm modules](#esm_experimental_wasm_modules) or -[JSON modules](#esm_experimental_json_modules). For importing native modules or -JSON modules unflagged, see [`module.createRequire()`][]. - -The _specifier_ of an `import` statement (the string after the `from` keyword) -can either be an URL-style relative path like `'./file.mjs'` or a package name -like `'fs'`. - -Like in CommonJS, files within packages can be accessed by appending a path to -the package name; unless the package’s [`package.json`][] contains an -[`"exports"`][] field, in which case files within packages need to be accessed -via the path defined in [`"exports"`][]. - -```js -import { sin, cos } from 'geometry/trigonometry-functions.mjs'; -``` +### `require` -### `import()` expressions +The CommonJS module `require` always treats the files it references as CommonJS. -[Dynamic `import()`][] is supported in both CommonJS and ES modules. It can be -used to include ES module files from CommonJS code. +Using `require` to load an ES module is not supported because ES modules have +asynchronous execution. Instead, use use [`import()`][] to load an ES module +from a CommonJS module. -## CommonJS Namespaces +### CommonJS Namespaces CommonJS modules consist of a `module.exports` object which can be of any type. @@ -396,59 +404,73 @@ Named exports detection covers many common export patterns, reexport patterns and build tool and transpiler outputs. See [cjs-module-lexer][] for the exact semantics implemented. 
-## Builtin modules +### Differences between ES modules and CommonJS -[Core modules][] provide named exports of their public API. A -default export is also provided which is the value of the CommonJS exports. -The default export can be used for, among other things, modifying the named -exports. Named exports of builtin modules are updated only by calling -[`module.syncBuiltinESMExports()`][]. +#### No `require`, `exports` or `module.exports` -```js -import EventEmitter from 'events'; -const e = new EventEmitter(); -``` +In most cases, the ES module `import` can be used to load CommonJS modules. + +If needed, a `require` function can be constructed within an ES module using +[`module.createRequire()`][]. +#### No `__filename` or `__dirname` + +These CommonJS variables are not available in ES modules. + +`__filename` and `__dirname` use cases can be replicated via +[`import.meta.url`][]. + +#### No JSON Module Loading + +JSON imports are still experimental and only supported via the +`--experimental-json-modules` flag. + +Local JSON files can be loaded relative to `import.meta.url` with `fs` directly: + + ```js -import { readFile } from 'fs'; -readFile('./foo.txt', (err, source) => { - if (err) { - console.error(err); - } else { - console.log(source); - } -}); +import { readFile } from 'fs/promises'; +const json = JSON.parse(await readFile(new URL('./dat.json', import.meta.url))); ``` -```js -import fs, { readFileSync } from 'fs'; -import { syncBuiltinESMExports } from 'module'; +Alterantively `module.createRequire()` can be used. -fs.readFileSync = () => Buffer.from('Hello, ESM'); -syncBuiltinESMExports(); +#### No Native Module Loading -fs.readFileSync === readFileSync; -``` +Native modules are not currently supported with ES module imports. -## CommonJS, JSON, and native modules +The can instead be loaded with [`module.createRequire()`][] or +[`process.dlopen`][]. -CommonJS, JSON, and native modules can be used with -[`module.createRequire()`][]. 
+#### No `require.resolve` -```js -// cjs.cjs -module.exports = 'cjs'; +Relative resolution can be handled via `new URL('./local', import.meta.url)`. -// esm.mjs -import { createRequire } from 'module'; +For a complete `require.resolve` replacement, there is a flagged experimental +[`import.meta.resolve`][] API. -const require = createRequire(import.meta.url); +Alternatively `module.createRequire()` can be used. -const cjs = require('./cjs.cjs'); -cjs === 'cjs'; // true -``` +#### No `NODE_PATH` + +`NODE_PATH` is not part of resolving `import` specifiers. Please use symlinks +if this behavior is desired. -## Experimental JSON modules +#### No `require.extensions` + +`require.extensions` is not used by `import`. The expectation is that loader +hooks can provide this workflow in the future. + +#### No `require.cache` + +`require.cache` is not used by `import` as the ES module loader has its own +separate cache. + + + +## JSON modules + +> Stability: 1 - Experimental Currently importing JSON modules are only supported in the `commonjs` mode and are loaded using the CJS loader. [WHATWG JSON modules specification][] are @@ -478,7 +500,11 @@ node index.mjs # fails node --experimental-json-modules index.mjs # works ``` -## Experimental Wasm modules + + +## Wasm modules + +> Stability: 1 - Experimental Importing Web Assembly modules is supported under the `--experimental-wasm-modules` flag, allowing any `.wasm` files to be @@ -502,7 +528,11 @@ node --experimental-wasm-modules index.mjs would provide the exports interface for the instantiation of `module.wasm`. -## Experimental top-level `await` + + +## Top-level `await` + +> Stability: 1 - Experimental The `await` keyword may be used in the top level (outside of async functions) within modules as per the [ECMAScript Top-Level `await` proposal][]. 
@@ -526,7 +556,11 @@ console.log(five); // Logs `5` node b.mjs # works ``` -## Experimental loaders + + +## Loaders + +> Stability: 1 - Experimental **Note: This API is currently being redesigned and will still change.** @@ -1237,6 +1271,8 @@ _internal_, _conditions_) ### Customizing ESM specifier resolution algorithm +> Stability: 1 - Experimental + The current specifier resolution does not support all default behavior of the CommonJS loader. One of the behavior differences is automatic resolution of file extensions and the ability to import directories that have an index @@ -1267,8 +1303,9 @@ success! [ECMAScript-modules implementation]: https://github.com/nodejs/modules/blob/master/doc/plan-for-new-modules-implementation.md [ES Module Integration Proposal for Web Assembly]: https://github.com/webassembly/esm-integration [Node.js EP for ES Modules]: https://github.com/nodejs/node-eps/blob/master/002-es-modules.md +[Node.js Module Resolution Algorithm]: #esm_resolver_algorithm_specification [Terminology]: #esm_terminology -[Top-Level Await]: https://github.com/tc39/proposal-top-level-await +[URL]: https://url.spec.whatwg.org/ [WHATWG JSON modules specification]: https://html.spec.whatwg.org/#creating-a-json-module-script [`"exports"`]: packages.md#packages_exports [`"type"`]: packages.md#packages_type @@ -1279,15 +1316,19 @@ success! 
[`data:` URLs]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs [`export`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/export [`import()`]: #esm_import_expressions -[`import.meta.url`]: #esm_import_meta +[`import.meta.url`]: #esm_import_meta_url +[`import.meta.resolve`]: #esm_import_meta_resolve_specifier_parent [`import`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import [`module.createRequire()`]: module.md#module_module_createrequire_filename [`module.syncBuiltinESMExports()`]: module.md#module_module_syncbuiltinesmexports [`package.json`]: packages.md#packages_node_js_package_json_field_definitions +[`process.dlopen`]: process.md#process_process_dlopen_module_filename_flags [`transformSource` hook]: #esm_transformsource_source_context_defaulttransformsource [`string`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String [`util.TextDecoder`]: util.md#util_class_util_textdecoder [cjs-module-lexer]: https://github.com/guybedford/cjs-module-lexer/tree/1.0.0 +[custom https loader]: #esm_https_loader [special scheme]: https://url.spec.whatwg.org/#special-scheme [the official standard format]: https://tc39.github.io/ecma262/#sec-modules [transpiler loader example]: #esm_transpiler_loader +[url.pathToFileURL]: url.md#url_url_pathtofileurl_path diff --git a/doc/api/process.md b/doc/api/process.md index f2dd0c18d5cc00..d368b0aa573fa8 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -885,31 +885,29 @@ changes: * `filename` {string} * `flags` {os.constants.dlopen} **Default:** `os.constants.dlopen.RTLD_LAZY` -The `process.dlopen()` method allows to dynamically load shared -objects. It is primarily used by `require()` to load -C++ Addons, and should not be used directly, except in special -cases. In other words, [`require()`][] should be preferred over -`process.dlopen()`, unless there are specific reasons. 
+The `process.dlopen()` method allows dynamically loading shared objects. It is +primarily used by `require()` to load C++ Addons, and should not be used +directly, except in special cases. In other words, [`require()`][] should be +preferred over `process.dlopen()` unless there are specific reasons such as +custom dlopen flags or loading from ES modules. The `flags` argument is an integer that allows to specify dlopen behavior. See the [`os.constants.dlopen`][] documentation for details. -If there are specific reasons to use `process.dlopen()` (for instance, -to specify dlopen flags), it's often useful to use [`require.resolve()`][] -to look up the module's path. +An important requirement when calling `process.dlopen()` is that the `module` +instance must be passed. Functions exported by the C++ Addon are then +accessible via `module.exports`. -An important drawback when calling `process.dlopen()` is that the `module` -instance must be passed. Functions exported by the C++ Addon will be accessible -via `module.exports`. - -The example below shows how to load a C++ Addon, named as `binding`, -that exports a `foo` function. All the symbols will be loaded before +The example below shows how to load a C++ Addon, named `local.node`, +that exports a `foo` function. All the symbols are loaded before the call returns, by passing the `RTLD_NOW` constant. In this example the constant is assumed to be available. 
```js const os = require('os'); -process.dlopen(module, require.resolve('binding'), +const path = require('path'); +const module = { exports: {} }; +process.dlopen(module, path.join(__dirname, 'local.node'), os.constants.dlopen.RTLD_NOW); module.exports.foo(); ``` @@ -2678,7 +2676,6 @@ cases: [`readable.read()`]: stream.md#stream_readable_read_size [`require()`]: globals.md#globals_require [`require.main`]: modules.md#modules_accessing_the_main_module -[`require.resolve()`]: modules.md#modules_require_resolve_request_options [`subprocess.kill()`]: child_process.md#child_process_subprocess_kill_signal [`v8.setFlagsFromString()`]: v8.md#v8_v8_setflagsfromstring_flags [debugger]: debugger.md From 89da0c3353b9c8a9232baa0c360739fab8cfcbd6 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Fri, 13 Nov 2020 13:07:36 -0800 Subject: [PATCH 45/98] n-api: factor out calling pattern Factor out how we handle a `napi_status`-valued return internally. Signed-off-by: Gabriel Schulhof PR-URL: https://github.com/nodejs/node/pull/36113 Reviewed-By: Stephen Belanger Reviewed-By: Rich Trott --- src/js_native_api_v8.cc | 48 ++++++++++++----------------------------- src/js_native_api_v8.h | 6 ++++++ src/node_api.cc | 5 +---- 3 files changed, 21 insertions(+), 38 deletions(-) diff --git a/src/js_native_api_v8.cc b/src/js_native_api_v8.cc index e7a16401369f42..0a3494ce700c93 100644 --- a/src/js_native_api_v8.cc +++ b/src/js_native_api_v8.cc @@ -816,12 +816,7 @@ napi_status napi_define_class(napi_env env, } v8::Local property_name; - napi_status status = - v8impl::V8NameFromPropertyDescriptor(env, p, &property_name); - - if (status != napi_ok) { - return napi_set_last_error(env, status); - } + STATUS_CALL(v8impl::V8NameFromPropertyDescriptor(env, p, &property_name)); v8::PropertyAttribute attributes = v8impl::V8PropertyAttributesFromDescriptor(p); @@ -888,12 +883,10 @@ napi_status napi_define_class(napi_env env, } } - napi_status status = - napi_define_properties(env, - 
*result, - static_descriptors.size(), - static_descriptors.data()); - if (status != napi_ok) return status; + STATUS_CALL(napi_define_properties(env, + *result, + static_descriptors.size(), + static_descriptors.data())); } return GET_RETURN_STATUS(env); @@ -1268,12 +1261,7 @@ napi_status napi_define_properties(napi_env env, const napi_property_descriptor* p = &properties[i]; v8::Local property_name; - napi_status status = - v8impl::V8NameFromPropertyDescriptor(env, p, &property_name); - - if (status != napi_ok) { - return napi_set_last_error(env, status); - } + STATUS_CALL(v8impl::V8NameFromPropertyDescriptor(env, p, &property_name)); if (p->getter != nullptr || p->setter != nullptr) { v8::Local local_getter; @@ -1724,8 +1712,7 @@ napi_status napi_create_error(napi_env env, v8::Local error_obj = v8::Exception::Error(message_value.As()); - napi_status status = set_error_code(env, error_obj, code, nullptr); - if (status != napi_ok) return status; + STATUS_CALL(set_error_code(env, error_obj, code, nullptr)); *result = v8impl::JsValueFromV8LocalValue(error_obj); @@ -1745,8 +1732,7 @@ napi_status napi_create_type_error(napi_env env, v8::Local error_obj = v8::Exception::TypeError(message_value.As()); - napi_status status = set_error_code(env, error_obj, code, nullptr); - if (status != napi_ok) return status; + STATUS_CALL(set_error_code(env, error_obj, code, nullptr)); *result = v8impl::JsValueFromV8LocalValue(error_obj); @@ -1766,8 +1752,7 @@ napi_status napi_create_range_error(napi_env env, v8::Local error_obj = v8::Exception::RangeError(message_value.As()); - napi_status status = set_error_code(env, error_obj, code, nullptr); - if (status != napi_ok) return status; + STATUS_CALL(set_error_code(env, error_obj, code, nullptr)); *result = v8impl::JsValueFromV8LocalValue(error_obj); @@ -1947,8 +1932,7 @@ napi_status napi_throw_error(napi_env env, CHECK_NEW_FROM_UTF8(env, str, msg); v8::Local error_obj = v8::Exception::Error(str); - napi_status status = set_error_code(env, 
error_obj, nullptr, code); - if (status != napi_ok) return status; + STATUS_CALL(set_error_code(env, error_obj, nullptr, code)); isolate->ThrowException(error_obj); // any VM calls after this point and before returning @@ -1966,8 +1950,7 @@ napi_status napi_throw_type_error(napi_env env, CHECK_NEW_FROM_UTF8(env, str, msg); v8::Local error_obj = v8::Exception::TypeError(str); - napi_status status = set_error_code(env, error_obj, nullptr, code); - if (status != napi_ok) return status; + STATUS_CALL(set_error_code(env, error_obj, nullptr, code)); isolate->ThrowException(error_obj); // any VM calls after this point and before returning @@ -1985,8 +1968,7 @@ napi_status napi_throw_range_error(napi_env env, CHECK_NEW_FROM_UTF8(env, str, msg); v8::Local error_obj = v8::Exception::RangeError(str); - napi_status status = set_error_code(env, error_obj, nullptr, code); - if (status != napi_ok) return status; + STATUS_CALL(set_error_code(env, error_obj, nullptr, code)); isolate->ThrowException(error_obj); // any VM calls after this point and before returning @@ -2785,15 +2767,13 @@ napi_status napi_create_external_arraybuffer(napi_env env, // and is able to use napi_env. Implementing that properly is hard, so use the // `Buffer` variant for easier implementation. 
napi_value buffer; - napi_status status; - status = napi_create_external_buffer( + STATUS_CALL(napi_create_external_buffer( env, byte_length, external_data, finalize_cb, finalize_hint, - &buffer); - if (status != napi_ok) return status; + &buffer)); return napi_get_typedarray_info( env, buffer, diff --git a/src/js_native_api_v8.h b/src/js_native_api_v8.h index 06b8049ec46db0..1a62782c1ad24f 100644 --- a/src/js_native_api_v8.h +++ b/src/js_native_api_v8.h @@ -337,4 +337,10 @@ class TryCatch : public v8::TryCatch { } // end of namespace v8impl +#define STATUS_CALL(call) \ + do { \ + napi_status status = (call); \ + if (status != napi_ok) return status; \ + } while (0) + #endif // SRC_JS_NATIVE_API_V8_H_ diff --git a/src/node_api.cc b/src/node_api.cc index 4e932c19c2bf8a..f1a5265b6a7234 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -1130,11 +1130,8 @@ napi_status napi_queue_async_work(napi_env env, napi_async_work work) { CHECK_ENV(env); CHECK_ARG(env, work); - napi_status status; uv_loop_t* event_loop = nullptr; - status = napi_get_uv_event_loop(env, &event_loop); - if (status != napi_ok) - return napi_set_last_error(env, status); + STATUS_CALL(napi_get_uv_event_loop(env, &event_loop)); uvimpl::Work* w = reinterpret_cast(work); From eb9295b5839bb7a2a3d1da37d18c0f8e2e4d9763 Mon Sep 17 00:00:00 2001 From: Benjamin Gruenbaum Date: Wed, 11 Nov 2020 14:52:07 +0200 Subject: [PATCH 46/98] promise: emit error on domain unhandled rejections PR-URL: https://github.com/nodejs/node/pull/36082 Reviewed-By: Ruben Bridgewater Reviewed-By: Yongsheng Zhang Reviewed-By: Rich Trott --- lib/internal/process/promises.js | 22 ++++++++++------ test/parallel/test-domain-promise.js | 10 ++++++++ .../test-promises-unhandled-rejections.js | 25 ------------------- 3 files changed, 24 insertions(+), 33 deletions(-) diff --git a/lib/internal/process/promises.js b/lib/internal/process/promises.js index 2b806e7e8e41b4..ad21152dd12a08 100644 --- 
a/lib/internal/process/promises.js +++ b/lib/internal/process/promises.js @@ -117,7 +117,8 @@ function unhandledRejection(promise, reason) { maybeUnhandledPromises.set(promise, { reason, uid: ++lastPromiseId, - warned: false + warned: false, + domain: process.domain }); // This causes the promise to be referenced at least for one tick. pendingUnhandledRejections.push(promise); @@ -192,26 +193,32 @@ function processPromiseRejections() { } promiseInfo.warned = true; const { reason, uid } = promiseInfo; + function emit(reason, promise, promiseInfo) { + if (promiseInfo.domain) { + return promiseInfo.domain.emit('error', reason); + } + return process.emit('unhandledRejection', reason, promise); + } switch (unhandledRejectionsMode) { case kStrictUnhandledRejections: { const err = reason instanceof Error ? reason : generateUnhandledRejectionError(reason); triggerUncaughtException(err, true /* fromPromise */); - const handled = process.emit('unhandledRejection', reason, promise); + const handled = emit(reason, promise, promiseInfo); if (!handled) emitUnhandledRejectionWarning(uid, reason); break; } case kIgnoreUnhandledRejections: { - process.emit('unhandledRejection', reason, promise); + emit(reason, promise, promiseInfo); break; } case kAlwaysWarnUnhandledRejections: { - process.emit('unhandledRejection', reason, promise); + emit(reason, promise, promiseInfo); emitUnhandledRejectionWarning(uid, reason); break; } case kThrowUnhandledRejections: { - const handled = process.emit('unhandledRejection', reason, promise); + const handled = emit(reason, promise, promiseInfo); if (!handled) { const err = reason instanceof Error ? 
reason : generateUnhandledRejectionError(reason); @@ -220,7 +227,7 @@ function processPromiseRejections() { break; } case kWarnWithErrorCodeUnhandledRejections: { - const handled = process.emit('unhandledRejection', reason, promise); + const handled = emit(reason, promise, promiseInfo); if (!handled) { emitUnhandledRejectionWarning(uid, reason); process.exitCode = 1; @@ -266,10 +273,9 @@ function generateUnhandledRejectionError(reason) { function listenForRejections() { setPromiseRejectCallback(promiseRejectHandler); } - module.exports = { hasRejectionToWarn, setHasRejectionToWarn, listenForRejections, - processPromiseRejections + processPromiseRejections, }; diff --git a/test/parallel/test-domain-promise.js b/test/parallel/test-domain-promise.js index 2a127f3d40272b..d3b24eba9fb8ec 100644 --- a/test/parallel/test-domain-promise.js +++ b/test/parallel/test-domain-promise.js @@ -126,3 +126,13 @@ process.on('warning', common.mustNotCall()); })); })); } +{ + // Unhandled rejections become errors on the domain + const d = domain.create(); + d.on('error', common.mustCall((e) => { + assert.strictEqual(e.message, 'foo'); + })); + d.run(common.mustCall(() => { + Promise.reject(new Error('foo')); + })); +} diff --git a/test/parallel/test-promises-unhandled-rejections.js b/test/parallel/test-promises-unhandled-rejections.js index bfde806b0572ec..fc2ad945bae36b 100644 --- a/test/parallel/test-promises-unhandled-rejections.js +++ b/test/parallel/test-promises-unhandled-rejections.js @@ -1,7 +1,6 @@ 'use strict'; const common = require('../common'); const assert = require('assert'); -const domain = require('domain'); const { inspect } = require('util'); common.disableCrashOnUnhandledRejection(); @@ -622,30 +621,6 @@ asyncTest('setImmediate + promise microtasks is too late to attach a catch' + }); }); -asyncTest( - 'Promise unhandledRejection handler does not interfere with domain' + - ' error handlers being given exceptions thrown from nextTick.', - function(done) { - const d = 
domain.create(); - let domainReceivedError; - d.on('error', function(e) { - domainReceivedError = e; - }); - d.run(function() { - const e = new Error('error'); - const domainError = new Error('domain error'); - onUnhandledSucceed(done, function(reason, promise) { - assert.strictEqual(reason, e); - assert.strictEqual(domainReceivedError, domainError); - }); - Promise.reject(e); - process.nextTick(function() { - throw domainError; - }); - }); - } -); - asyncTest('nextTick is immediately scheduled when called inside an event' + ' handler', function(done) { clean(); From 9ce9b016e623f149292f1dfe3037246810c84b86 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Fri, 6 Nov 2020 08:09:42 -0800 Subject: [PATCH 47/98] events: add max listener warning for EventTarget Signed-off-by: James M Snell PR-URL: https://github.com/nodejs/node/pull/36001 Fixes: https://github.com/nodejs/node/issues/35990 Reviewed-By: Benjamin Gruenbaum Reviewed-By: Rich Trott --- doc/api/events.md | 23 ++++++ lib/events.js | 47 +++++++++++ lib/internal/event_target.js | 59 +++++++------- .../test-eventtarget-memoryleakwarning.js | 79 +++++++++++++++++++ 4 files changed, 178 insertions(+), 30 deletions(-) create mode 100644 test/parallel/test-eventtarget-memoryleakwarning.js diff --git a/doc/api/events.md b/doc/api/events.md index 5c6b058123ca57..1ce815ce92cbe3 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -383,6 +383,29 @@ Installing a listener using this symbol does not change the behavior once an `'error'` event is emitted, therefore the process will still crash if no regular `'error'` listener is installed. +### `EventEmitter.setMaxListeners(n[, ...eventTargets])` + + +* `n` {number} A non-negative number. The maximum number of listeners per + `EventTarget` event. +* `...eventsTargets` {EventTarget[]|EventEmitter[]} Zero or more {EventTarget} + or {EventEmitter} instances. 
If none are specified, `n` is set as the default + max for all newly created {EventTarget} and {EventEmitter} objects. + +```js +const { + setMaxListeners, + EventEmitter +} = require('events'); + +const target = new EventTarget(); +const emitter = new EventEmitter(); + +setMaxListeners(5, target, emitter); +``` + ### `emitter.addListener(eventName, listener)` -> Stability: 1 - Experimental +> Stability: 2 - Stable ## Introduction @@ -61,15 +65,9 @@ console.log(addTwo(4)); ``` Node.js fully supports ECMAScript modules as they are currently specified and -provides limited interoperability between them and the existing module format, +provides interoperability between them and its original module format, [CommonJS][]. -Node.js contains support for ES Modules based upon the -[Node.js EP for ES Modules][] and the [ECMAScript-modules implementation][]. - -Expect major changes in the implementation including interoperability support, -specifier resolution, and default behavior. - @@ -1300,9 +1298,7 @@ success! 
[Core modules]: modules.md#modules_core_modules [Dynamic `import()`]: https://wiki.developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import#Dynamic_Imports [ECMAScript Top-Level `await` proposal]: https://github.com/tc39/proposal-top-level-await/ -[ECMAScript-modules implementation]: https://github.com/nodejs/modules/blob/master/doc/plan-for-new-modules-implementation.md [ES Module Integration Proposal for Web Assembly]: https://github.com/webassembly/esm-integration -[Node.js EP for ES Modules]: https://github.com/nodejs/node-eps/blob/master/002-es-modules.md [Node.js Module Resolution Algorithm]: #esm_resolver_algorithm_specification [Terminology]: #esm_terminology [URL]: https://url.spec.whatwg.org/ From 73bb54af773dbc8bb8b4b6be6a3b03c3b24d2e84 Mon Sep 17 00:00:00 2001 From: Daijiro Wachi Date: Sat, 7 Nov 2020 17:25:06 +0900 Subject: [PATCH 66/98] test: update wpt url and resource MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://github.com/web-platform-tests/wpt/pull/26317 PR-URL: https://github.com/nodejs/node/pull/36032 Reviewed-By: Rich Trott Reviewed-By: Michaël Zasso Reviewed-By: Joyee Cheung --- test/fixtures/wpt/README.md | 6 +- test/fixtures/wpt/interfaces/html.idl | 26 +- .../fixtures/wpt/resources/test-only-api.m.js | 5 + .../wpt/resources/test-only-api.m.js.headers | 2 + .../wpt/resources/testdriver-actions.js | 109 +++++- test/fixtures/wpt/resources/testdriver.js | 321 +++++++----------- test/fixtures/wpt/resources/testharness.js | 93 ++++- test/fixtures/wpt/url/README.md | 4 + .../wpt/url/percent-encoding.window.js | 33 ++ .../wpt/url/resources/percent-encoding.json | 48 +++ test/fixtures/wpt/versions.json | 8 +- test/wpt/status/url.json | 4 + 12 files changed, 437 insertions(+), 222 deletions(-) create mode 100644 test/fixtures/wpt/resources/test-only-api.m.js create mode 100644 
test/fixtures/wpt/resources/test-only-api.m.js.headers create mode 100644 test/fixtures/wpt/url/percent-encoding.window.js create mode 100644 test/fixtures/wpt/url/resources/percent-encoding.json diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index 096c876f7b2956..855fca6d179ff6 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -12,9 +12,9 @@ Last update: - console: https://github.com/web-platform-tests/wpt/tree/3b1f72e99a/console - encoding: https://github.com/web-platform-tests/wpt/tree/1821fb5f77/encoding -- url: https://github.com/web-platform-tests/wpt/tree/54c6d64be0/url -- resources: https://github.com/web-platform-tests/wpt/tree/1d14e821b9/resources -- interfaces: https://github.com/web-platform-tests/wpt/tree/15e47f779c/interfaces +- url: https://github.com/web-platform-tests/wpt/tree/09d8830be1/url +- resources: https://github.com/web-platform-tests/wpt/tree/001e50de41/resources +- interfaces: https://github.com/web-platform-tests/wpt/tree/8719553b2d/interfaces - html/webappapis/microtask-queuing: https://github.com/web-platform-tests/wpt/tree/2c5c3c4c27/html/webappapis/microtask-queuing - html/webappapis/timers: https://github.com/web-platform-tests/wpt/tree/264f12bc7b/html/webappapis/timers - hr-time: https://github.com/web-platform-tests/wpt/tree/a5d1774ecf/hr-time diff --git a/test/fixtures/wpt/interfaces/html.idl b/test/fixtures/wpt/interfaces/html.idl index e92bb8690dddd3..bf8da3733ec26f 100644 --- a/test/fixtures/wpt/interfaces/html.idl +++ b/test/fixtures/wpt/interfaces/html.idl @@ -436,7 +436,6 @@ interface HTMLIFrameElement : HTMLElement { [SameObject, PutForwards=value] readonly attribute DOMTokenList sandbox; [CEReactions] attribute DOMString allow; [CEReactions] attribute boolean allowFullscreen; - [CEReactions] attribute boolean 
allowPaymentRequest; [CEReactions] attribute DOMString width; [CEReactions] attribute DOMString height; [CEReactions] attribute DOMString referrerPolicy; @@ -1564,16 +1563,18 @@ dictionary ElementDefinitionOptions { [Exposed=Window] interface ElementInternals { - // Form-associated custom elements + // Shadow root access + readonly attribute ShadowRoot? shadowRoot; + // Form-associated custom elements undefined setFormValue((File or USVString or FormData)? value, - optional (File or USVString or FormData)? state); + optional (File or USVString or FormData)? state); readonly attribute HTMLFormElement? form; undefined setValidity(optional ValidityStateFlags flags = {}, - optional DOMString message, - optional HTMLElement anchor); + optional DOMString message, + optional HTMLElement anchor); readonly attribute boolean willValidate; readonly attribute ValidityState validity; readonly attribute DOMString validationMessage; @@ -1583,6 +1584,9 @@ interface ElementInternals { readonly attribute NodeList labels; }; +// Accessibility semantics +ElementInternals includes ARIAMixin; + dictionary ValidityStateFlags { boolean valueMissing = false; boolean typeMismatch = false; @@ -2353,6 +2357,18 @@ interface WorkerLocation { readonly attribute USVString hash; }; +[Exposed=Worklet, SecureContext] +interface WorkletGlobalScope {}; + +[Exposed=Window, SecureContext] +interface Worklet { + [NewObject] Promise addModule(USVString moduleURL, optional WorkletOptions options = {}); +}; + +dictionary WorkletOptions { + RequestCredentials credentials = "same-origin"; +}; + [Exposed=Window] interface Storage { readonly attribute unsigned long length; diff --git a/test/fixtures/wpt/resources/test-only-api.m.js b/test/fixtures/wpt/resources/test-only-api.m.js new file mode 100644 index 00000000000000..984f635abac002 --- /dev/null +++ b/test/fixtures/wpt/resources/test-only-api.m.js @@ -0,0 +1,5 @@ +/* Whether the browser is Chromium-based with MojoJS enabled */ +export const isChromiumBased 
= 'MojoInterfaceInterceptor' in self; + +/* Whether the browser is WebKit-based with internal test-only API enabled */ +export const isWebKitBased = !isChromiumBased && 'internals' in self; diff --git a/test/fixtures/wpt/resources/test-only-api.m.js.headers b/test/fixtures/wpt/resources/test-only-api.m.js.headers new file mode 100644 index 00000000000000..5e8f640c6659d1 --- /dev/null +++ b/test/fixtures/wpt/resources/test-only-api.m.js.headers @@ -0,0 +1,2 @@ +Content-Type: text/javascript; charset=utf-8 +Cache-Control: max-age=3600 diff --git a/test/fixtures/wpt/resources/testdriver-actions.js b/test/fixtures/wpt/resources/testdriver-actions.js index 870a2e8e266780..f3e6388e8acd19 100644 --- a/test/fixtures/wpt/resources/testdriver-actions.js +++ b/test/fixtures/wpt/resources/testdriver-actions.js @@ -9,6 +9,7 @@ function Actions(defaultTickDuration=16) { this.sourceTypes = new Map([["key", KeySource], ["pointer", PointerSource], + ["wheel", WheelSource], ["none", GeneralSource]]); this.sources = new Map(); this.sourceOrder = []; @@ -22,6 +23,7 @@ this.createSource("none"); this.tickIdx = 0; this.defaultTickDuration = defaultTickDuration; + this.context = null; } Actions.prototype = { @@ -65,7 +67,17 @@ } catch(e) { return Promise.reject(e); } - return test_driver.action_sequence(actions); + return test_driver.action_sequence(actions, this.context); + }, + + /** + * Set the context for the actions + * + * @param {WindowProxy} context - Context in which to run the action sequence + */ + setContext: function(context) { + this.context = context; + return this; }, /** @@ -73,7 +85,7 @@ * If no name is passed, a new source with the given type is * created. * - * @param {String} type - Source type ('none', 'key', or 'pointer') + * @param {String} type - Source type ('none', 'key', 'pointer', or 'wheel') * @param {String?} name - Name of the source * @returns {Source} Source object for that source. 
*/ @@ -154,6 +166,32 @@ return this; }, + /** + * Add a new wheel input source with the given name + * + * @param {String} type - Name of the wheel source + * @param {Bool} set - Set source as the default wheel source + * @returns {Actions} + */ + addWheel: function(name, set=true) { + this.createSource("wheel", name); + if (set) { + this.setWheel(name); + } + return this; + }, + + /** + * Set the current default wheel source + * + * @param {String} name - Name of the wheel source + * @returns {Actions} + */ + setWheel: function(name) { + this.setSource("wheel", name); + return this; + }, + createSource: function(type, name, parameters={}) { if (!this.sources.has(type)) { throw new Error(`${type} is not a valid action type`); @@ -196,8 +234,9 @@ * * @param {Number?} duration - Minimum length of the tick in ms. * @param {String} sourceType - source type - * @param {String?} sourceName - Named key or pointer source to use or null for the default - * key or pointer source + * @param {String?} sourceName - Named key, pointer or wheel source to use + * or null for the default key, pointer or + * wheel source * @returns {Actions} */ pause: function(duration=0, sourceType="none", {sourceName=null}={}) { @@ -280,6 +319,27 @@ source.pointerMove(this, x, y, duration, origin); return this; }, + + /** + * Create a scroll event for the current default wheel source + * + * @param {Number} x - mouse cursor x coordinate + * @param {Number} y - mouse cursor y coordinate + * @param {Number} deltaX - scroll delta value along the x-axis in pixels + * @param {Number} deltaY - scroll delta value along the y-axis in pixels + * @param {String|Element} origin - Origin of the coordinate system. 
+ * Either "viewport" or an Element + * @param {Number?} duration - Time in ms for the scroll + * @param {String?} sourceName - Named wheel source to use or null for the + * default wheel source + * @returns {Actions} + */ + scroll: function(x, y, deltaX, deltaY, + {origin="viewport", duration, sourceName=null}={}) { + let source = this.getSource("wheel", sourceName); + source.scroll(this, x, y, deltaX, deltaY, duration, origin); + return this; + }, }; function GeneralSource() { @@ -417,5 +477,46 @@ }, }; + function WheelSource() { + this.actions = new Map(); + } + + WheelSource.prototype = { + serialize: function(tickCount) { + if (!this.actions.size) { + return undefined; + } + let actions = []; + let data = {"type": "wheel", "actions": actions}; + for (let i=0; i button.addEventListener("click", resolve)); - test_driver.click(button).catch(reject); - }).then(function() { + return test_driver.click(button) + .then(wait_click) + .then(function() { button.remove(); if (typeof action === "function") { return action(); } + return null; }); }, @@ -95,14 +127,6 @@ * the cases the WebDriver command errors */ click: function(element) { - if (window.top !== window) { - return Promise.reject(new Error("can only click in top-level window")); - } - - if (!window.document.contains(element)) { - return Promise.reject(new Error("element in different document or shadow tree")); - } - if (!inView(element)) { element.scrollIntoView({behavior: "instant", block: "end", @@ -135,14 +159,6 @@ * the cases the WebDriver command errors */ send_keys: function(element, keys) { - if (window.top !== window) { - return Promise.reject(new Error("can only send keys in top-level window")); - } - - if (!window.document.contains(element)) { - return Promise.reject(new Error("element in different document or shadow tree")); - } - if (!inView(element)) { element.scrollIntoView({behavior: "instant", block: "end", @@ -166,34 +182,42 @@ * 
https://github.com/WICG/page-lifecycle/blob/master/README.md|Lifecycle API * for Web Pages} * + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. + * * @returns {Promise} fulfilled after the freeze request is sent, or rejected * in case the WebDriver command errors */ - freeze: function() { + freeze: function(context=null) { return window.test_driver_internal.freeze(); }, /** * Send a sequence of actions * - * This function sends a sequence of actions to the top level window + * This function sends a sequence of actions * to perform. It is modeled after the behaviour of {@link * https://w3c.github.io/webdriver/#actions|WebDriver Actions Command} * * @param {Array} actions - an array of actions. The format is the same as the actions - property of the WebDriver command {@link - https://w3c.github.io/webdriver/#perform-actions|Perform - Actions} command. Each element is an object representing an - input source and each input source itself has an actions - property detailing the behaviour of that source at each timestep - (or tick). Authors are not expected to construct the actions - sequence by hand, but to use the builder api provided in - testdriver-actions.js + * property of the WebDriver command {@link + * https://w3c.github.io/webdriver/#perform-actions|Perform + * Actions} command. Each element is an object representing an + * input source and each input source itself has an actions + * property detailing the behaviour of that source at each timestep + * (or tick). Authors are not expected to construct the actions + * sequence by hand, but to use the builder api provided in + * testdriver-actions.js + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. 
+ * * @returns {Promise} fufiled after the actions are performed, or rejected in * the cases the WebDriver command errors */ - action_sequence: function(actions) { - return window.test_driver_internal.action_sequence(actions); + action_sequence: function(actions, context=null) { + return window.test_driver_internal.action_sequence(actions, context); }, /** @@ -203,11 +227,15 @@ * by ReportingObserver) for testing purposes, as described in * {@link https://w3c.github.io/reporting/#generate-test-report-command} * + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. + * * @returns {Promise} fulfilled after the report is generated, or * rejected if the report generation fails */ - generate_test_report: function(message) { - return window.test_driver_internal.generate_test_report(message); + generate_test_report: function(message, context=null) { + return window.test_driver_internal.generate_test_report(message, context); }, /** @@ -221,6 +249,9 @@ * object * @param {String} state - the state of the permission * @param {boolean} one_realm - Optional. Whether the permission applies to only one realm + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. 
* * The above params are used to create a [PermissionSetParameters]{@link * https://w3c.github.io/permissions/#dictdef-permissionsetparameters} object @@ -228,13 +259,13 @@ * @returns {Promise} fulfilled after the permission is set, or rejected if setting the * permission fails */ - set_permission: function(descriptor, state, one_realm) { + set_permission: function(descriptor, state, one_realm, context=null) { let permission_params = { descriptor, state, oneRealm: one_realm, }; - return window.test_driver_internal.set_permission(permission_params); + return window.test_driver_internal.set_permission(permission_params, context); }, /** @@ -247,12 +278,16 @@ * @param {Object} config - an [Authenticator Configuration]{@link * https://w3c.github.io/webauthn/#authenticator-configuration} * object + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. + * * @returns {Promise} fulfilled after the authenticator is added, or * rejected in the cases the WebDriver command * errors. Returns the ID of the authenticator */ - add_virtual_authenticator: function(config) { - return window.test_driver_internal.add_virtual_authenticator(config); + add_virtual_authenticator: function(config, context=null) { + return window.test_driver_internal.add_virtual_authenticator(config, context); }, /** @@ -264,13 +299,16 @@ * * @param {String} authenticator_id - the ID of the authenticator to be * removed. + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. 
* * @returns {Promise} fulfilled after the authenticator is removed, or * rejected in the cases the WebDriver command * errors */ - remove_virtual_authenticator: function(authenticator_id) { - return window.test_driver_internal.remove_virtual_authenticator(authenticator_id); + remove_virtual_authenticator: function(authenticator_id, context=null) { + return window.test_driver_internal.remove_virtual_authenticator(authenticator_id, context); }, /** @@ -282,13 +320,16 @@ * @param {Object} credential - A [Credential Parameters]{@link * https://w3c.github.io/webauthn/#credential-parameters} * object + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. * * @returns {Promise} fulfilled after the credential is added, or * rejected in the cases the WebDriver command * errors */ - add_credential: function(authenticator_id, credential) { - return window.test_driver_internal.add_credential(authenticator_id, credential); + add_credential: function(authenticator_id, credential, context=null) { + return window.test_driver_internal.add_credential(authenticator_id, credential, context); }, /** @@ -300,6 +341,9 @@ * https://w3c.github.io/webauthn/#sctn-automation-get-credentials * * @param {String} authenticator_id - the ID of the authenticator + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. 
* * @returns {Promise} fulfilled after the credentials are returned, or * rejected in the cases the WebDriver command @@ -307,8 +351,8 @@ * Parameters]{@link * https://w3c.github.io/webauthn/#credential-parameters} */ - get_credentials: function(authenticator_id) { - return window.test_driver_internal.get_credentials(authenticator_id); + get_credentials: function(authenticator_id, context=null) { + return window.test_driver_internal.get_credentials(authenticator_id, context=null); }, /** @@ -318,13 +362,16 @@ * * @param {String} authenticator_id - the ID of the authenticator * @param {String} credential_id - the ID of the credential + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. * * @returns {Promise} fulfilled after the credential is removed, or * rejected in the cases the WebDriver command * errors. */ - remove_credential: function(authenticator_id, credential_id) { - return window.test_driver_internal.remove_credential(authenticator_id, credential_id); + remove_credential: function(authenticator_id, credential_id, context=null) { + return window.test_driver_internal.remove_credential(authenticator_id, credential_id, context); }, /** @@ -333,13 +380,16 @@ * https://w3c.github.io/webauthn/#sctn-automation-remove-all-credentials * * @param {String} authenticator_id - the ID of the authenticator + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. * * @returns {Promise} fulfilled after the credentials are removed, or * rejected in the cases the WebDriver command * errors. 
*/ - remove_all_credentials: function(authenticator_id) { - return window.test_driver_internal.remove_all_credentials(authenticator_id); + remove_all_credentials: function(authenticator_id, context=null) { + return window.test_driver_internal.remove_all_credentials(authenticator_id, context); }, /** @@ -351,9 +401,12 @@ * * @param {String} authenticator_id - the ID of the authenticator * @param {boolean} uv - the User Verified flag + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. */ - set_user_verified: function(authenticator_id, uv) { - return window.test_driver_internal.set_user_verified(authenticator_id, uv); + set_user_verified: function(authenticator_id, uv, context=null) { + return window.test_driver_internal.set_user_verified(authenticator_id, uv, context); }, /** @@ -370,16 +423,19 @@ * May be "*" to indicate all origins. * @param {String} state - The storage access setting. * Must be either "allowed" or "blocked". + * @param {WindowProxy} context - Browsing context in which + * to run the call, or null for the current + * browsing context. * * @returns {Promise} Fulfilled after the storage access rule has been * set, or rejected if setting the rule fails. 
*/ - set_storage_access: function(origin, embedding_origin, state) { + set_storage_access: function(origin, embedding_origin, state, context=null) { if (state !== "allowed" && state !== "blocked") { throw new Error("storage access status must be 'allowed' or 'blocked'"); } const blocked = state === "blocked"; - return window.test_driver_internal.set_storage_access(origin, embedding_origin, blocked); + return window.test_driver_internal.set_storage_access(origin, embedding_origin, blocked, context); }, }; @@ -392,13 +448,6 @@ */ in_automation: false, - /** - * Waits for a user-initiated click - * - * @param {Element} element - element to be clicked - * @param {{x: number, y: number} coords - viewport coordinates to click at - * @returns {Promise} fulfilled after click occurs - */ click: function(element, coords) { if (this.in_automation) { return Promise.reject(new Error('Not implemented')); @@ -409,14 +458,6 @@ }); }, - /** - * Waits for an element to receive a series of key presses - * - * @param {Element} element - element which should receve key presses - * @param {String} keys - keys to expect - * @returns {Promise} fulfilled after keys are received or rejected if - * an incorrect key sequence is received - */ send_keys: function(element, keys) { if (this.in_automation) { return Promise.reject(new Error('Not implemented')); @@ -449,158 +490,52 @@ }); }, - /** - * Freeze the current page - * - * @returns {Promise} fulfilled after freeze request is sent, otherwise - * it gets rejected - */ - freeze: function() { + freeze: function(context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Send a sequence of pointer actions - * - * @returns {Promise} fufilled after actions are sent, rejected if any actions - * fail - */ - action_sequence: function(actions) { + action_sequence: function(actions, context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Generates a test report on the current page - * - * @param {String} 
message - the message to be contained in the report - * @returns {Promise} fulfilled after the report is generated, or - * rejected if the report generation fails - */ - generate_test_report: function(message) { + generate_test_report: function(message, context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Sets the state of a permission - * - * This function simulates a user setting a permission into a particular state as described - * in {@link https://w3c.github.io/permissions/#set-permission-command} - * - * @param {Object} permission_params - a [PermissionSetParameters]{@lint - * https://w3c.github.io/permissions/#dictdef-permissionsetparameters} - * object - * @returns {Promise} fulfilled after the permission is set, or rejected if setting the - * permission fails - */ - set_permission: function(permission_params) { + set_permission: function(permission_params, context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Creates a virtual authenticator - * - * @param {Object} config - the authenticator configuration - * @returns {Promise} fulfilled after the authenticator is added, or - * rejected in the cases the WebDriver command - * errors. - */ - add_virtual_authenticator: function(config) { + add_virtual_authenticator: function(config, context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Removes a virtual authenticator - * - * @param {String} authenticator_id - the ID of the authenticator to be - * removed. 
- * - * @returns {Promise} fulfilled after the authenticator is removed, or - * rejected in the cases the WebDriver command - * errors - */ - remove_virtual_authenticator: function(authenticator_id) { + remove_virtual_authenticator: function(authenticator_id, context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Adds a credential to a virtual authenticator - * - * @param {String} authenticator_id - the ID of the authenticator - * @param {Object} credential - A [Credential Parameters]{@link - * https://w3c.github.io/webauthn/#credential-parameters} - * object - * - * @returns {Promise} fulfilled after the credential is added, or - * rejected in the cases the WebDriver command - * errors - * - */ - add_credential: function(authenticator_id, credential) { + add_credential: function(authenticator_id, credential, context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Gets all the credentials stored in an authenticator - * - * @param {String} authenticator_id - the ID of the authenticator - * - * @returns {Promise} fulfilled after the credentials are returned, or - * rejected in the cases the WebDriver command - * errors. Returns an array of [Credential - * Parameters]{@link - * https://w3c.github.io/webauthn/#credential-parameters} - * - */ - get_credentials: function(authenticator_id) { + get_credentials: function(authenticator_id, context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Remove a credential stored in an authenticator - * - * @param {String} authenticator_id - the ID of the authenticator - * @param {String} credential_id - the ID of the credential - * - * @returns {Promise} fulfilled after the credential is removed, or - * rejected in the cases the WebDriver command - * errors. 
- * - */ - remove_credential: function(authenticator_id, credential_id) { + remove_credential: function(authenticator_id, credential_id, context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Removes all the credentials stored in a virtual authenticator - * - * @param {String} authenticator_id - the ID of the authenticator - * - * @returns {Promise} fulfilled after the credentials are removed, or - * rejected in the cases the WebDriver command - * errors. - * - */ - remove_all_credentials: function(authenticator_id) { + remove_all_credentials: function(authenticator_id, context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Sets the User Verified flag on an authenticator - * - * @param {String} authenticator_id - the ID of the authenticator - * @param {boolean} uv - the User Verified flag - * - */ - set_user_verified: function(authenticator_id, uv) { + set_user_verified: function(authenticator_id, uv, context=null) { return Promise.reject(new Error("unimplemented")); }, - /** - * Sets the storage access policy for a third-party origin when loaded - * in the current first party context - */ - set_storage_access: function(origin, embedding_origin, blocked) { + set_storage_access: function(origin, embedding_origin, blocked, context=null) { return Promise.reject(new Error("unimplemented")); }, }; diff --git a/test/fixtures/wpt/resources/testharness.js b/test/fixtures/wpt/resources/testharness.js index 61eadfeb6541a9..d50e094117df56 100644 --- a/test/fixtures/wpt/resources/testharness.js +++ b/test/fixtures/wpt/resources/testharness.js @@ -520,6 +520,43 @@ policies and contribution forms [3]. 
Object.prototype.toString.call(worker) == '[object ServiceWorker]'; } + var seen_func_name = Object.create(null); + + function get_test_name(func, name) + { + if (name) { + return name; + } + + if (func) { + var func_code = func.toString(); + + // Try and match with brackets, but fallback to matching without + var arrow = func_code.match(/^\(\)\s*=>\s*(?:{(.*)}\s*|(.*))$/); + + // Check for JS line separators + if (arrow !== null && !/[\u000A\u000D\u2028\u2029]/.test(func_code)) { + var trimmed = (arrow[1] !== undefined ? arrow[1] : arrow[2]).trim(); + // drop trailing ; if there's no earlier ones + trimmed = trimmed.replace(/^([^;]*)(;\s*)+$/, "$1"); + + if (trimmed) { + let name = trimmed; + if (seen_func_name[trimmed]) { + // This subtest name already exists, so add a suffix. + name += " " + seen_func_name[trimmed]; + } else { + seen_func_name[trimmed] = 0; + } + seen_func_name[trimmed] += 1; + return name; + } + } + } + + return test_environment.next_default_test_name(); + } + /* * API functions */ @@ -530,17 +567,18 @@ policies and contribution forms [3]. tests.status.message = '`test` invoked after `promise_setup`'; tests.complete(); } - var test_name = name ? name : test_environment.next_default_test_name(); + var test_name = get_test_name(func, name); var test_obj = new Test(test_name, properties); var value = test_obj.step(func, test_obj, test_obj); if (value !== undefined) { - var msg = "Test named \"" + test_name + - "\" inappropriately returned a value"; + var msg = 'Test named "' + test_name + + '" passed a function to `test` that returned a value.'; try { - if (value && value.hasOwnProperty("then")) { - msg += ", consider using `promise_test` instead"; + if (value && typeof value.then === 'function') { + msg += ' Consider using `promise_test` instead when ' + + 'using Promises or async/await.'; } } catch (err) {} @@ -565,10 +603,33 @@ policies and contribution forms [3]. name = func; func = null; } - var test_name = name ? 
name : test_environment.next_default_test_name(); + var test_name = get_test_name(func, name); var test_obj = new Test(test_name, properties); if (func) { - test_obj.step(func, test_obj, test_obj); + var value = test_obj.step(func, test_obj, test_obj); + + // Test authors sometimes return values to async_test, expecting us + // to handle the value somehow. Make doing so a harness error to be + // clear this is invalid, and point authors to promise_test if it + // may be appropriate. + // + // Note that we only perform this check on the initial function + // passed to async_test, not on any later steps - we haven't seen a + // consistent problem with those (and it's harder to check). + if (value !== undefined) { + var msg = 'Test named "' + test_name + + '" passed a function to `async_test` that returned a value.'; + + try { + if (value && typeof value.then === 'function') { + msg += ' Consider using `promise_test` instead when ' + + 'using Promises or async/await.'; + } + } catch (err) {} + + tests.set_status(tests.status.ERROR, msg); + tests.complete(); + } } return test_obj; } @@ -579,7 +640,7 @@ policies and contribution forms [3]. name = func; func = null; } - var test_name = name ? name : test_environment.next_default_test_name(); + var test_name = get_test_name(func, name); var test = new Test(test_name, properties); test._is_promise_test = true; @@ -1323,10 +1384,16 @@ policies and contribution forms [3]. 
"expected a number but got a ${type_actual}", {type_actual:typeof actual}); - assert(Math.abs(actual - expected) <= epsilon, - "assert_approx_equals", description, - "expected ${expected} +/- ${epsilon} but got ${actual}", - {expected:expected, actual:actual, epsilon:epsilon}); + // The epsilon math below does not place nice with NaN and Infinity + // But in this case Infinity = Infinity and NaN = NaN + if (isFinite(actual) || isFinite(expected)) { + assert(Math.abs(actual - expected) <= epsilon, + "assert_approx_equals", description, + "expected ${expected} +/- ${epsilon} but got ${actual}", + {expected:expected, actual:actual, epsilon:epsilon}); + } else { + assert_equals(actual, expected); + } } expose(assert_approx_equals, "assert_approx_equals"); @@ -3773,7 +3840,7 @@ policies and contribution forms [3]. function get_title() { if ('document' in global_scope) { - //Don't use document.title to work around an Opera bug in XHTML documents + //Don't use document.title to work around an Opera/Presto bug in XHTML documents var title = document.getElementsByTagName("title")[0]; if (title && title.firstChild && title.firstChild.data) { return title.firstChild.data; diff --git a/test/fixtures/wpt/url/README.md b/test/fixtures/wpt/url/README.md index 823a8eec022282..50a71bb482df9e 100644 --- a/test/fixtures/wpt/url/README.md +++ b/test/fixtures/wpt/url/README.md @@ -44,6 +44,10 @@ expected to fail. Tests in `/encoding` and `/html/infrastructure/urls/resolving-urls/query-encoding/` cover the encoding argument to the URL parser. +There's also limited coverage in `resources/percent-encoding.json` for percent-encode after encoding +with _percentEncodeSet_ set to special-query percent-encode set and _spaceAsPlus_ set to false. +(Improvements to expand coverage here are welcome.) + ## Specification The tests in this directory assert conformance with [the URL Standard][URL]. 
diff --git a/test/fixtures/wpt/url/percent-encoding.window.js b/test/fixtures/wpt/url/percent-encoding.window.js new file mode 100644 index 00000000000000..dcb5c1e55b21b7 --- /dev/null +++ b/test/fixtures/wpt/url/percent-encoding.window.js @@ -0,0 +1,33 @@ +promise_test(() => fetch("resources/percent-encoding.json").then(res => res.json()).then(runTests), "Loading data…"); + +function runTests(testUnits) { + for (const testUnit of testUnits) { + // Ignore comments + if (typeof testUnit === "string") { + continue; + } + for (const encoding of Object.keys(testUnit.output)) { + async_test(t => { + const frame = document.body.appendChild(document.createElement("iframe")); + t.add_cleanup(() => frame.remove()); + frame.onload = t.step_func_done(() => { + const output = frame.contentDocument.querySelector("a"); + // Test that the fragment is always UTF-8 encoded + assert_equals(output.hash, `#${testUnit.output["utf-8"]}`, "fragment"); + assert_equals(output.search, `?${testUnit.output[encoding]}`, "query"); + }); + frame.src = `resources/percent-encoding.py?encoding=${encoding}&value=${toBase64(testUnit.input)}`; + }, `Input ${testUnit.input} with encoding ${encoding}`); + } + } +} + +// Use base64 to avoid relying on the URL parser to get UTF-8 percent-encoding correctly. This does +// not use btoa directly as that only works with code points in the range U+0000 to U+00FF, +// inclusive. 
+function toBase64(input) { + const bytes = new TextEncoder().encode(input); + const byteString = Array.from(bytes, byte => String.fromCharCode(byte)).join(""); + const encoded = self.btoa(byteString); + return encoded; +} diff --git a/test/fixtures/wpt/url/resources/percent-encoding.json b/test/fixtures/wpt/url/resources/percent-encoding.json new file mode 100644 index 00000000000000..eccd1db62fe601 --- /dev/null +++ b/test/fixtures/wpt/url/resources/percent-encoding.json @@ -0,0 +1,48 @@ +[ + "Tests for percent-encoding.", + { + "input": "\u2020", + "output": { + "big5": "%26%238224%3B", + "euc-kr": "%A2%D3", + "utf-8": "%E2%80%A0", + "windows-1252": "%86" + } + }, + "This uses a trailing A to prevent the URL parser from trimming the C0 control.", + { + "input": "\u000EA", + "output": { + "big5": "%0EA", + "iso-2022-jp": "%26%2365533%3BA", + "utf-8": "%0EA" + } + }, + { + "input": "\u203E\u005C", + "output": { + "iso-2022-jp": "%1B(J~%1B(B\\", + "utf-8": "%E2%80%BE\\" + } + }, + { + "input": "\uE5E5", + "output": { + "gb18030": "%26%2358853%3B", + "utf-8": "%EE%97%A5" + } + }, + { + "input": "\u2212", + "output": { + "shift_jis": "%81|", + "utf-8": "%E2%88%92" + } + }, + { + "input": "á|", + "output": { + "utf-8": "%C3%A1|" + } + } +] diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json index e18a5e0213fb3e..f1e9b8304d5fda 100644 --- a/test/fixtures/wpt/versions.json +++ b/test/fixtures/wpt/versions.json @@ -8,15 +8,15 @@ "path": "encoding" }, "url": { - "commit": "54c6d64be071c60baaad8c4da0365b962ffbe77c", + "commit": "09d8830be15da7e3a44f32a934609c25357d6ef3", "path": "url" }, "resources": { - "commit": "1d14e821b9586f250e6a31d550504e3d16a05ae7", + "commit": "001e50de41dc35820774b27e31f77a165f4c0b9b", "path": "resources" }, "interfaces": { - "commit": "15e47f779cf61555669b0f67e2c49b9c830b9019", + "commit": "8719553b2dd8f0f39d38253ccac2ee9ab4d6c87b", "path": "interfaces" }, "html/webappapis/microtask-queuing": { @@ -39,4 +39,4 @@ 
"commit": "7caa3de7471cf19b78ee9efa313c7341a462b5e3", "path": "dom/abort" } -} +} \ No newline at end of file diff --git a/test/wpt/status/url.json b/test/wpt/status/url.json index 9038f6df80703e..89601af0ca5814 100644 --- a/test/wpt/status/url.json +++ b/test/wpt/status/url.json @@ -3,6 +3,10 @@ "requires": ["small-icu"], "skip": "TODO: port from .window.js" }, + "percent-encoding.window.js": { + "requires": ["small-icu"], + "skip": "TODO: port from .window.js" + }, "historical.any.js": { "requires": ["small-icu"] }, From 7086f2e6532eb641af9aeb5a762585eb6fdfeaa6 Mon Sep 17 00:00:00 2001 From: raisinten Date: Sun, 15 Nov 2020 20:54:12 +0530 Subject: [PATCH 67/98] src: refactor using-declarations node_env_var.cc PR-URL: https://github.com/nodejs/node/pull/36128 Reviewed-By: Anna Henningsen Reviewed-By: Rich Trott --- src/node_env_var.cc | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/src/node_env_var.cc b/src/node_env_var.cc index 1cf959a6095afd..45f2967f4b0eb7 100644 --- a/src/node_env_var.cc +++ b/src/node_env_var.cc @@ -10,6 +10,8 @@ namespace node { using v8::Array; using v8::Boolean; using v8::Context; +using v8::DontDelete; +using v8::DontEnum; using v8::EscapableHandleScope; using v8::HandleScope; using v8::Integer; @@ -26,6 +28,7 @@ using v8::Object; using v8::ObjectTemplate; using v8::PropertyCallbackInfo; using v8::PropertyHandlerFlags; +using v8::ReadOnly; using v8::String; using v8::Value; @@ -93,10 +96,10 @@ Maybe RealEnvStore::Get(const char* key) const { } if (ret >= 0) { // Env key value fetch success. 
- return v8::Just(std::string(*val, init_sz)); + return Just(std::string(*val, init_sz)); } - return v8::Nothing(); + return Nothing(); } MaybeLocal RealEnvStore::Get(Isolate* isolate, @@ -141,9 +144,9 @@ int32_t RealEnvStore::Query(const char* key) const { #ifdef _WIN32 if (key[0] == '=') { - return static_cast(v8::ReadOnly) | - static_cast(v8::DontDelete) | - static_cast(v8::DontEnum); + return static_cast(ReadOnly) | + static_cast(DontDelete) | + static_cast(DontEnum); } #endif @@ -191,7 +194,7 @@ Local RealEnvStore::Enumerate(Isolate* isolate) const { return Array::New(isolate, env_v.out(), env_v_index); } -std::shared_ptr KVStore::Clone(v8::Isolate* isolate) const { +std::shared_ptr KVStore::Clone(Isolate* isolate) const { HandleScope handle_scope(isolate); Local context = isolate->GetCurrentContext(); @@ -211,7 +214,7 @@ std::shared_ptr KVStore::Clone(v8::Isolate* isolate) const { Maybe MapKVStore::Get(const char* key) const { Mutex::ScopedLock lock(mutex_); auto it = map_.find(key); - return it == map_.end() ? v8::Nothing() : v8::Just(it->second); + return it == map_.end() ? Nothing() : Just(it->second); } MaybeLocal MapKVStore::Get(Isolate* isolate, Local key) const { From bb29508e8fa39e84209f5e44b6fd18cf2a52ec85 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Mon, 16 Nov 2020 07:55:45 -0800 Subject: [PATCH 68/98] doc: add link for v8.takeCoverage() PR-URL: https://github.com/nodejs/node/pull/36135 Reviewed-By: Luigi Pinca Reviewed-By: Harshitha K P --- doc/api/v8.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/api/v8.md b/doc/api/v8.md index a67c6a0f83b24b..8f13042f80b9c6 100644 --- a/doc/api/v8.md +++ b/doc/api/v8.md @@ -247,7 +247,7 @@ added: v15.1.0 The `v8.stopCoverage()` method allows the user to stop the coverage collection started by [`NODE_V8_COVERAGE`][], so that V8 can release the execution count records and optimize code. 
This can be used in conjunction with -`v8.takeCoverage()` if the user wants to collect the coverage on demand. +[`v8.takeCoverage()`][] if the user wants to collect the coverage on demand. ## `v8.writeHeapSnapshot([filename])` -> Stability: 1 - Recursive removal is experimental. - * `path` {string|Buffer|URL} * `options` {Object} * `maxRetries` {integer} If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or From f0aed8c01c11706798da7732e4359011e0e9fb74 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Tue, 17 Nov 2020 00:18:59 +0100 Subject: [PATCH 73/98] http2: add support for TypedArray to getUnpackedSettings PR-URL: https://github.com/nodejs/node/pull/36141 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Rich Trott --- doc/api/http2.md | 2 +- lib/internal/buffer.js | 2 + lib/internal/http2/core.js | 10 ++-- test/parallel/test-http2-getpackedsettings.js | 56 ++++++++++++++++++- 4 files changed, 63 insertions(+), 7 deletions(-) diff --git a/doc/api/http2.md b/doc/api/http2.md index 516b9f6aa10777..9fc447b657b368 100644 --- a/doc/api/http2.md +++ b/doc/api/http2.md @@ -2544,7 +2544,7 @@ console.log(packed.toString('base64')); added: v8.4.0 --> -* `buf` {Buffer|Uint8Array} The packed settings. +* `buf` {Buffer|TypedArray} The packed settings. 
* Returns: {HTTP/2 Settings Object} Returns a [HTTP/2 Settings Object][] containing the deserialized settings from diff --git a/lib/internal/buffer.js b/lib/internal/buffer.js index e23896eac93fba..9eb722e2634deb 100644 --- a/lib/internal/buffer.js +++ b/lib/internal/buffer.js @@ -1067,5 +1067,7 @@ module.exports = { addBufferPrototypeMethods, markAsUntransferable, createUnsafeBuffer, + readUInt16BE, + readUInt32BE, reconnectZeroFillToggle }; diff --git a/lib/internal/http2/core.js b/lib/internal/http2/core.js index db92c301b0b76a..84d0cd5d948b6b 100644 --- a/lib/internal/http2/core.js +++ b/lib/internal/http2/core.js @@ -13,6 +13,7 @@ const { ObjectDefineProperty, ObjectPrototypeHasOwnProperty, Promise, + ReflectApply, ReflectGetPrototypeOf, Set, Symbol, @@ -32,6 +33,7 @@ const assert = require('assert'); const EventEmitter = require('events'); const fs = require('fs'); const http = require('http'); +const { readUInt16BE, readUInt32BE } = require('internal/buffer'); const net = require('net'); const { Duplex } = require('stream'); const tls = require('tls'); @@ -3208,18 +3210,18 @@ function getPackedSettings(settings) { } function getUnpackedSettings(buf, options = {}) { - if (!isArrayBufferView(buf)) { + if (!isArrayBufferView(buf) || buf.length === undefined) { throw new ERR_INVALID_ARG_TYPE('buf', - ['Buffer', 'TypedArray', 'DataView'], buf); + ['Buffer', 'TypedArray'], buf); } if (buf.length % 6 !== 0) throw new ERR_HTTP2_INVALID_PACKED_SETTINGS_LENGTH(); const settings = {}; let offset = 0; while (offset < buf.length) { - const id = buf.readUInt16BE(offset); + const id = ReflectApply(readUInt16BE, buf, [offset]); offset += 2; - const value = buf.readUInt32BE(offset); + const value = ReflectApply(readUInt32BE, buf, [offset]); switch (id) { case NGHTTP2_SETTINGS_HEADER_TABLE_SIZE: settings.headerTableSize = value; diff --git a/test/parallel/test-http2-getpackedsettings.js b/test/parallel/test-http2-getpackedsettings.js index f33c0e916a5d13..374e537d5634aa 
100644 --- a/test/parallel/test-http2-getpackedsettings.js +++ b/test/parallel/test-http2-getpackedsettings.js @@ -133,8 +133,8 @@ http2.getPackedSettings({ enablePush: false }); code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError', message: - 'The "buf" argument must be an instance of Buffer, TypedArray, or ' + - `DataView.${common.invalidArgTypeHelper(input)}` + 'The "buf" argument must be an instance of Buffer or TypedArray.' + + common.invalidArgTypeHelper(input) }); }); @@ -159,6 +159,58 @@ http2.getPackedSettings({ enablePush: false }); assert.strictEqual(settings.enableConnectProtocol, false); } +{ + const packed = new Uint16Array([ + 0x00, 0x01, 0x00, 0x00, 0x00, 0x64, + 0x00, 0x03, 0x00, 0x00, 0x00, 0xc8, + 0x00, 0x05, 0x00, 0x00, 0x4e, 0x20, + 0x00, 0x04, 0x00, 0x00, 0x00, 0x64, + 0x00, 0x06, 0x00, 0x00, 0x00, 0x64, + 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, + 0x00, 0x08, 0x00, 0x00, 0x00, 0x00]); + + assert.throws(() => { + http2.getUnpackedSettings(packed.slice(5)); + }, { + code: 'ERR_HTTP2_INVALID_PACKED_SETTINGS_LENGTH', + name: 'RangeError', + message: 'Packed settings length must be a multiple of six' + }); + + const settings = http2.getUnpackedSettings(packed); + + assert(settings); + assert.strictEqual(settings.headerTableSize, 100); + assert.strictEqual(settings.initialWindowSize, 100); + assert.strictEqual(settings.maxFrameSize, 20000); + assert.strictEqual(settings.maxConcurrentStreams, 200); + assert.strictEqual(settings.maxHeaderListSize, 100); + assert.strictEqual(settings.maxHeaderSize, 100); + assert.strictEqual(settings.enablePush, true); + assert.strictEqual(settings.enableConnectProtocol, false); +} + +{ + const packed = new DataView(Buffer.from([ + 0x00, 0x01, 0x00, 0x00, 0x00, 0x64, + 0x00, 0x03, 0x00, 0x00, 0x00, 0xc8, + 0x00, 0x05, 0x00, 0x00, 0x4e, 0x20, + 0x00, 0x04, 0x00, 0x00, 0x00, 0x64, + 0x00, 0x06, 0x00, 0x00, 0x00, 0x64, + 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, + 0x00, 0x08, 0x00, 0x00, 0x00, 0x00]).buffer); + + assert.throws(() => { + 
http2.getUnpackedSettings(packed); + }, { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: + 'The "buf" argument must be an instance of Buffer or TypedArray.' + + common.invalidArgTypeHelper(packed) + }); +} + { const packed = Buffer.from([ 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, From 1a4d43f840968305e9a1271278463f473cbc9fd3 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sun, 15 Nov 2020 19:18:33 +0100 Subject: [PATCH 74/98] lib: refactor to use more primordials PR-URL: https://github.com/nodejs/node/pull/36140 Reviewed-By: Rich Trott Reviewed-By: James M Snell --- lib/internal/main/print_help.js | 39 ++++++++++++++++++------------ lib/internal/main/worker_thread.js | 11 ++++++--- 2 files changed, 31 insertions(+), 19 deletions(-) diff --git a/lib/internal/main/print_help.js b/lib/internal/main/print_help.js index 0850681882759f..94c81c484880b3 100644 --- a/lib/internal/main/print_help.js +++ b/lib/internal/main/print_help.js @@ -1,12 +1,17 @@ 'use strict'; const { + ArrayPrototypeConcat, + ArrayPrototypeSort, Boolean, - Map, MathFloor, MathMax, ObjectKeys, RegExp, + StringPrototypeTrimLeft, + StringPrototypeRepeat, + StringPrototypeReplace, + SafeMap, } = primordials; const { types } = internalBinding('options'); @@ -23,7 +28,7 @@ for (const key of ObjectKeys(types)) // Environment variables are parsed ad-hoc throughout the code base, // so we gather the documentation here. const { hasIntl, hasSmallICU, hasNodeOptions } = internalBinding('config'); -const envVars = new Map([ +const envVars = new SafeMap(ArrayPrototypeConcat([ ['NODE_DEBUG', { helpText: "','-separated list of core modules that " + 'should print debug information' }], ['NODE_DEBUG_NATIVE', { helpText: "','-separated list of C++ core debug " + @@ -51,28 +56,30 @@ const envVars = new Map([ 'to' }], ['UV_THREADPOOL_SIZE', { helpText: 'sets the number of threads used in ' + 'libuv\'s threadpool' }] -].concat(hasIntl ? [ +], hasIntl ? 
[ ['NODE_ICU_DATA', { helpText: 'data path for ICU (Intl object) data' + hasSmallICU ? '' : ' (will extend linked-in data)' }] -] : []).concat(hasNodeOptions ? [ +] : []), (hasNodeOptions ? [ ['NODE_OPTIONS', { helpText: 'set CLI options in the environment via a ' + 'space-separated list' }] -] : []).concat(hasCrypto ? [ +] : []), hasCrypto ? [ ['OPENSSL_CONF', { helpText: 'load OpenSSL configuration from file' }], ['SSL_CERT_DIR', { helpText: 'sets OpenSSL\'s directory of trusted ' + 'certificates when used in conjunction with --use-openssl-ca' }], ['SSL_CERT_FILE', { helpText: 'sets OpenSSL\'s trusted certificate file ' + 'when used in conjunction with --use-openssl-ca' }], -] : [])); +] : []); function indent(text, depth) { - return text.replace(/^/gm, ' '.repeat(depth)); + return StringPrototypeReplace(text, /^/gm, StringPrototypeRepeat(' ', depth)); } function fold(text, width) { - return text.replace(new RegExp(`([^\n]{0,${width}})( |$)`, 'g'), - (_, newLine, end) => newLine + (end === ' ' ? '\n' : '')); + return StringPrototypeReplace(text, + new RegExp(`([^\n]{0,${width}})( |$)`, 'g'), + (_, newLine, end) => + newLine + (end === ' ' ? 
'\n' : '')); } function getArgDescription(type) { @@ -94,13 +101,15 @@ function getArgDescription(type) { } } -function format({ options, aliases = new Map(), firstColumn, secondColumn }) { +function format( + { options, aliases = new SafeMap(), firstColumn, secondColumn } +) { let text = ''; let maxFirstColumnUsed = 0; for (const [ name, { helpText, type, value } - ] of [...options.entries()].sort()) { + ] of ArrayPrototypeSort([...options.entries()])) { if (!helpText) continue; let displayName = name; @@ -136,12 +145,12 @@ function format({ options, aliases = new Map(), firstColumn, secondColumn }) { text += displayName; maxFirstColumnUsed = MathMax(maxFirstColumnUsed, displayName.length); if (displayName.length >= firstColumn) - text += '\n' + ' '.repeat(firstColumn); + text += '\n' + StringPrototypeRepeat(' ', firstColumn); else - text += ' '.repeat(firstColumn - displayName.length); + text += StringPrototypeRepeat(' ', firstColumn - displayName.length); - text += indent(fold(displayHelpText, secondColumn), - firstColumn).trimLeft() + '\n'; + text += indent(StringPrototypeTrimLeft(fold(displayHelpText, secondColumn), + firstColumn)) + '\n'; } if (maxFirstColumnUsed < firstColumn - 4) { diff --git a/lib/internal/main/worker_thread.js b/lib/internal/main/worker_thread.js index c2a4b67e6e0669..7734aaa2c2011c 100644 --- a/lib/internal/main/worker_thread.js +++ b/lib/internal/main/worker_thread.js @@ -4,7 +4,10 @@ // message port. 
const { + ArrayPrototypeConcat, + ArrayPrototypeSplice, ObjectDefineProperty, + PromisePrototypeCatch, } = primordials; const { @@ -122,7 +125,7 @@ port.on('message', (message) => { loadPreloadModules(); initializeFrozenIntrinsics(); if (argv !== undefined) { - process.argv = process.argv.concat(argv); + process.argv = ArrayPrototypeConcat(process.argv, argv); } publicWorker.parentPort = publicPort; publicWorker.workerData = workerData; @@ -159,18 +162,18 @@ port.on('message', (message) => { enumerable: true, value: filename, }); - process.argv.splice(1, 0, name); + ArrayPrototypeSplice(process.argv, 1, 0, name); evalScript(name, filename); } else if (doEval === 'module') { const { evalModule } = require('internal/process/execution'); - evalModule(filename).catch((e) => { + PromisePrototypeCatch(evalModule(filename), (e) => { workerOnGlobalUncaughtException(e, true); }); } else { // script filename // runMain here might be monkey-patched by users in --require. // XXX: the monkey-patchability here should probably be deprecated. 
- process.argv.splice(1, 0, filename); + ArrayPrototypeSplice(process.argv, 1, 0, filename); CJSLoader.Module.runMain(filename); } } else if (message.type === STDIO_PAYLOAD) { From fcc38a131292e77733eb8c5a6ef1456ddacdda85 Mon Sep 17 00:00:00 2001 From: raisinten Date: Sat, 14 Nov 2020 20:50:03 +0530 Subject: [PATCH 75/98] build: replace which with command -v PR-URL: https://github.com/nodejs/node/pull/36118 Reviewed-By: Antoine du Hamel Reviewed-By: Joyee Cheung Reviewed-By: Rich Trott --- .github/workflows/linters.yml | 8 ++++---- .github/workflows/misc.yml | 2 +- BSDmakefile | 2 +- Makefile | 8 ++++---- android-configure | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index 6c25e775d34a1e..14aed9bfddfdc3 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -24,7 +24,7 @@ jobs: - name: Environment Information run: npx envinfo - name: Lint addon docs - run: NODE=$(which node) make lint-addon-docs + run: NODE=$(command -v node) make lint-addon-docs lint-cpp: runs-on: ubuntu-latest steps: @@ -50,7 +50,7 @@ jobs: - name: Lint docs run: | echo "::add-matcher::.github/workflows/remark-lint-problem-matcher.json" - NODE=$(which node) make lint-md + NODE=$(command -v node) make lint-md lint-js: runs-on: ubuntu-latest steps: @@ -62,7 +62,7 @@ jobs: - name: Environment Information run: npx envinfo - name: Lint JavaScript files - run: NODE=$(which node) make lint-js + run: NODE=$(command -v node) make lint-js lint-py: runs-on: ubuntu-latest steps: @@ -76,7 +76,7 @@ jobs: - name: Lint Python run: | make lint-py-build || true - NODE=$(which node) make lint-py + NODE=$(command -v node) make lint-py lint-codeowners: runs-on: ubuntu-latest diff --git a/.github/workflows/misc.yml b/.github/workflows/misc.yml index 4ee135c5ebc43e..1444dff7262819 100644 --- a/.github/workflows/misc.yml +++ b/.github/workflows/misc.yml @@ -23,7 +23,7 @@ jobs: - name: 
Environment Information run: npx envinfo - name: Build - run: NODE=$(which node) make doc-only + run: NODE=$(command -v node) make doc-only - uses: actions/upload-artifact@v1 with: name: docs diff --git a/BSDmakefile b/BSDmakefile index b2f36fa28720f1..3994ab9efd9a4d 100644 --- a/BSDmakefile +++ b/BSDmakefile @@ -3,7 +3,7 @@ FLAGS=${.MAKEFLAGS:C/\-J ([0-9]+,?)+//W} all: .DEFAULT .DEFAULT: - @which gmake > /dev/null 2>&1 ||\ + @command -v gmake > /dev/null 2>&1 ||\ (echo "GMake is required for node.js to build.\ Install and try again" && exit 1) @gmake ${.FLAGS} ${.TARGETS} diff --git a/Makefile b/Makefile index 7db5b3320f6181..6379f06a2e0b6d 100644 --- a/Makefile +++ b/Makefile @@ -65,8 +65,8 @@ V ?= 0 available-node = \ if [ -x $(PWD)/$(NODE) ] && [ -e $(PWD)/$(NODE) ]; then \ $(PWD)/$(NODE) $(1); \ - elif [ -x `which node` ] && [ -e `which node` ] && [ `which node` ]; then \ - `which node` $(1); \ + elif [ -x `command -v node` ] && [ -e `command -v node` ] && [ `command -v node` ]; then \ + `command -v node` $(1); \ else \ echo "No available node, cannot run \"node $(1)\""; \ exit 1; \ @@ -898,7 +898,7 @@ BINARYNAME=$(TARNAME)-$(PLATFORM)-$(ARCH) endif BINARYTAR=$(BINARYNAME).tar # OSX doesn't have xz installed by default, http://macpkg.sourceforge.net/ -HAS_XZ ?= $(shell which xz > /dev/null 2>&1; [ $$? -eq 0 ] && echo 1 || echo 0) +HAS_XZ ?= $(shell command -v xz > /dev/null 2>&1; [ $$? -eq 0 ] && echo 1 || echo 0) # Supply SKIP_XZ=1 to explicitly skip .tar.xz creation SKIP_XZ ?= 0 XZ = $(shell [ $(HAS_XZ) -eq 1 -a $(SKIP_XZ) -eq 0 ] && echo 1 || echo 0) @@ -1383,7 +1383,7 @@ lint-clean: $(RM) tools/.*lintstamp $(RM) .eslintcache -HAS_DOCKER ?= $(shell which docker > /dev/null 2>&1; [ $$? -eq 0 ] && echo 1 || echo 0) +HAS_DOCKER ?= $(shell command -v docker > /dev/null 2>&1; [ $$? 
-eq 0 ] && echo 1 || echo 0) ifeq ($(HAS_DOCKER), 1) DOCKER_COMMAND ?= docker run -it -v $(PWD):/node diff --git a/android-configure b/android-configure index a7cb2b9c8b4a78..e3f4a721827e84 100755 --- a/android-configure +++ b/android-configure @@ -50,8 +50,8 @@ esac HOST_OS="linux" HOST_ARCH="x86_64" -export CC_host=$(which gcc) -export CXX_host=$(which g++) +export CC_host=$(command -v gcc) +export CXX_host=$(command -v g++) host_gcc_version=$($CC_host --version | grep gcc | awk '{print $NF}') major=$(echo $host_gcc_version | awk -F . '{print $1}') From 2d2491284ea926b62ee7473cc4a7f9e8b717f8ca Mon Sep 17 00:00:00 2001 From: Myles Borins Date: Wed, 18 Nov 2020 16:38:11 -0500 Subject: [PATCH 76/98] tools: only use 2 cores for macos action MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit There are only 2 cores available so we shouldn't be using -j8 Refs: https://docs.github.com/en/free-pro-team@latest/actions/reference/specifications-for-github-hosted-runners#supported-runners-and-hardware-resources PR-URL: https://github.com/nodejs/node/pull/36169 Reviewed-By: Richard Lau Reviewed-By: Michaël Zasso Reviewed-By: James M Snell Reviewed-By: Benjamin Gruenbaum Reviewed-By: Mary Marchini --- .github/workflows/test-macos.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-macos.yml b/.github/workflows/test-macos.yml index 5c5f2b2a1953b8..08e6e91e8f4fbd 100644 --- a/.github/workflows/test-macos.yml +++ b/.github/workflows/test-macos.yml @@ -25,6 +25,6 @@ jobs: - name: Environment Information run: npx envinfo - name: Build - run: make build-ci -j8 V=1 CONFIG_FLAGS="--error-on-warn --experimental-quic" + run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn --experimental-quic" - name: Test - run: make run-ci -j8 V=1 TEST_CI_ARGS="-p actions" + run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions" From 3ee556a8677fca74ce012f1daceb2f22c912ca26 Mon Sep 17 00:00:00 
2001 From: Gabriel Schulhof Date: Tue, 17 Nov 2020 16:43:59 -0800 Subject: [PATCH 77/98] benchmark: fix build warnings The napi/* benchmarks were using an incorrect signature for the V8 add-on init function. This was causing a warning. Signed-off-by: Gabriel Schulhof PR-URL: https://github.com/nodejs/node/pull/36157 Reviewed-By: James M Snell Reviewed-By: Rich Trott Reviewed-By: Luigi Pinca Reviewed-By: Richard Lau --- benchmark/napi/function_args/binding.cc | 4 +++- benchmark/napi/function_call/binding.cc | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/benchmark/napi/function_args/binding.cc b/benchmark/napi/function_args/binding.cc index 2c54dd424d405d..078fe0ee3ea767 100644 --- a/benchmark/napi/function_args/binding.cc +++ b/benchmark/napi/function_args/binding.cc @@ -123,7 +123,9 @@ void CallWithArguments(const FunctionCallbackInfo& args) { } } -void Initialize(Local target) { +void Initialize(Local target, + Local module, + void* data) { NODE_SET_METHOD(target, "callWithString", CallWithString); NODE_SET_METHOD(target, "callWithLongString", CallWithString); diff --git a/benchmark/napi/function_call/binding.cc b/benchmark/napi/function_call/binding.cc index 289a94ac3ecc88..570f96f41ec458 100644 --- a/benchmark/napi/function_call/binding.cc +++ b/benchmark/napi/function_call/binding.cc @@ -7,7 +7,9 @@ void Hello(const v8::FunctionCallbackInfo& args) { args.GetReturnValue().Set(c++); } -void Initialize(v8::Local target) { +void Initialize(v8::Local target, + v8::Local module, + void* data) { NODE_SET_METHOD(target, "hello", Hello); } From 4ab4a99900041c1364b4d7ab1128430adab53d21 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Tue, 17 Nov 2020 20:13:37 -0800 Subject: [PATCH 78/98] doc: de-emphasize wrapping in napi_define_class Change the documentation for `napi_define_class` in such a way that it mentions wrapping C++ class instances as a possible use for the API, rather than making the assumption that it is the use case 
for the API. Signed-off-by: Gabriel Schulhof Co-authored-by: Rich Trott Fixes: https://github.com/nodejs/node/issues/36150 PR-URL: https://github.com/nodejs/node/pull/36159 Reviewed-By: Rich Trott Reviewed-By: Michael Dawson --- doc/api/n-api.md | 55 +++++++++++++++++++++++++++--------------------- 1 file changed, 31 insertions(+), 24 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 615237c76bcae8..9ad15223f11da7 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -4738,14 +4738,15 @@ napi_status napi_define_class(napi_env env, ``` * `[in] env`: The environment that the API is invoked under. -* `[in] utf8name`: Name of the JavaScript constructor function; this is - not required to be the same as the C++ class name, though it is recommended - for clarity. +* `[in] utf8name`: Name of the JavaScript constructor function; When wrapping a + C++ class, we recommend for clarity that this name be the same as that of + the C++ class. * `[in] length`: The length of the `utf8name` in bytes, or `NAPI_AUTO_LENGTH` if it is null-terminated. * `[in] constructor`: Callback function that handles constructing instances - of the class. This should be a static method on the class, not an actual - C++ constructor function. [`napi_callback`][] provides more details. + of the class. When wrapping a C++ class, this method must be a static member + with the [`napi_callback`][] signature. A C++ class constructor cannot be + used. [`napi_callback`][] provides more details. * `[in] data`: Optional data to be passed to the constructor callback as the `data` property of the callback info. * `[in] property_count`: Number of items in the `properties` array argument. @@ -4757,27 +4758,33 @@ napi_status napi_define_class(napi_env env, Returns `napi_ok` if the API succeeded. 
-Defines a JavaScript class that corresponds to a C++ class, including: - -* A JavaScript constructor function that has the class name and invokes the - provided C++ constructor callback. -* Properties on the constructor function corresponding to _static_ data - properties, accessors, and methods of the C++ class (defined by - property descriptors with the `napi_static` attribute). -* Properties on the constructor function's `prototype` object corresponding to - _non-static_ data properties, accessors, and methods of the C++ class - (defined by property descriptors without the `napi_static` attribute). - -The C++ constructor callback should be a static method on the class that calls -the actual class constructor, then wraps the new C++ instance in a JavaScript -object, and returns the wrapper object. See `napi_wrap()` for details. +Defines a JavaScript class, including: + +* A JavaScript constructor function that has the class name. When wrapping a + corresponding C++ class, the callback passed via `constructor` can be used to + instantiate a new C++ class instance, which can then be placed inside the + JavaScript object instance being constructed using [`napi_wrap`][]. +* Properties on the constructor function whose implementation can call + corresponding _static_ data properties, accessors, and methods of the C++ + class (defined by property descriptors with the `napi_static` attribute). +* Properties on the constructor function's `prototype` object. When wrapping a + C++ class, _non-static_ data properties, accessors, and methods of the C++ + class can be called from the static functions given in the property + descriptors without the `napi_static` attribute after retrieving the C++ class + instance placed inside the JavaScript object instance by using + [`napi_unwrap`][]. 
+ +When wrapping a C++ class, the C++ constructor callback passed via `constructor` +should be a static method on the class that calls the actual class constructor, +then wraps the new C++ instance in a JavaScript object, and returns the wrapper +object. See [`napi_wrap`][] for details. The JavaScript constructor function returned from [`napi_define_class`][] is -often saved and used later, to construct new instances of the class from native -code, and/or check whether provided values are instances of the class. In that -case, to prevent the function value from being garbage-collected, create a -persistent reference to it using [`napi_create_reference`][] and ensure the -reference count is kept >= 1. +often saved and used later to construct new instances of the class from native +code, and/or to check whether provided values are instances of the class. In +that case, to prevent the function value from being garbage-collected, a +strong persistent reference to it can be created using +[`napi_create_reference`][], ensuring that the reference count is kept >= 1. 
Any non-`NULL` data which is passed to this API via the `data` parameter or via the `data` field of the `napi_property_descriptor` array items can be associated From a6ef92bc277ad91e881a5fbf63cf5a7a51e255af Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Thu, 12 Nov 2020 19:11:35 -0800 Subject: [PATCH 79/98] tools: bump unist-util-find@1.0.1 to unist-util-find@1.0.2 PR-URL: https://github.com/nodejs/node/pull/36106 Reviewed-By: Antoine du Hamel --- tools/doc/package-lock.json | 446 +----------------------------------- tools/doc/package.json | 2 +- 2 files changed, 8 insertions(+), 440 deletions(-) diff --git a/tools/doc/package-lock.json b/tools/doc/package-lock.json index bf8da52d5fc22b..9586fa494483a2 100644 --- a/tools/doc/package-lock.json +++ b/tools/doc/package-lock.json @@ -21,7 +21,7 @@ "remark-rehype": "7.0.0", "to-vfile": "6.1.0", "unified": "9.2.0", - "unist-util-find": "1.0.1", + "unist-util-find": "^1.0.2", "unist-util-select": "3.0.1", "unist-util-visit": "2.0.3" }, @@ -95,12 +95,6 @@ "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==", "dev": true }, - "node_modules/collapse-white-space": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.6.tgz", - "integrity": "sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ==", - "dev": true - }, "node_modules/comma-separated-tokens": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", @@ -142,21 +136,6 @@ "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", "dev": true }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": 
"sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1" - } - }, "node_modules/hast-to-hyperscript": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-7.0.4.tgz", @@ -288,12 +267,6 @@ "integrity": "sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==", "dev": true }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, "node_modules/inline-style-parser": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", @@ -356,18 +329,6 @@ "integrity": "sha1-vkF32yiajMw8CZDx2ya1si/BVUw=", "dev": true }, - "node_modules/longest-streak": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-1.0.0.tgz", - "integrity": "sha1-0GWXxNTDG1LMsfXY+P5xSOr9aWU=", - "dev": true - }, - "node_modules/markdown-table": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-0.4.0.tgz", - "integrity": "sha1-iQwsGzv+g/sA5BKbjkz+ZFJw+dE=", - "dev": true - }, "node_modules/mdast-util-definitions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-3.0.1.tgz", @@ -653,15 +614,6 @@ "boolbase": "~1.0.0" } }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": 
"sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "dependencies": { - "wrappy": "1" - } - }, "node_modules/parse-entities": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", @@ -713,17 +665,6 @@ "hast-util-to-html": "^7.1.1" } }, - "node_modules/remark": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/remark/-/remark-5.1.0.tgz", - "integrity": "sha1-y0Y709vLS5l5STXu4c9x16jjBow=", - "dev": true, - "dependencies": { - "remark-parse": "^1.1.0", - "remark-stringify": "^1.1.0", - "unified": "^4.1.1" - } - }, "node_modules/remark-gfm": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-1.0.0.tgz", @@ -772,132 +713,6 @@ "mdast-util-to-hast": "^9.1.0" } }, - "node_modules/remark-stringify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-1.1.0.tgz", - "integrity": "sha1-pxBeJbnuK/mkm3XSxCPxGwauIJI=", - "dev": true, - "dependencies": { - "ccount": "^1.0.0", - "extend": "^3.0.0", - "longest-streak": "^1.0.0", - "markdown-table": "^0.4.0", - "parse-entities": "^1.0.2", - "repeat-string": "^1.5.4", - "stringify-entities": "^1.0.1", - "unherit": "^1.0.4" - } - }, - "node_modules/remark-stringify/node_modules/parse-entities": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-1.2.2.tgz", - "integrity": "sha512-NzfpbxW/NPrzZ/yYSoQxyqUZMZXIdCfE0OIN4ESsnptHJECoUk3FZktxNuzQf4tjt5UEopnxpYJbvYuxIFDdsg==", - "dev": true, - "dependencies": { - "character-entities": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "character-reference-invalid": "^1.0.0", - "is-alphanumerical": "^1.0.0", - "is-decimal": "^1.0.0", - "is-hexadecimal": "^1.0.0" - } - }, - "node_modules/remark-stringify/node_modules/stringify-entities": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-1.3.2.tgz", - "integrity": 
"sha512-nrBAQClJAPN2p+uGCVJRPIPakKeKWZ9GtBCmormE7pWOSlHat7+x5A8gx85M7HM5Dt0BP3pP5RhVW77WdbJJ3A==", - "dev": true, - "dependencies": { - "character-entities-html4": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "is-alphanumerical": "^1.0.0", - "is-hexadecimal": "^1.0.0" - } - }, - "node_modules/remark/node_modules/parse-entities": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-1.2.2.tgz", - "integrity": "sha512-NzfpbxW/NPrzZ/yYSoQxyqUZMZXIdCfE0OIN4ESsnptHJECoUk3FZktxNuzQf4tjt5UEopnxpYJbvYuxIFDdsg==", - "dev": true, - "dependencies": { - "character-entities": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "character-reference-invalid": "^1.0.0", - "is-alphanumerical": "^1.0.0", - "is-decimal": "^1.0.0", - "is-hexadecimal": "^1.0.0" - } - }, - "node_modules/remark/node_modules/remark-parse": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-1.1.0.tgz", - "integrity": "sha1-w8oQ+ajaBGFcKPCapOMEUQUm7CE=", - "dev": true, - "dependencies": { - "collapse-white-space": "^1.0.0", - "extend": "^3.0.0", - "parse-entities": "^1.0.2", - "repeat-string": "^1.5.4", - "trim": "0.0.1", - "trim-trailing-lines": "^1.0.0", - "unherit": "^1.0.4", - "unist-util-remove-position": "^1.0.0", - "vfile-location": "^2.0.0" - } - }, - "node_modules/remark/node_modules/unified": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/unified/-/unified-4.2.1.tgz", - "integrity": "sha1-dv9Dqo2kMPbn5KVchOusKtLPzS4=", - "dev": true, - "dependencies": { - "bail": "^1.0.0", - "extend": "^3.0.0", - "has": "^1.0.1", - "once": "^1.3.3", - "trough": "^1.0.0", - "vfile": "^1.0.0" - } - }, - "node_modules/remark/node_modules/unist-util-remove-position": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-1.1.4.tgz", - "integrity": 
"sha512-tLqd653ArxJIPnKII6LMZwH+mb5q+n/GtXQZo6S6csPRs5zB0u79Yw8ouR3wTw8wxvdJFhpP6Y7jorWdCgLO0A==", - "dev": true, - "dependencies": { - "unist-util-visit": "^1.1.0" - } - }, - "node_modules/remark/node_modules/unist-util-visit": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-1.4.1.tgz", - "integrity": "sha512-AvGNk7Bb//EmJZyhtRUnNMEpId/AZ5Ph/KUpTI09WHQuDZHKovQ1oEv3mfmKpWKtoMzyMC4GLBm1Zy5k12fjIw==", - "dev": true, - "dependencies": { - "unist-util-visit-parents": "^2.0.0" - } - }, - "node_modules/remark/node_modules/unist-util-visit-parents": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-2.1.2.tgz", - "integrity": "sha512-DyN5vD4NE3aSeB+PXYNKxzGsfocxp6asDc2XXE3b0ekO2BaRUpBicbbUygfSvYfUz1IkmjFR1YF7dPklraMZ2g==", - "dev": true, - "dependencies": { - "unist-util-is": "^3.0.0" - } - }, - "node_modules/remark/node_modules/vfile": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-1.4.0.tgz", - "integrity": "sha1-wP1vpIT43r23cfaMMe112I2pf+c=", - "dev": true - }, - "node_modules/remark/node_modules/vfile-location": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-2.0.6.tgz", - "integrity": "sha512-sSFdyCP3G6Ka0CEmN83A2YCMKIieHx0EDaj5IDP4g1pa5ZJ4FJDvpO0WODLxo4LUX4oe52gmSCK7Jw4SBghqxA==", - "dev": true - }, "node_modules/repeat-string": { "version": "1.6.1", "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", @@ -954,34 +769,12 @@ "vfile": "^4.0.0" } }, - "node_modules/trim": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz", - "integrity": "sha1-WFhUf2spB1fulczMZm+1AITEYN0=", - "dev": true - }, - "node_modules/trim-trailing-lines": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.3.tgz", - "integrity": 
"sha512-4ku0mmjXifQcTVfYDfR5lpgV7zVqPg6zV9rdZmwOPqq0+Zq19xDqEgagqVbc4pOOShbncuAOIs59R3+3gcF3ZA==", - "dev": true - }, "node_modules/trough": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==", "dev": true }, - "node_modules/unherit": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/unherit/-/unherit-1.1.3.tgz", - "integrity": "sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ==", - "dev": true, - "dependencies": { - "inherits": "^2.0.0", - "xtend": "^4.0.0" - } - }, "node_modules/unified": { "version": "9.2.0", "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz", @@ -1003,13 +796,12 @@ "dev": true }, "node_modules/unist-util-find": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/unist-util-find/-/unist-util-find-1.0.1.tgz", - "integrity": "sha1-EGK7tpKMepfGrcibU3RdTEbCIqI=", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unist-util-find/-/unist-util-find-1.0.2.tgz", + "integrity": "sha512-ft06UDYzqi9o9RmGP0sZWI/zvLLQiBW2/MD+rW6mDqbOWDcmknGX9orQPspfuGRYWr8eSJAmfsBcvOpfGRJseA==", "dev": true, "dependencies": { "lodash.iteratee": "^4.5.0", - "remark": "^5.0.1", "unist-util-visit": "^1.1.0" } }, @@ -1139,12 +931,6 @@ "integrity": "sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==", "dev": true }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, "node_modules/xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", @@ -1225,12 +1011,6 @@ "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==", "dev": true }, - "collapse-white-space": { 
- "version": "1.0.6", - "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.6.tgz", - "integrity": "sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ==", - "dev": true - }, "comma-separated-tokens": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", @@ -1264,21 +1044,6 @@ "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", "dev": true }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "requires": { - "function-bind": "^1.1.1" - } - }, "hast-to-hyperscript": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-7.0.4.tgz", @@ -1412,12 +1177,6 @@ "integrity": "sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==", "dev": true }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, "inline-style-parser": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", @@ -1480,18 +1239,6 @@ "integrity": "sha1-vkF32yiajMw8CZDx2ya1si/BVUw=", "dev": true }, - "longest-streak": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-1.0.0.tgz", - "integrity": 
"sha1-0GWXxNTDG1LMsfXY+P5xSOr9aWU=", - "dev": true - }, - "markdown-table": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-0.4.0.tgz", - "integrity": "sha1-iQwsGzv+g/sA5BKbjkz+ZFJw+dE=", - "dev": true - }, "mdast-util-definitions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-3.0.1.tgz", @@ -1707,15 +1454,6 @@ "boolbase": "~1.0.0" } }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "requires": { - "wrappy": "1" - } - }, "parse-entities": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", @@ -1763,103 +1501,6 @@ "hast-util-to-html": "^7.1.1" } }, - "remark": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/remark/-/remark-5.1.0.tgz", - "integrity": "sha1-y0Y709vLS5l5STXu4c9x16jjBow=", - "dev": true, - "requires": { - "remark-parse": "^1.1.0", - "remark-stringify": "^1.1.0", - "unified": "^4.1.1" - }, - "dependencies": { - "parse-entities": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-1.2.2.tgz", - "integrity": "sha512-NzfpbxW/NPrzZ/yYSoQxyqUZMZXIdCfE0OIN4ESsnptHJECoUk3FZktxNuzQf4tjt5UEopnxpYJbvYuxIFDdsg==", - "dev": true, - "requires": { - "character-entities": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "character-reference-invalid": "^1.0.0", - "is-alphanumerical": "^1.0.0", - "is-decimal": "^1.0.0", - "is-hexadecimal": "^1.0.0" - } - }, - "remark-parse": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-1.1.0.tgz", - "integrity": "sha1-w8oQ+ajaBGFcKPCapOMEUQUm7CE=", - "dev": true, - "requires": { - "collapse-white-space": "^1.0.0", - "extend": "^3.0.0", - "parse-entities": "^1.0.2", - "repeat-string": "^1.5.4", - "trim": "0.0.1", - "trim-trailing-lines": 
"^1.0.0", - "unherit": "^1.0.4", - "unist-util-remove-position": "^1.0.0", - "vfile-location": "^2.0.0" - } - }, - "unified": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/unified/-/unified-4.2.1.tgz", - "integrity": "sha1-dv9Dqo2kMPbn5KVchOusKtLPzS4=", - "dev": true, - "requires": { - "bail": "^1.0.0", - "extend": "^3.0.0", - "has": "^1.0.1", - "once": "^1.3.3", - "trough": "^1.0.0", - "vfile": "^1.0.0" - } - }, - "unist-util-remove-position": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-1.1.4.tgz", - "integrity": "sha512-tLqd653ArxJIPnKII6LMZwH+mb5q+n/GtXQZo6S6csPRs5zB0u79Yw8ouR3wTw8wxvdJFhpP6Y7jorWdCgLO0A==", - "dev": true, - "requires": { - "unist-util-visit": "^1.1.0" - } - }, - "unist-util-visit": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-1.4.1.tgz", - "integrity": "sha512-AvGNk7Bb//EmJZyhtRUnNMEpId/AZ5Ph/KUpTI09WHQuDZHKovQ1oEv3mfmKpWKtoMzyMC4GLBm1Zy5k12fjIw==", - "dev": true, - "requires": { - "unist-util-visit-parents": "^2.0.0" - } - }, - "unist-util-visit-parents": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-2.1.2.tgz", - "integrity": "sha512-DyN5vD4NE3aSeB+PXYNKxzGsfocxp6asDc2XXE3b0ekO2BaRUpBicbbUygfSvYfUz1IkmjFR1YF7dPklraMZ2g==", - "dev": true, - "requires": { - "unist-util-is": "^3.0.0" - } - }, - "vfile": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-1.4.0.tgz", - "integrity": "sha1-wP1vpIT43r23cfaMMe112I2pf+c=", - "dev": true - }, - "vfile-location": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-2.0.6.tgz", - "integrity": "sha512-sSFdyCP3G6Ka0CEmN83A2YCMKIieHx0EDaj5IDP4g1pa5ZJ4FJDvpO0WODLxo4LUX4oe52gmSCK7Jw4SBghqxA==", - "dev": true - } - } - }, "remark-gfm": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/remark-gfm/-/remark-gfm-1.0.0.tgz", @@ -1900,50 +1541,6 @@ "mdast-util-to-hast": "^9.1.0" } }, - "remark-stringify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-1.1.0.tgz", - "integrity": "sha1-pxBeJbnuK/mkm3XSxCPxGwauIJI=", - "dev": true, - "requires": { - "ccount": "^1.0.0", - "extend": "^3.0.0", - "longest-streak": "^1.0.0", - "markdown-table": "^0.4.0", - "parse-entities": "^1.0.2", - "repeat-string": "^1.5.4", - "stringify-entities": "^1.0.1", - "unherit": "^1.0.4" - }, - "dependencies": { - "parse-entities": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-1.2.2.tgz", - "integrity": "sha512-NzfpbxW/NPrzZ/yYSoQxyqUZMZXIdCfE0OIN4ESsnptHJECoUk3FZktxNuzQf4tjt5UEopnxpYJbvYuxIFDdsg==", - "dev": true, - "requires": { - "character-entities": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "character-reference-invalid": "^1.0.0", - "is-alphanumerical": "^1.0.0", - "is-decimal": "^1.0.0", - "is-hexadecimal": "^1.0.0" - } - }, - "stringify-entities": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-1.3.2.tgz", - "integrity": "sha512-nrBAQClJAPN2p+uGCVJRPIPakKeKWZ9GtBCmormE7pWOSlHat7+x5A8gx85M7HM5Dt0BP3pP5RhVW77WdbJJ3A==", - "dev": true, - "requires": { - "character-entities-html4": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "is-alphanumerical": "^1.0.0", - "is-hexadecimal": "^1.0.0" - } - } - } - }, "repeat-string": { "version": "1.6.1", "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", @@ -2000,34 +1597,12 @@ "vfile": "^4.0.0" } }, - "trim": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz", - "integrity": "sha1-WFhUf2spB1fulczMZm+1AITEYN0=", - "dev": true - }, - "trim-trailing-lines": { - "version": "1.1.3", - "resolved": 
"https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.3.tgz", - "integrity": "sha512-4ku0mmjXifQcTVfYDfR5lpgV7zVqPg6zV9rdZmwOPqq0+Zq19xDqEgagqVbc4pOOShbncuAOIs59R3+3gcF3ZA==", - "dev": true - }, "trough": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==", "dev": true }, - "unherit": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/unherit/-/unherit-1.1.3.tgz", - "integrity": "sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ==", - "dev": true, - "requires": { - "inherits": "^2.0.0", - "xtend": "^4.0.0" - } - }, "unified": { "version": "9.2.0", "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz", @@ -2049,13 +1624,12 @@ "dev": true }, "unist-util-find": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/unist-util-find/-/unist-util-find-1.0.1.tgz", - "integrity": "sha1-EGK7tpKMepfGrcibU3RdTEbCIqI=", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unist-util-find/-/unist-util-find-1.0.2.tgz", + "integrity": "sha512-ft06UDYzqi9o9RmGP0sZWI/zvLLQiBW2/MD+rW6mDqbOWDcmknGX9orQPspfuGRYWr8eSJAmfsBcvOpfGRJseA==", "dev": true, "requires": { "lodash.iteratee": "^4.5.0", - "remark": "^5.0.1", "unist-util-visit": "^1.1.0" }, "dependencies": { @@ -2193,12 +1767,6 @@ "integrity": "sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==", "dev": true }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, "xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", diff --git a/tools/doc/package.json b/tools/doc/package.json index d396e23248a63c..3a5d9c0139d56b 100644 --- a/tools/doc/package.json +++ 
b/tools/doc/package.json @@ -17,7 +17,7 @@ "remark-rehype": "7.0.0", "to-vfile": "6.1.0", "unified": "9.2.0", - "unist-util-find": "1.0.1", + "unist-util-find": "^1.0.2", "unist-util-select": "3.0.1", "unist-util-visit": "2.0.3" }, From 4e3883ec2de29aa780cb310a364847812a5afdd5 Mon Sep 17 00:00:00 2001 From: Baruch Odem Date: Sun, 8 Nov 2020 13:23:04 +0200 Subject: [PATCH 80/98] win,build,tools: support VS prerelease PR-URL: https://github.com/nodejs/node/pull/36033 Reviewed-By: Bartosz Sosnowski Reviewed-By: Rich Trott Reviewed-By: James M Snell --- tools/msvs/vswhere_usability_wrapper.cmd | 2 +- vcbuild.bat | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/msvs/vswhere_usability_wrapper.cmd b/tools/msvs/vswhere_usability_wrapper.cmd index 0c7cac1e4c4aa2..45ca5b2164a3b3 100644 --- a/tools/msvs/vswhere_usability_wrapper.cmd +++ b/tools/msvs/vswhere_usability_wrapper.cmd @@ -5,7 +5,7 @@ @if not defined DEBUG_HELPER @ECHO OFF setlocal -if "%~1"=="prerelease" set VSWHERE_WITH_PRERELEASE=1 +if "%~2"=="prerelease" set VSWHERE_WITH_PRERELEASE=1 set "InstallerPath=%ProgramFiles(x86)%\Microsoft Visual Studio\Installer" if not exist "%InstallerPath%" set "InstallerPath=%ProgramFiles%\Microsoft Visual Studio\Installer" if not exist "%InstallerPath%" goto :no-vswhere diff --git a/vcbuild.bat b/vcbuild.bat index 179da60d2646a2..777842de209d11 100644 --- a/vcbuild.bat +++ b/vcbuild.bat @@ -250,7 +250,7 @@ echo Looking for Visual Studio 2019 @rem cleared first as vswhere_usability_wrapper.cmd doesn't when it fails to @rem detect the version searched for if not defined target_env set "VCINSTALLDIR=" -call tools\msvs\vswhere_usability_wrapper.cmd "[16.0,17.0)" +call tools\msvs\vswhere_usability_wrapper.cmd "[16.0,17.0)" "prerelease" if "_%VCINSTALLDIR%_" == "__" goto msbuild-not-found set "WIXSDKDIR=%WIX%\SDK\VS2017" if defined msi ( From c23ee3744faeb50c036bab0c05994d6761bfc0a5 Mon Sep 17 00:00:00 2001 From: Ruy Adorno Date: Tue, 17 Nov 2020 
15:37:44 -0500 Subject: [PATCH 81/98] deps: upgrade npm to 7.0.12 PR-URL: https://github.com/nodejs/node/pull/36153 Reviewed-By: Colin Ihrig Reviewed-By: Rich Trott Reviewed-By: Ruben Bridgewater Reviewed-By: Gireesh Punathil Reviewed-By: Luigi Pinca Reviewed-By: Trivikram Kamat --- deps/npm/.eslintrc.json | 2 +- deps/npm/AUTHORS | 1 + deps/npm/CHANGELOG.md | 27 + deps/npm/docs/content/commands/npm-audit.md | 4 +- deps/npm/docs/output/commands/npm-audit.html | 4 +- deps/npm/docs/output/commands/npm-ls.html | 2 +- deps/npm/docs/output/commands/npm.html | 2 +- deps/npm/lib/audit.js | 4 +- deps/npm/lib/ci.js | 4 +- deps/npm/lib/dedupe.js | 4 +- deps/npm/lib/init.js | 15 +- deps/npm/lib/install.js | 63 +- deps/npm/lib/link.js | 6 +- deps/npm/lib/prune.js | 4 +- deps/npm/lib/run-script.js | 2 +- deps/npm/lib/uninstall.js | 4 +- deps/npm/lib/update.js | 4 +- deps/npm/lib/utils/reify-finish.js | 31 + deps/npm/man/man1/npm-audit.1 | 4 +- deps/npm/man/man1/npm-ls.1 | 2 +- deps/npm/man/man1/npm.1 | 2 +- .../@npmcli/run-script/lib/make-spawn-args.js | 20 +- .../@npmcli/run-script/lib/run-script-pkg.js | 3 +- .../@npmcli/run-script/package.json | 3 +- deps/npm/node_modules/puka/CHANGELOG.md | 31 + deps/npm/node_modules/puka/LICENSE.txt | 18 + deps/npm/node_modules/puka/README.md | 411 ++++ deps/npm/node_modules/puka/index.js | 804 +++++++ deps/npm/node_modules/puka/package.json | 38 + deps/npm/package.json | 10 +- .../test-lib-init.js-TAP.test.js | 19 + ...test-lib-utils-reify-finish.js-TAP.test.js | 15 + deps/npm/test/bin/npm-cli.js | 2 +- deps/npm/test/bin/npx-cli.js | 18 +- deps/npm/test/lib/access.js | 166 +- deps/npm/test/lib/adduser.js | 32 +- deps/npm/test/lib/audit.js | 55 +- deps/npm/test/lib/auth/legacy.js | 70 +- deps/npm/test/lib/auth/oauth.js | 8 +- deps/npm/test/lib/auth/saml.js | 8 +- deps/npm/test/lib/auth/sso.js | 34 +- deps/npm/test/lib/bin.js | 6 +- deps/npm/test/lib/birthday.js | 2 + deps/npm/test/lib/bugs.js | 68 +- deps/npm/test/lib/cache.js 
| 36 +- deps/npm/test/lib/ci.js | 68 +- deps/npm/test/lib/cli.js | 47 +- deps/npm/test/lib/config.js | 68 +- deps/npm/test/lib/dedupe.js | 20 +- deps/npm/test/lib/dist-tag.js | 59 +- deps/npm/test/lib/docs.js | 48 +- deps/npm/test/lib/exec.js | 314 ++- deps/npm/test/lib/explain.js | 60 +- deps/npm/test/lib/explore.js | 100 +- deps/npm/test/lib/find-dupes.js | 4 +- deps/npm/test/lib/fund.js | 292 +-- deps/npm/test/lib/get.js | 6 +- deps/npm/test/lib/init.js | 211 ++ deps/npm/test/lib/install.js | 114 +- deps/npm/test/lib/link.js | 115 +- deps/npm/test/lib/ll.js | 8 +- deps/npm/test/lib/load-all-commands.js | 3 +- deps/npm/test/lib/load-all.js | 4 +- deps/npm/test/lib/logout.js | 18 +- deps/npm/test/lib/ls.js | 1944 +++++++++-------- deps/npm/test/lib/npm.js | 65 +- deps/npm/test/lib/outdated.js | 175 +- deps/npm/test/lib/owner.js | 131 +- deps/npm/test/lib/pack.js | 69 +- deps/npm/test/lib/ping.js | 14 +- deps/npm/test/lib/prefix.js | 2 +- deps/npm/test/lib/prune.js | 14 +- deps/npm/test/lib/repo.js | 186 +- deps/npm/test/lib/root.js | 2 +- deps/npm/test/lib/run-script.js | 260 +-- deps/npm/test/lib/test.js | 8 +- deps/npm/test/lib/token.js | 293 ++- deps/npm/test/lib/utils/audit-error.js | 34 +- deps/npm/test/lib/utils/cleanup-log-files.js | 20 +- .../lib/utils/completion/installed-deep.js | 122 +- .../lib/utils/completion/installed-shallow.js | 56 +- deps/npm/test/lib/utils/completion/none.js | 2 +- deps/npm/test/lib/utils/config.js | 49 +- deps/npm/test/lib/utils/error-handler.js | 52 +- deps/npm/test/lib/utils/error-message.js | 102 +- deps/npm/test/lib/utils/escape-arg.js | 2 +- deps/npm/test/lib/utils/escape-exec-path.js | 2 +- deps/npm/test/lib/utils/explain-dep.js | 2 +- deps/npm/test/lib/utils/explain-eresolve.js | 2 +- deps/npm/test/lib/utils/file-exists.js | 4 +- deps/npm/test/lib/utils/flat-options.js | 52 +- deps/npm/test/lib/utils/get-identity.js | 34 +- deps/npm/test/lib/utils/get-project-scope.js | 10 +- .../utils/hosted-git-info-from-manifest.js | 6 
+- deps/npm/test/lib/utils/is-windows-bash.js | 4 +- deps/npm/test/lib/utils/is-windows-shell.js | 2 +- deps/npm/test/lib/utils/is-windows.js | 2 +- deps/npm/test/lib/utils/lifecycle-cmd.js | 6 +- deps/npm/test/lib/utils/path.js | 2 +- deps/npm/test/lib/utils/perf.js | 10 +- deps/npm/test/lib/utils/ping.js | 4 +- deps/npm/test/lib/utils/proc-log-listener.js | 16 +- deps/npm/test/lib/utils/read-local-package.js | 16 +- deps/npm/test/lib/utils/reify-finish.js | 80 + deps/npm/test/lib/utils/reify-output.js | 152 +- deps/npm/test/lib/utils/setup-log.js | 85 +- deps/npm/test/lib/utils/tar.js | 55 +- deps/npm/test/lib/utils/unsupported.js | 6 +- deps/npm/test/lib/utils/update-notifier.js | 33 +- deps/npm/test/lib/view.js | 451 ++-- deps/npm/test/lib/whoami.js | 4 +- 111 files changed, 5022 insertions(+), 3247 deletions(-) create mode 100644 deps/npm/lib/utils/reify-finish.js create mode 100644 deps/npm/node_modules/puka/CHANGELOG.md create mode 100644 deps/npm/node_modules/puka/LICENSE.txt create mode 100644 deps/npm/node_modules/puka/README.md create mode 100644 deps/npm/node_modules/puka/index.js create mode 100644 deps/npm/node_modules/puka/package.json create mode 100644 deps/npm/tap-snapshots/test-lib-init.js-TAP.test.js create mode 100644 deps/npm/tap-snapshots/test-lib-utils-reify-finish.js-TAP.test.js create mode 100644 deps/npm/test/lib/init.js create mode 100644 deps/npm/test/lib/utils/reify-finish.js diff --git a/deps/npm/.eslintrc.json b/deps/npm/.eslintrc.json index 6232a8f82187ff..139716eefd85a0 100644 --- a/deps/npm/.eslintrc.json +++ b/deps/npm/.eslintrc.json @@ -133,7 +133,7 @@ "no-shadow-restricted-names": "error", "no-sparse-arrays": "error", "no-tabs": "error", - "no-template-curly-in-string": "error", + "no-template-curly-in-string": "off", "no-this-before-super": "error", "no-throw-literal": "off", "no-trailing-spaces": "error", diff --git a/deps/npm/AUTHORS b/deps/npm/AUTHORS index c0986e8be2bf90..2405a171bc0c30 100644 --- a/deps/npm/AUTHORS +++ 
b/deps/npm/AUTHORS @@ -733,3 +733,4 @@ Jan Tojnar Jason Attwood Vlad GURDIGA Sébastien Puech +Jannis Hell diff --git a/deps/npm/CHANGELOG.md b/deps/npm/CHANGELOG.md index 8cafde1158491d..9c40ec41678d75 100644 --- a/deps/npm/CHANGELOG.md +++ b/deps/npm/CHANGELOG.md @@ -1,3 +1,30 @@ +## 7.0.12 (2020-11-17) + +### BUG FIXES + +* [`7b89576bd`](https://github.com/npm/cli/commit/7b89576bd1fa557a312a841afa66b895558d1b12) + [#2174](https://github.com/npm/cli/issues/2174) + fix running empty scripts with `npm run-script` + ([@nlf](https://github.com/nlf)) +* [`bc9afb195`](https://github.com/npm/cli/commit/bc9afb195f5aad7c06bc96049c0f00dc8e752dee) + [#2002](https://github.com/npm/cli/issues/2002) + [#2184](https://github.com/npm/cli/issues/2184) + Preserve builtin conf when installing npm globally + ([@isaacs](https://github.com/isaacs)) + +### DEPENDENCIES + +* [`b74c05d88`](https://github.com/npm/cli/commit/b74c05d88dc48fabef031ea66ffaa4e548845655) + `@npmcli/run-script@1.8.0` + * fix windows command-line argument escaping + +### DOCUMENTATION + +* [`4e522fdc9`](https://github.com/npm/cli/commit/4e522fdc917bc85af2ca8ff7669a0178e2f35123) + [#2179](https://github.com/npm/cli/issues/2179) + remove mention to --parseable option from `npm audit` docs + ([@Primajin](https://github.com/Primajin)) + ## 7.0.11 (2020-11-13) ### DEPENDENCIES diff --git a/deps/npm/docs/content/commands/npm-audit.md b/deps/npm/docs/content/commands/npm-audit.md index 645ab87b157e17..2c0a8f58047ca2 100644 --- a/deps/npm/docs/content/commands/npm-audit.md +++ b/deps/npm/docs/content/commands/npm-audit.md @@ -7,8 +7,8 @@ description: Run a security audit ### Synopsis ```bash -npm audit [--json|--parseable|--audit-level=(low|moderate|high|critical)] -npm audit fix [--force|--package-lock-only|--dry-run] +npm audit [--json] [--production] 
[--audit-level=(low|moderate|high|critical)] +npm audit fix [--force|--package-lock-only|--dry-run|--production|--only=(dev|prod)] common options: [--production] [--only=(dev|prod)] ``` diff --git a/deps/npm/docs/output/commands/npm-audit.html b/deps/npm/docs/output/commands/npm-audit.html index 4dd59417e82713..4482f6608999f2 100644 --- a/deps/npm/docs/output/commands/npm-audit.html +++ b/deps/npm/docs/output/commands/npm-audit.html @@ -145,8 +145,8 @@

Table of contents

Synopsis

-
npm audit [--json|--parseable|--audit-level=(low|moderate|high|critical)]
-npm audit fix [--force|--package-lock-only|--dry-run]
+
npm audit [--json] [--production] [--audit-level=(low|moderate|high|critical)]
+npm audit fix [--force|--package-lock-only|--dry-run|--production|--only=(dev|prod)]
 
 common options: [--production] [--only=(dev|prod)]
 
diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index 437f062c299e59..fa7ec7e0b0fd9e 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -156,7 +156,7 @@

Description

limit the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm’s source tree will show:

-
    npm@7.0.11 /path/to/npm
+
    npm@7.0.12 /path/to/npm
     └─┬ init-package-json@0.0.4
       └── promzard@0.1.5
 
diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html index 77cf24a8fb1e12..42f76ca7c4574a 100644 --- a/deps/npm/docs/output/commands/npm.html +++ b/deps/npm/docs/output/commands/npm.html @@ -148,7 +148,7 @@

Table of contents

npm <command> [args]
 

Version

-

7.0.11

+

7.0.12

Description

npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency diff --git a/deps/npm/lib/audit.js b/deps/npm/lib/audit.js index e77beab1eff61e..cb8ab5b3a43f5d 100644 --- a/deps/npm/lib/audit.js +++ b/deps/npm/lib/audit.js @@ -2,7 +2,7 @@ const Arborist = require('@npmcli/arborist') const auditReport = require('npm-audit-report') const npm = require('./npm.js') const output = require('./utils/output.js') -const reifyOutput = require('./utils/reify-output.js') +const reifyFinish = require('./utils/reify-finish.js') const auditError = require('./utils/audit-error.js') const audit = async args => { @@ -14,7 +14,7 @@ const audit = async args => { const fix = args[0] === 'fix' await arb.audit({ fix }) if (fix) - reifyOutput(arb) + await reifyFinish(arb) else { // will throw if there's an error, because this is an audit command auditError(arb.auditReport) diff --git a/deps/npm/lib/ci.js b/deps/npm/lib/ci.js index a1632e7e98064b..1255fbc2646fd4 100644 --- a/deps/npm/lib/ci.js +++ b/deps/npm/lib/ci.js @@ -1,7 +1,7 @@ const util = require('util') const Arborist = require('@npmcli/arborist') const rimraf = util.promisify(require('rimraf')) -const reifyOutput = require('./utils/reify-output.js') +const reifyFinish = require('./utils/reify-finish.js') const log = require('npmlog') const npm = require('./npm.js') @@ -35,7 +35,7 @@ const ci = async () => { ]) // npm ci should never modify the lockfile or package.json await arb.reify({ ...npm.flatOptions, save: false }) - reifyOutput(arb) + await reifyFinish(arb) } module.exports = Object.assign(cmd, { completion, usage }) diff --git a/deps/npm/lib/dedupe.js b/deps/npm/lib/dedupe.js index a08c9f3f8f3349..fe8243e21e43d2 100644 --- a/deps/npm/lib/dedupe.js +++ b/deps/npm/lib/dedupe.js @@ -2,7 +2,7 @@ const npm = require('./npm.js') const Arborist = require('@npmcli/arborist') const usageUtil = require('./utils/usage.js') -const reifyOutput = 
require('./utils/reify-output.js') +const reifyFinish = require('./utils/reify-finish.js') const usage = usageUtil('dedupe', 'npm dedupe') const completion = require('./utils/completion/none.js') @@ -18,7 +18,7 @@ const dedupe = async (args) => { dryRun, }) await arb.dedupe(npm.flatOptions) - reifyOutput(arb) + await reifyFinish(arb) } module.exports = Object.assign(cmd, { usage, completion }) diff --git a/deps/npm/lib/init.js b/deps/npm/lib/init.js index e805a2eda7796c..ed476ef38cb284 100644 --- a/deps/npm/lib/init.js +++ b/deps/npm/lib/init.js @@ -1,11 +1,11 @@ -// initialize a package.json file - -const usageUtil = require('./utils/usage.js') -const completion = require('./utils/completion/none.js') +'use strict' +const initJson = require('init-package-json') const npa = require('npm-package-arg') + const npm = require('./npm.js') -const initJson = require('init-package-json') +const usageUtil = require('./utils/usage.js') +const completion = require('./utils/completion/none.js') const output = require('./utils/output.js') const usage = usageUtil( @@ -78,11 +78,12 @@ const init = async args => { npm.log.warn('init', 'canceled') return res() } - npm.log.info('init', 'written successfully') if (er) rej(er) - else + else { + npm.log.info('init', 'written successfully') res(data) + } }) }) } diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js index 5f04fcd4f9d6be..f621c85c23e1e2 100644 --- a/deps/npm/lib/install.js +++ b/deps/npm/lib/install.js @@ -6,13 +6,15 @@ const util = require('util') const readdir = util.promisify(fs.readdir) const npm = require('./npm.js') const usageUtil = require('./utils/usage.js') -const reifyOutput = require('./utils/reify-output.js') +const reifyFinish = require('./utils/reify-finish.js') const log = require('npmlog') const { resolve, join } = require('path') const Arborist = require('@npmcli/arborist') const runScript = require('@npmcli/run-script') -const install = async (args, cb) => { +const cmd = async (args, cb) => 
install(args).then(() => cb()).catch(cb) + +const install = async args => { // the /path/to/node_modules/.. const globalTop = resolve(npm.globalDir, '..') const { ignoreScripts, global: isGlobalInstall } = npm.flatOptions @@ -34,38 +36,33 @@ const install = async (args, cb) => { path: where, }) - try { - await arb.reify({ - ...npm.flatOptions, - add: args, - }) - if (!args.length && !isGlobalInstall && !ignoreScripts) { - const { scriptShell } = npm.flatOptions - const scripts = [ - 'preinstall', - 'install', - 'postinstall', - 'prepublish', // XXX should we remove this finally?? - 'preprepare', - 'prepare', - 'postprepare', - ] - for (const event of scripts) { - await runScript({ - path: where, - args: [], - scriptShell, - stdio: 'inherit', - stdioString: true, - event, - }) - } + await arb.reify({ + ...npm.flatOptions, + add: args, + }) + if (!args.length && !isGlobalInstall && !ignoreScripts) { + const { scriptShell } = npm.flatOptions + const scripts = [ + 'preinstall', + 'install', + 'postinstall', + 'prepublish', // XXX should we remove this finally?? 
+ 'preprepare', + 'prepare', + 'postprepare', + ] + for (const event of scripts) { + await runScript({ + path: where, + args: [], + scriptShell, + stdio: 'inherit', + stdioString: true, + event, + }) } - reifyOutput(arb) - cb() - } catch (er) { - cb(er) } + await reifyFinish(arb) } const usage = usageUtil( @@ -144,4 +141,4 @@ const completion = async (opts, cb) => { cb() } -module.exports = Object.assign(install, { usage, completion }) +module.exports = Object.assign(cmd, { usage, completion }) diff --git a/deps/npm/lib/link.js b/deps/npm/lib/link.js index d7303fd086cdd9..bee44d43a7ff62 100644 --- a/deps/npm/lib/link.js +++ b/deps/npm/lib/link.js @@ -10,7 +10,7 @@ const semver = require('semver') const npm = require('./npm.js') const usageUtil = require('./utils/usage.js') -const reifyOutput = require('./utils/reify-output.js') +const reifyFinish = require('./utils/reify-finish.js') const completion = (opts, cb) => { const dir = npm.globalDir @@ -122,7 +122,7 @@ const linkInstall = async args => { add: names.map(l => `file:${resolve(globalTop, 'node_modules', l)}`), }) - reifyOutput(localArb) + await reifyFinish(localArb) } const linkPkg = async () => { @@ -133,7 +133,7 @@ const linkPkg = async () => { global: true, }) await arb.reify({ add: [`file:${npm.prefix}`] }) - reifyOutput(arb) + await reifyFinish(arb) } module.exports = Object.assign(cmd, { completion, usage }) diff --git a/deps/npm/lib/prune.js b/deps/npm/lib/prune.js index aa2ed378088e37..ea6ed4108aba28 100644 --- a/deps/npm/lib/prune.js +++ b/deps/npm/lib/prune.js @@ -3,7 +3,7 @@ const npm = require('./npm.js') const Arborist = require('@npmcli/arborist') const usageUtil = require('./utils/usage.js') -const reifyOutput = require('./utils/reify-output.js') +const reifyFinish = require('./utils/reify-finish.js') const usage = usageUtil('prune', 'npm prune [[<@scope>/]...] 
[--production]' @@ -19,7 +19,7 @@ const prune = async () => { path: where, }) await arb.prune(npm.flatOptions) - reifyOutput(arb) + await reifyFinish(arb) } module.exports = Object.assign(cmd, { usage, completion }) diff --git a/deps/npm/lib/run-script.js b/deps/npm/lib/run-script.js index 568a5712f6ac79..c095e6decd4032 100644 --- a/deps/npm/lib/run-script.js +++ b/deps/npm/lib/run-script.js @@ -46,7 +46,7 @@ const runScript = async (args) => { pkg.scripts = scripts - if (!scripts[event] && !(event === 'start' && await isServerPackage(path))) { + if (!Object.prototype.hasOwnProperty.call(scripts, event) && !(event === 'start' && await isServerPackage(path))) { if (npm.config.get('if-present')) return diff --git a/deps/npm/lib/uninstall.js b/deps/npm/lib/uninstall.js index ec997ae6457ab6..dbaa992f500e05 100644 --- a/deps/npm/lib/uninstall.js +++ b/deps/npm/lib/uninstall.js @@ -5,7 +5,7 @@ const npm = require('./npm.js') const rpj = require('read-package-json-fast') const { resolve } = require('path') const usageUtil = require('./utils/usage.js') -const reifyOutput = require('./utils/reify-output.js') +const reifyFinish = require('./utils/reify-finish.js') const cmd = (args, cb) => rm(args).then(() => cb()).catch(cb) @@ -32,7 +32,7 @@ const rm = async args => { ...npm.flatOptions, rm: args, }) - reifyOutput(arb) + await reifyFinish(arb) } const usage = usageUtil( diff --git a/deps/npm/lib/update.js b/deps/npm/lib/update.js index 791e67e407643a..0a786e30f312e9 100644 --- a/deps/npm/lib/update.js +++ b/deps/npm/lib/update.js @@ -4,7 +4,7 @@ const Arborist = require('@npmcli/arborist') const log = require('npmlog') const npm = require('./npm.js') const usageUtil = require('./utils/usage.js') -const reifyOutput = require('./utils/reify-output.js') +const reifyFinish = require('./utils/reify-finish.js') const completion = require('./utils/completion/installed-deep.js') const usage = usageUtil( @@ -32,7 +32,7 @@ const update = async args => { }) await arb.reify({ update }) 
- reifyOutput(arb) + await reifyFinish(arb) } module.exports = Object.assign(cmd, { usage, completion }) diff --git a/deps/npm/lib/utils/reify-finish.js b/deps/npm/lib/utils/reify-finish.js new file mode 100644 index 00000000000000..76dba06cb570c8 --- /dev/null +++ b/deps/npm/lib/utils/reify-finish.js @@ -0,0 +1,31 @@ +const reifyOutput = require('./reify-output.js') +const npm = require('../npm.js') +const ini = require('ini') +const {writeFile} = require('fs').promises +const {resolve} = require('path') + +const reifyFinish = async arb => { + await saveBuiltinConfig(arb) + reifyOutput(arb) +} + +const saveBuiltinConfig = async arb => { + const { options: { global }, actualTree } = arb + if (!global) + return + + // if we are using a builtin config, and just installed npm as + // a top-level global package, we have to preserve that config. + const npmNode = actualTree.inventory.get('node_modules/npm') + if (!npmNode) + return + + const builtinConf = npm.config.data.get('builtin') + if (builtinConf.loadError) + return + + const content = ini.stringify(builtinConf.raw).trim() + '\n' + await writeFile(resolve(npmNode.path, 'npmrc'), content) +} + +module.exports = reifyFinish diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1 index ebd91e8198d554..cd52afeac38d09 100644 --- a/deps/npm/man/man1/npm-audit.1 +++ b/deps/npm/man/man1/npm-audit.1 @@ -5,8 +5,8 @@ .P .RS 2 .nf -npm audit [\-\-json|\-\-parseable|\-\-audit\-level=(low|moderate|high|critical)] -npm audit fix [\-\-force|\-\-package\-lock\-only|\-\-dry\-run] +npm audit [\-\-json] [\-\-production] [\-\-audit\-level=(low|moderate|high|critical)] +npm audit fix [\-\-force|\-\-package\-lock\-only|\-\-dry\-run|\-\-production|\-\-only=(dev|prod)] common options: [\-\-production] [\-\-only=(dev|prod)] .fi diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 67c5234ef446c0..87d6c7ae99da4f 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -22,7 +22,7 
@@ For example, running \fBnpm ls promzard\fP in npm's source tree will show: .P .RS 2 .nf - npm@7\.0\.11 /path/to/npm + npm@7\.0\.12 /path/to/npm └─┬ init\-package\-json@0\.0\.4 └── promzard@0\.1\.5 .fi diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index 32e4a5b4254ae4..1464bc383b4116 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -10,7 +10,7 @@ npm [args] .RE .SS Version .P -7\.0\.11 +7\.0\.12 .SS Description .P npm is the package manager for the Node JavaScript platform\. It puts diff --git a/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js index 181be8493f3343..aa241d5e61890b 100644 --- a/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js +++ b/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js @@ -3,6 +3,24 @@ const isWindows = require('./is-windows.js') const setPATH = require('./set-path.js') const {resolve} = require('path') const npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js') +const { quoteForShell, ShellString, ShellStringText, ShellStringUnquoted } = require('puka') + +const escapeCmd = cmd => { + const result = [] + const parsed = ShellString.sh([cmd]) + for (const child of parsed.children) { + if (child instanceof ShellStringText) { + const children = child.contents.filter(segment => segment !== null).map(segment => quoteForShell(segment, false, isWindows && 'win32')) + result.push(...children) + } else if (child instanceof ShellStringUnquoted) { + result.push(child.value) + } else { + result.push(isWindows ? '&' : ';') + } + } + + return result.join('') +} const makeSpawnArgs = options => { const { @@ -16,7 +34,7 @@ const makeSpawnArgs = options => { } = options const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(scriptShell) - const args = isCmd ? ['/d', '/s', '/c', `"${cmd}"`] : ['-c', cmd] + const args = isCmd ? 
['/d', '/s', '/c', escapeCmd(cmd)] : ['-c', escapeCmd(cmd)] const spawnOpts = { env: setPATH(path, { diff --git a/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js index 47f386304e4596..ccde173e014f6c 100644 --- a/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js +++ b/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js @@ -6,7 +6,8 @@ const signalManager = require('./signal-manager.js') const isServerPackage = require('./is-server-package.js') // you wouldn't like me when I'm angry... -const bruce = (id, event, cmd) => `\n> ${id ? id + ' ' : ''}${event}\n> ${cmd}\n` +const bruce = (id, event, cmd) => + `\n> ${id ? id + ' ' : ''}${event}\n> ${cmd.trim().replace(/\n/g, '\n> ')}\n` const runScriptPkg = async options => { const { diff --git a/deps/npm/node_modules/@npmcli/run-script/package.json b/deps/npm/node_modules/@npmcli/run-script/package.json index c8a052f036763b..925e85c06173e1 100644 --- a/deps/npm/node_modules/@npmcli/run-script/package.json +++ b/deps/npm/node_modules/@npmcli/run-script/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/run-script", - "version": "1.7.5", + "version": "1.8.0", "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", "author": "Isaac Z. Schlueter (https://izs.me)", "license": "ISC", @@ -32,6 +32,7 @@ "@npmcli/promise-spawn": "^1.3.0", "infer-owner": "^1.0.4", "node-gyp": "^7.1.0", + "puka": "^1.0.1", "read-package-json-fast": "^1.1.3" }, "files": [ diff --git a/deps/npm/node_modules/puka/CHANGELOG.md b/deps/npm/node_modules/puka/CHANGELOG.md new file mode 100644 index 00000000000000..781b81295a4a7f --- /dev/null +++ b/deps/npm/node_modules/puka/CHANGELOG.md @@ -0,0 +1,31 @@ +# Changelog +All notable changes to this project will be documented in this file. 
+ +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [1.0.1](https://gitlab.com/rhendric/puka/-/compare/v1.0.0...v1.0.1) - 2020-05-16 + +### Fixed + +- Add more carets to win32 command arguments ([45965ca](https://gitlab.com/rhendric/puka/-/commit/45965ca60fcc518082e0b085d8e81f3f3279ffb4)) + + As previously documented and implemented, Puka assumed that all programs + are batch files for the purpose of multi-escaping commands that appear + in pipelines. However, regardless of whether a command is in a pipeline, + one extra layer of escaping is needed if the command invokes a batch + file, which Puka was not producing. This only applies to the arguments + to the command, not to the batch file path, nor to paths used in + redirects. (The property-based spawn test which was supposed to catch + such oversights missed this one because it was invoking the Node.js + executable directly, not, as recommended in the documentation, a batch + file.) + + Going forward, the caveats described in the documentation continue to + apply: if you are running programs on Windows with Puka, make sure they + are batch files, or you may find arguments are being escaped with too + many carets. As the documentation says, if this causes problems for you, + please open an issue so we can work out the details of what a good + workaround looks like. 
+ +## [1.0.0](https://gitlab.com/rhendric/puka/-/tags/v1.0.0) - 2017-09-29 diff --git a/deps/npm/node_modules/puka/LICENSE.txt b/deps/npm/node_modules/puka/LICENSE.txt new file mode 100644 index 00000000000000..0141196a593376 --- /dev/null +++ b/deps/npm/node_modules/puka/LICENSE.txt @@ -0,0 +1,18 @@ +Copyright 2017 Ryan Hendrickson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/deps/npm/node_modules/puka/README.md b/deps/npm/node_modules/puka/README.md new file mode 100644 index 00000000000000..edbda4d3a29fb9 --- /dev/null +++ b/deps/npm/node_modules/puka/README.md @@ -0,0 +1,411 @@ +# Puka + +[![GitLab CI pipeline status](https://gitlab.com/rhendric/puka/badges/master/pipeline.svg)](https://gitlab.com/rhendric/puka/commits/master) [![AppVeyor build status](https://img.shields.io/appveyor/ci/rhendric/puka.svg?label=windows%20tests)](https://ci.appveyor.com/project/rhendric/puka) [![Codecov status](https://img.shields.io/codecov/c/gl/rhendric/puka.svg)](https://codecov.io/gl/rhendric/puka) + +Puka is a cross-platform library for safely passing strings through shells. + +#### Contents + +- [Introduction](#introduction) + - [Why would I use Puka?](#why-would-i-use-puka) + - [How do I use Puka?](#how-do-i-use-puka) + - [What's the catch?](#whats-the-catch) +- [API Documentation](#api-documentation) + - [Basic API](#basic-api) + - [sh](#sh) + - [unquoted](#unquoted) + - [Advanced API](#advanced-api) + - [quoteForShell](#quoteforshell) + - [quoteForCmd](#quoteforcmd) + - [quoteForSh](#quoteforsh) + - [ShellString](#shellstring) + - [Secret API](#secret-api) +- [The sh DSL](#the-sh-dsl) + - [Syntax](#syntax) + - [Semantics](#semantics) + - [Types of placeholders](#types-of-placeholders) + +## Introduction + +### Why would I use Puka? + +When launching a child process from Node, you have a choice between launching +directly from the operating system (as with [child_process.spawn](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options), +if you don't use the `{ shell: true }` option), and running the command through +a shell (as with [child_process.exec](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback)). 
+Using a shell gives you more power, such as the ability to chain multiple +commands together or use redirection, but you have to construct your command as +a single string instead of using an array of arguments. And doing that can be +buggy (if not dangerous) if you don't take care to quote any arguments +correctly for the shell you're targeting, _and_ the quoting has to be done +differently on Windows and non-Windows shells. + +Puka solves that problem by giving you a simple and platform-agnostic way to +build shell commands with arguments that pass through your shell unaltered and +with no unsafe side effects, **whether you are running on Windows or a +Unix-based OS**. + +### How do I use Puka? + +Puka gives you an `sh` function intended for tagging +[template literals](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals), +which quotes (if necessary) any values interpolated into the template. A simple +example: + +```javascript +const { sh } = require('puka'); +const { execSync } = require('child_process'); + +const arg = 'file with spaces.txt'; +execSync(sh`some-command ${arg}`); +``` + +But Puka supports more than this! See [the `sh` DSL documentation](#the-sh-dsl) +for a detailed description of all the features currently supported. + +### What's the catch? + +Here are the ones I know about: + +Puka does _not_ ensure that the actual commands you're running are +cross-platform. If you're running npm programs, you generally won't have a +problem with that, but if you want to run ``sh`cat file` `` on Windows, you'll +need to depend on something like +[cash-cat](https://www.npmjs.com/package/cash-cat). + +I searched for days for a way to quote or escape line breaks in arguments to +`cmd.exe`, but couldn't find one (regular `^`-prepending and quotation marks +don't seem to cut it). If you know of a way that works, please [open an +issue](https://gitlab.com/rhendric/puka/issues/new) to tell me about it! 
Until +then, any line break characters (`\r` or `\n`) in values being interpolated by +`sh` will cause an error to be thrown on Windows only. + +Also on Windows, you may notice quoting mistakes if you run commands that +involve invoking a native executable (not a batch file ending in `.cmd` or +`.bat`). Unfortunately, batch files require some extra escaping on Windows, and +Puka assumes all programs are batch files because npm creates batch file shims +for programs it installs (and, if you care about cross-platform, you'll be +using npm programs in your commands). If this causes problems for you, please +[open an issue](https://gitlab.com/rhendric/puka/issues/new); if your situation +is specific enough, there may be workarounds or improvements to Puka to be +found. + +## API Documentation + +### Basic API + + + + +#### sh + +A string template tag for safely constructing cross-platform shell commands. + +An `sh` template is not actually treated as a literal string to be +interpolated; instead, it is a tiny DSL designed to make working with shell +strings safe, simple, and straightforward. To get started quickly, see the +examples below. [More detailed documentation][1] is available +further down. + +##### Examples + +```javascript +const title = '"this" & "that"'; +sh`script --title=${title}`; // => "script '--title=\"this\" & \"that\"'" +// Note: these examples show results for non-Windows platforms. +// On Windows, the above would instead be +// 'script ^^^"--title=\\^^^"this\\^^^" ^^^& \\^^^"that\\^^^"^^^"'. + +const names = ['file1', 'file 2']; +sh`rimraf ${names}.txt`; // => "rimraf file1.txt 'file 2.txt'" + +const cmd1 = ['cat', 'file 1.txt', 'file 2.txt']; +const cmd2 = ['use-input', '-abc']; +sh`${cmd1}|${cmd2}`; // => "cat 'file 1.txt' 'file 2.txt'|use-input -abc" +``` + +Returns **[String][2]** a string formatted for the platform Node is currently +running on. + +#### unquoted + +This function permits raw strings to be interpolated into a `sh` template. 
+ +**IMPORTANT**: If you're using Puka due to security concerns, make sure you +don't pass any untrusted content to `unquoted`. This may be obvious, but +stray punctuation in an `unquoted` section can compromise the safety of the +entire shell command. + +##### Parameters + +- `value` any value (it will be treated as a string) + +##### Examples + +```javascript +const both = true; +sh`foo ${unquoted(both ? '&&' : '||')} bar`; // => 'foo && bar' +``` + +### Advanced API + +If these functions make life easier for you, go ahead and use them; they +are just as well supported as the above. But if you aren't certain you +need them, you probably don't. + + +#### quoteForShell + +Quotes a string for injecting into a shell command. + +This function is exposed for some hypothetical case when the `sh` DSL simply +won't do; `sh` is expected to be the more convenient option almost always. +Compare: + +```javascript +console.log('cmd' + args.map(a => ' ' + quoteForShell(a)).join('')); +console.log(sh`cmd ${args}`); // same as above + +console.log('cmd' + args.map(a => ' ' + quoteForShell(a, true)).join('')); +console.log(sh`cmd "${args}"`); // same as above +``` + +Additionally, on Windows, `sh` checks the entire command string for pipes, +which subtly change how arguments need to be quoted. If your commands may +involve pipes, you are strongly encouraged to use `sh` and not try to roll +your own with `quoteForShell`. + +##### Parameters + +- `text` **[String][2]** to be quoted +- `forceQuote` **[Boolean][3]?** whether to always add quotes even if the string + is already safe. Defaults to `false`. +- `platform` **[String][2]?** a value that `process.platform` might take: + `'win32'`, `'linux'`, etc.; determines how the string is to be formatted. + When omitted, effectively the same as `process.platform`. + +Returns **[String][2]** a string that is safe for the current (or specified) +platform. + +#### quoteForCmd + +A Windows-specific version of [quoteForShell][4]. 
+ +##### Parameters + +- `text` **[String][2]** to be quoted +- `forceQuote` **[Boolean][3]?** whether to always add quotes even if the string + is already safe. Defaults to `false`. + +#### quoteForSh + +A Unix-specific version of [quoteForShell][4]. + +##### Parameters + +- `text` **[String][2]** to be quoted +- `forceQuote` **[Boolean][3]?** whether to always add quotes even if the string + is already safe. Defaults to `false`. + +#### ShellString + +A ShellString represents a shell command after it has been interpolated, but +before it has been formatted for a particular platform. ShellStrings are +useful if you want to prepare a command for a different platform than the +current one, for instance. + +To create a ShellString, use `ShellString.sh` the same way you would use +top-level `sh`. + +##### toString + +A method to format a ShellString into a regular String formatted for a +particular platform. + +###### Parameters + +- `platform` **[String][2]?** a value that `process.platform` might take: + `'win32'`, `'linux'`, etc.; determines how the string is to be formatted. + When omitted, effectively the same as `process.platform`. + +Returns **[String][2]** + +##### sh + +`ShellString.sh` is a template tag just like `sh`; the only difference is +that this function returns a ShellString which has not yet been formatted +into a String. + +Returns **[ShellString][5]** + +### Secret API + +Some internals of string formatting have been exposed for the ambitious and +brave souls who want to try to extend Puka to handle more shells or custom +interpolated values. This ‘secret’ API is partially documented in the code +but not here, and the semantic versioning guarantees on this API are bumped +down by one level: in other words, minor version releases of Puka can change +the secret API in backward-incompatible ways, and patch releases can add or +deprecate functionality. + +If it's not even documented in the code, use at your own risk—no semver +guarantees apply. 
+ + +[1]: #the-sh-dsl + +[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String + +[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean + +[4]: #quoteforshell + +[5]: #shellstring + +## The sh DSL + +### Syntax + +An `sh` template comprises words, separated by whitespace. Words can contain: + +- text, which is composed of any characters that are not whitespace, single or + double quotes, or any of the special characters + ``# $ & ( ) ; < > \ ` |``; +- quotations, which are matching single or double quotes surrounding any + characters other than the delimiting quote; and +- placeholders, using the standard JavaScript template syntax (`${}`). + (Placeholders may also appear inside quotations.) + +The special characters ``# $ & ( ) ; < > \ ` |``, if unquoted, form their own +words. + +Redirect operators (`<`, `>`, `>>`, `2>`, etc.) receive their own special +handling, as do semicolons. Other than these two exceptions, no attempt is made +to understand any more sophisticated features of shell syntax. + +Standard JavaScript escape sequences, such as `\t`, are honored in the template +literal, and are treated equivalently to the characters they represent. There +is no further mechanism for escaping within the `sh` DSL itself; in particular, +if you want to put quotes inside quotes, you have to use interpolation, like +this: + +```javascript +sh`echo "${'single = \', double = "'}"` // => "echo 'single = '\\'', double = \"'" +``` + +### Semantics + +Words that do not contain placeholders are emitted mostly verbatim to the +output string. Quotations are formatted in the expected style for the target +platform (single quotes for Unix, double quotes for Windows) regardless of the +quotes used in the template literal—as with JavaScript, single and double quotes +are interchangeable, except for the requirement to pair like with like. 
Unquoted +semicolons are translated to ampersands on Windows; all other special characters +(as enumerated above), when unquoted, are passed as-is to the output for the +shell to interpret. + +Puka may still quote words not containing the above special characters, if they +contain characters that need quoting on the target platform. For example, on +Windows, the character `%` is used for variable interpolation in `cmd.exe`, and +Puka quotes it on that platform even if it appears unquoted in the template +literal. Consequently, there is no need to be paranoid about quoting anything +that doesn't look alphanumeric inside a `sh` template literal, for fear of being +burned on a different operating system; anything that matches the definition of +‘text’ above will never need manual quoting. + +#### Types of placeholders + +##### Strings + +If a word contains a string placeholder, then the value of the placeholder is +interpolated into the word and the entire word, if necessary, is quoted. If +the placeholder occurs within quotes, no further quoting is performed: + +```javascript +sh`script --file="${'herp derp'}.txt"`; // => "script --file='herp derp.txt'" +``` + +This behavior can be exploited to force consistent quoting, if desired; but +both of the examples below are safe on all platforms: + +```javascript +const words = ['oneword', 'two words']; +sh`minimal ${words[0]}`; // => "minimal oneword" +sh`minimal ${words[1]}`; // => "minimal 'two words'" +sh`consistent '${words[0]}'`; // => "consistent 'oneword'" +sh`consistent '${words[1]}'`; // => "consistent 'two words'" +``` + +##### Arrays and iterables + +If a word contains a placeholder for an array (or other iterable object), then +the entire word is repeated once for each value in the array, separated by +spaces. If the array is empty, then the word is not emitted at all, and neither +is any leading whitespace.
+ +```javascript +const files = ['foo', 'bar']; +sh`script ${files}`; // => "script foo bar" +sh`script --file=${files}`; // => "script --file=foo --file=bar" +sh`script --file=${[]}`; // => "script" +``` + +Note that, since special characters are their own words, the pipe operator here +is not repeated: + +```javascript +const cmd = ['script', 'foo', 'bar']; +sh`${cmd}|another-script`; // => "script foo bar|another-script" +``` + +Multiple arrays in the same word generate a Cartesian product: + +```javascript +const names = ['foo', 'bar'], exts = ['log', 'txt']; +// Same word +sh`... ${names}.${exts}`; // => "... foo.log foo.txt bar.log bar.txt" +sh`... "${names} ${exts}"`; // => "... 'foo log' 'foo txt' 'bar log' 'bar txt'" + +// Not the same word (extra space just for emphasis): +sh`... ${names} ${exts}`; // => "... foo bar log txt" +sh`... ${names};${exts}`; // => "... foo bar;log txt" +``` + +Finally, if a placeholder appears in the object of a redirect operator, the +entire redirect is repeated as necessary: + +```javascript +sh`script > ${['foo', 'bar']}.txt`; // => "script > foo.txt > bar.txt" +sh`script > ${[]}.txt`; // => "script" +``` + +##### unquoted + +The `unquoted` function returns a value that will skip being quoted when used +in a placeholder, alone or in an array. + +```javascript +const cmd = 'script < input.txt'; +const fields = ['foo', 'bar']; +sh`${unquoted(cmd)} | json ${fields}`; // => "script < input.txt | json foo bar" +``` + +##### ShellString + +If `ShellString.sh` is used to construct an unformatted ShellString, that value +can be used in a placeholder to insert the contents of the ShellString into the +outer template literal. This is safer than using `unquoted` as in the previous +example, but `unquoted` can be used when all you have is a string from another +(trusted!) source. 
+ +```javascript +const url = 'http://example.com/data.json?x=1&y=2'; +const curl = ShellString.sh`curl -L ${url}`; +const fields = ['foo', 'bar']; +sh`${curl} | json ${fields}`; // => "curl -L 'http://example.com/data.json?x=1&y=2' | json foo bar" +``` + +##### Anything else + +... is treated like a string—namely, a value `x` is equivalent to `'' + x`, if +not in one of the above categories. diff --git a/deps/npm/node_modules/puka/index.js b/deps/npm/node_modules/puka/index.js new file mode 100644 index 00000000000000..b69e47d7639db9 --- /dev/null +++ b/deps/npm/node_modules/puka/index.js @@ -0,0 +1,804 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +/** + * Key a method on your object with this symbol and you can get special + * formatting for that value! See ShellStringText, ShellStringUnquoted, or + * shellStringSemicolon for examples. + * @ignore + */ +const formatSymbol = Symbol('format'); +/** + * This symbol is for implementing advanced behaviors like the need for extra + * carets in Windows shell strings that use pipes. If present, it's called in + * an earlier phase than formatSymbol, and is passed a mutable context that can + * be read during the format phase to influence formatting. + * @ignore + */ +const preformatSymbol = Symbol('preformat'); + +// When minimum Node version becomes 6, replace calls to sticky with /.../y and +// inline execFrom. +let stickySupported = true; +try { + new RegExp('', 'y'); +} catch (e) { + stickySupported = false; +} +const sticky = stickySupported ? source => new RegExp(source, 'y') : source => new RegExp(`^(?:${source})`); +const execFrom = stickySupported ? (re, haystack, index) => (re.lastIndex = index, re.exec(haystack)) : (re, haystack, index) => re.exec(haystack.substr(index)); + +function quoteForCmd(text, forceQuote) { + let caretDepth = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : 0; + // See the below blog post for an explanation of this function and + // quoteForWin32: + // https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/ + if (!text.length) { + return '""'; + } + if (/[\n\r]/.test(text)) { + throw new Error("Line breaks can't be quoted on Windows"); + } + const caretEscape = /["%]/.test(text); + text = quoteForWin32(text, forceQuote || !caretEscape && /[&()<>^|]/.test(text)); + if (caretEscape) { + // See Win32Context for explanation of what caretDepth is for. + do { + text = text.replace(/[\t "%&()<>^|]/g, '^$&'); + } while (caretDepth--); + } + return text; +} +const quoteForWin32 = (text, forceQuote) => forceQuote || /[\t "]/.test(text) ? `"${text.replace(/\\+(?=$|")/g, '$&$&').replace(/"/g, '\\"')}"` : text; +const cmdMetaChars = /[\t\n\r "%&()<>^|]/; +class Win32Context { + constructor() { + this.currentScope = newScope(null); + this.scopesByObject = new Map(); + this.argDetectState = 0; + this.argSet = new Set(); + } + read(text) { + // When cmd.exe executes a batch file, or pipes to or from one, it spawns a + // second copy of itself to run the inner command. This necessitates + // doubling up on carets so that escaped characters survive both cmd.exe + // invocations. See: + // https://stackoverflow.com/questions/8192318/why-does-delayed-expansion-fail-when-inside-a-piped-block-of-code#8194279 + // https://ss64.com/nt/syntax-redirection.html + // + // Parentheses can create an additional subshell, requiring additional + // escaping... it's a mess. + // + // So here's what we do about it: we read all unquoted text in a shell + // string and put it through this tiny parser that looks for pipes, + // sequence operators (&, &&, ||), redirects, and parentheses. This can't + // be part of the main Puka parsing, because it can be affected by + // `unquoted(...)` values provided at evaluation time. 
+ // + // Then, after associating each thing that needs to be quoted with a scope + // (via `mark()`), and identifying whether or not it's an argument to a + // command, we can determine the depth of caret escaping required in each + // scope and pass it (via `Formatter::quote()`) to `quoteForCmd()`. + // + // See also `ShellStringText`, which holds the logic for the previous + // paragraph. + const length = text.length; + for (let pos = 0, match; pos < length;) { + while (match = execFrom(reUnimportant, text, pos)) { + if (match[2] == null) { + // (not whitespace) + if (match[1] != null) { + // (>&) + this.argDetectState = this.argDetectState === 0 ? ADS_FLAG_INITIAL_REDIRECT : 0; + } else if (this.argDetectState !== ADS_FLAG_ARGS) { + this.argDetectState |= ADS_FLAG_WORD; + } + } else { + // (whitespace) + if ((this.argDetectState & ADS_FLAG_WORD) !== 0) { + this.argDetectState = ADS_FLAG_ARGS & ~this.argDetectState >> 1; + } + } + pos += match[0].length; + } + if (pos >= length) break; + if (match = execFrom(reSeqOp, text, pos)) { + this.seq(); + pos += match[0].length; + } else { + const char = text.charCodeAt(pos); + if (char === CARET) { + pos += 2; + } else if (char === QUOTE) { + // If you were foolish enough to leave a dangling quotation mark in + // an unquoted span... you're likely to have bigger problems than + // incorrect escaping. So we just do the simplest thing of looking for + // the end quote only in this piece of text. + pos += execFrom(reNotQuote, text, pos + 1)[0].length + 2; + } else { + if (char === OPEN_PAREN) { + this.enterScope(); + } else if (char === CLOSE_PAREN) { + this.exitScope(); + } else if (char === PIPE) { + this.pipe(); + } else { + // (char === '<' or '>') + this.argDetectState = this.argDetectState === 0 ? 
ADS_FLAG_INITIAL_REDIRECT : 0; + } + pos++; + } + } + } + } + enterScope() { + this.currentScope = newScope(this.currentScope); + this.argDetectState = 0; + } + exitScope() { + this.currentScope = this.currentScope.parent || (this.currentScope.parent = newScope(null)); + this.argDetectState = ADS_FLAG_ARGS; + } + seq() { + // | binds tighter than sequence operators, so the latter create new sibling + // scopes for future |s to mutate. + this.currentScope = newScope(this.currentScope.parent); + this.argDetectState = 0; + } + pipe() { + this.currentScope.depthDelta = 1; + this.argDetectState = 0; + } + mark(obj) { + this.scopesByObject.set(obj, this.currentScope); + if (this.argDetectState === ADS_FLAG_ARGS) { + this.argSet.add(obj); + } else { + this.argDetectState |= ADS_FLAG_WORD; + } + } + at(obj) { + const scope = this.scopesByObject.get(obj); + return { + depth: getDepth(scope), + isArgument: this.argSet.has(obj), + isNative: scope.isNative + }; + } +} +// These flags span the Win32Context's argument detection state machine. WORD +// is set when the context is inside a word that is not an argument (meaning it +// is either the first word in the command, or it is the object of a redirect). +// ARGS is set when the context has reached the arguments of a command. +// INITIAL_REDIRECT tracks the edge case when a redirect occurs before the +// first word of the command (if this flag is set, reaching the end of a word +// should take the state machine back to 0 instead of setting ADS_FLAG_ARGS). +const ADS_FLAG_WORD = 0x1; +const ADS_FLAG_ARGS = 0x2; +const ADS_FLAG_INITIAL_REDIRECT = 0x4; +const getDepth = scope => scope === null ? 0 : scope.depth !== -1 ? 
scope.depth : scope.depth = getDepth(scope.parent) + scope.depthDelta; +const newScope = parent => ({ + parent, + depthDelta: 0, + depth: -1, + isNative: false +}); +const CARET = '^'.charCodeAt(); +const QUOTE = '"'.charCodeAt(); +const OPEN_PAREN = '('.charCodeAt(); +const CLOSE_PAREN = ')'.charCodeAt(); +const PIPE = '|'.charCodeAt(); +const reNotQuote = sticky('[^"]*'); +const reSeqOp = sticky('&&?|\\|\\|'); +const reUnimportant = sticky('(\\d*>&)|[^\\s"$&()<>^|]+|(\\s+)'); + +const quoteForSh = (text, forceQuote) => text.length ? forceQuote || shMetaChars.test(text) ? `'${text.replace(/'/g, "'\\''")}'`.replace(/^(?:'')+(?!$)/, '').replace(/\\'''/g, "\\'") : text : "''"; +const shMetaChars = /[\t\n\r "#$&'()*;<>?\\`|~]/; + +/** + * To get a Formatter, call `Formatter.for`. + * + * To create a new Formatter, pass an object to `Formatter.declare`. + * + * To set the global default Formatter, assign to `Formatter.default`. + * + * @class + * @property {Formatter} default - The Formatter to be used when no platform + * is provided—for example, when creating strings with `sh`. + * @ignore + */ +function Formatter() {} +Object.assign(Formatter, +/** @lends Formatter */ +{ + /** + * Gets a Formatter that has been declared for the provided platform, or + * the base `'sh'` formatter if there is no Formatter specific to this + * platform, or the Formatter for the current platform if no specific platform + * is provided. + */ + for(platform) { + return platform == null ? Formatter.default || (Formatter.default = Formatter.for(process.platform)) : Formatter._registry.get(platform) || Formatter._registry.get('sh'); + }, + /** + * Creates a new Formatter or mutates the properties on an existing + * Formatter. The `platform` key on the provided properties object determines + * when the Formatter is retrieved. 
+ */ + declare(props) { + const platform = props && props.platform || 'sh'; + const existingFormatter = Formatter._registry.get(platform); + const formatter = Object.assign(existingFormatter || new Formatter(), props); + formatter.emptyString === void 0 && (formatter.emptyString = formatter.quote('', true)); + existingFormatter || Formatter._registry.set(formatter.platform, formatter); + }, + _registry: new Map(), + prototype: { + platform: 'sh', + quote: quoteForSh, + metaChars: shMetaChars, + hasExtraMetaChars: false, + statementSeparator: ';', + createContext() { + return defaultContext; + } + } +}); +const defaultContext = { + at() {} +}; +Formatter.declare(); +Formatter.declare({ + platform: 'win32', + quote(text, forceQuote, opts) { + const caretDepth = opts ? (opts.depth || 0) + (opts.isArgument && !opts.isNative ? 1 : 0) : 0; + return quoteForCmd(text, forceQuote, caretDepth); + }, + metaChars: cmdMetaChars, + hasExtraMetaChars: true, + statementSeparator: '&', + createContext(root) { + const context = new this.Context(); + root[preformatSymbol](context); + return context; + }, + Context: Win32Context +}); + +const isObject = any => any === Object(any); +function memoize(f) { + const cache = new WeakMap(); + return arg => { + let result = cache.get(arg); + if (result === void 0) { + result = f(arg); + cache.set(arg, result); + } + return result; + }; +} + +/** + * Represents a contiguous span of text that may or must be quoted. The contents + * may already contain quoted segments, which will always be quoted. If unquoted + * segments also require quoting, the entire span will be quoted together. 
+ * @ignore + */ +class ShellStringText { + constructor(contents, untested) { + this.contents = contents; + this.untested = untested; + } + [formatSymbol](formatter, context) { + const unformattedContents = this.contents; + const length = unformattedContents.length; + const contents = new Array(length); + for (let i = 0; i < length; i++) { + const c = unformattedContents[i]; + contents[i] = isObject(c) && formatSymbol in c ? c[formatSymbol](formatter) : c; + } + for (let unquoted = true, i = 0; i < length; i++) { + const content = contents[i]; + if (content === null) { + unquoted = !unquoted; + } else { + if (unquoted && (formatter.hasExtraMetaChars || this.untested && this.untested.has(i)) && formatter.metaChars.test(content)) { + return formatter.quote(contents.join(''), false, context.at(this)); + } + } + } + const parts = []; + for (let quoted = null, i = 0; i < length; i++) { + const content = contents[i]; + if (content === null) { + quoted = quoted ? (parts.push(formatter.quote(quoted.join(''), true, context.at(this))), null) : []; + } else { + (quoted || parts).push(content); + } + } + const result = parts.join(''); + return result.length ? result : formatter.emptyString; + } + [preformatSymbol](context) { + context.mark(this); + } +} + +/** + * Represents a contiguous span of text that will not be quoted. + * @ignore + */ +class ShellStringUnquoted { + constructor(value) { + this.value = value; + } + [formatSymbol]() { + return this.value; + } + [preformatSymbol](context) { + context.read(this.value); + } +} + +/** + * Represents a semicolon... or an ampersand, on Windows. + * @ignore + */ +const shellStringSemicolon = { + [formatSymbol](formatter) { + return formatter.statementSeparator; + }, + [preformatSymbol](context) { + context.seq(); + } +}; + +const PLACEHOLDER = {}; +const parse = memoize(templateSpans => { + // These are the token types our DSL can recognize. Their values won't escape + // this function. 
+ const TOKEN_TEXT = 0; + const TOKEN_QUOTE = 1; + const TOKEN_SEMI = 2; + const TOKEN_UNQUOTED = 3; + const TOKEN_SPACE = 4; + const TOKEN_REDIRECT = 5; + const result = []; + let placeholderCount = 0; + let prefix = null; + let onlyPrefixOnce = false; + let contents = []; + let quote = 0; + const lastSpan = templateSpans.length - 1; + for (let spanIndex = 0; spanIndex <= lastSpan; spanIndex++) { + const templateSpan = templateSpans[spanIndex]; + const posEnd = templateSpan.length; + let tokenStart = 0; + if (spanIndex) { + placeholderCount++; + contents.push(PLACEHOLDER); + } + // For each span, we first do a recognizing pass in which we use regular + // expressions to identify the positions of tokens in the text, and then + // a second pass that actually splits the text into the minimum number of + // substrings necessary. + const recognized = []; // [type1, index1, type2, index2...] + let firstWordBreak = -1; + let lastWordBreak = -1; + { + let pos = 0, + match; + while (pos < posEnd) { + if (quote) { + if (match = execFrom(quote === CHAR_SQUO ? reQuotation1 : reQuotation2, templateSpan, pos)) { + recognized.push(TOKEN_TEXT, pos); + pos += match[0].length; + } + if (pos < posEnd) { + recognized.push(TOKEN_QUOTE, pos++); + quote = 0; + } + } else { + if (match = execFrom(reRedirectOrSpace, templateSpan, pos)) { + firstWordBreak < 0 && (firstWordBreak = pos); + lastWordBreak = pos; + recognized.push(match[1] ? TOKEN_REDIRECT : TOKEN_SPACE, pos); + pos += match[0].length; + } + if (match = execFrom(reText, templateSpan, pos)) { + const setBreaks = match[1] != null; + setBreaks && firstWordBreak < 0 && (firstWordBreak = pos); + recognized.push(setBreaks ? 
TOKEN_UNQUOTED : TOKEN_TEXT, pos); + pos += match[0].length; + setBreaks && (lastWordBreak = pos); + } + const char = templateSpan.charCodeAt(pos); + if (char === CHAR_SEMI) { + firstWordBreak < 0 && (firstWordBreak = pos); + recognized.push(TOKEN_SEMI, pos++); + lastWordBreak = pos; + } else if (char === CHAR_SQUO || char === CHAR_DQUO) { + recognized.push(TOKEN_QUOTE, pos++); + quote = char; + } + } + } + } + // Word breaks are only important if they separate words with placeholders, + // so we can ignore the first/last break if this is the first/last span. + spanIndex === 0 && (firstWordBreak = -1); + spanIndex === lastSpan && (lastWordBreak = posEnd); + // Here begins the second pass mentioned above. This loop runs one more + // iteration than there are tokens in recognized, because it handles tokens + // on a one-iteration delay; hence the i <= iEnd instead of i < iEnd. + const iEnd = recognized.length; + for (let i = 0, type = -1; i <= iEnd; i += 2) { + let typeNext = -1, + pos; + if (i === iEnd) { + pos = posEnd; + } else { + typeNext = recognized[i]; + pos = recognized[i + 1]; + // If the next token is space or redirect, but there's another word + // break in this span, then we can handle that token the same way we + // would handle unquoted text because it isn't being attached to a + // placeholder. + typeNext >= TOKEN_SPACE && pos !== lastWordBreak && (typeNext = TOKEN_UNQUOTED); + } + const breakHere = pos === firstWordBreak || pos === lastWordBreak; + if (pos && (breakHere || typeNext !== type)) { + let value = type === TOKEN_QUOTE ? null : type === TOKEN_SEMI ? shellStringSemicolon : templateSpan.substring(tokenStart, pos); + if (type >= TOKEN_SEMI) { + // This branch handles semicolons, unquoted text, spaces, and + // redirects. shellStringSemicolon is already a formatSymbol object; + // the rest need to be wrapped. 
+ type === TOKEN_SEMI || (value = new ShellStringUnquoted(value)); + // We don't need to check placeholderCount here like we do below; + // that's only relevant during the first word break of the span, and + // because this iteration of the loop is processing the token that + // was checked for breaks in the previous iteration, it will have + // already been handled. For the same reason, prefix is guaranteed to + // be null. + if (contents.length) { + result.push(new ShellStringText(contents, null)); + contents = []; + } + // Only spaces and redirects become prefixes, but not if they've been + // rewritten to unquoted above. + if (type >= TOKEN_SPACE) { + prefix = value; + onlyPrefixOnce = type === TOKEN_SPACE; + } else { + result.push(value); + } + } else { + contents.push(value); + } + tokenStart = pos; + } + if (breakHere) { + if (placeholderCount) { + result.push({ + contents, + placeholderCount, + prefix, + onlyPrefixOnce + }); + } else { + // There's no prefix to handle in this branch; a prefix prior to this + // span would mean placeholderCount > 0, and a prefix in this span + // can't be created because spaces and redirects get rewritten to + // unquoted before the last word break. 
+ contents.length && result.push(new ShellStringText(contents, null)); + } + placeholderCount = 0; + prefix = null; + onlyPrefixOnce = false; + contents = []; + } + type = typeNext; + } + } + if (quote) { + throw new SyntaxError(`String is missing a ${String.fromCharCode(quote)} character`); + } + return result; +}); +const CHAR_SEMI = ';'.charCodeAt(); +const CHAR_SQUO = "'".charCodeAt(); +const CHAR_DQUO = '"'.charCodeAt(); +const reQuotation1 = sticky("[^']+"); +const reQuotation2 = sticky('[^"]+'); +const reText = sticky('[^\\s"#$&\'();<>\\\\`|]+|([#$&()\\\\`|]+)'); +const reRedirectOrSpace = sticky('(\\s*\\d*[<>]+\\s*)|\\s+'); + +class BitSet { + constructor() { + this.vector = new Int32Array(1); + } + has(n) { + return (this.vector[n >>> 5] & 1 << n) !== 0; + } + add(n) { + const i = n >>> 5, + requiredLength = i + 1; + let vector = this.vector, + _vector = vector, + length = _vector.length; + if (requiredLength > length) { + while (requiredLength > (length *= 2)); + const oldValues = vector; + vector = new Int32Array(length); + vector.set(oldValues); + this.vector = vector; + } + vector[i] |= 1 << n; + } +} + +function evaluate(template, values) { + values = values.map(toStringishArray); + const children = []; + let valuesStart = 0; + for (let i = 0, iMax = template.length; i < iMax; i++) { + const word = template[i]; + if (formatSymbol in word) { + children.push(word); + continue; + } + const contents = word.contents, + placeholderCount = word.placeholderCount, + prefix = word.prefix, + onlyPrefixOnce = word.onlyPrefixOnce; + const kMax = contents.length; + const valuesEnd = valuesStart + placeholderCount; + const tuples = cartesianProduct(values, valuesStart, valuesEnd); + valuesStart = valuesEnd; + for (let j = 0, jMax = tuples.length; j < jMax; j++) { + const needSpace = j > 0; + const tuple = tuples[j]; + (needSpace || prefix) && children.push(needSpace && (onlyPrefixOnce || !prefix) ? 
unquotedSpace : prefix); + let interpolatedContents = []; + let untested = null; + let quoting = false; + let tupleIndex = 0; + for (let k = 0; k < kMax; k++) { + const content = contents[k]; + if (content === PLACEHOLDER) { + const value = tuple[tupleIndex++]; + if (quoting) { + interpolatedContents.push(value); + } else { + if (isObject(value) && formatSymbol in value) { + if (interpolatedContents.length) { + children.push(new ShellStringText(interpolatedContents, untested)); + interpolatedContents = []; + untested = null; + } + children.push(value); + } else { + (untested || (untested = new BitSet())).add(interpolatedContents.length); + interpolatedContents.push(value); + } + } + } else { + interpolatedContents.push(content); + content === null && (quoting = !quoting); + } + } + if (interpolatedContents.length) { + children.push(new ShellStringText(interpolatedContents, untested)); + } + } + } + return children; +} +const primToStringish = value => value == null ? '' + value : value; +function toStringishArray(value) { + let array; + switch (true) { + default: + if (isObject(value)) { + if (Array.isArray(value)) { + array = value; + break; + } + if (Symbol.iterator in value) { + array = Array.from(value); + break; + } + } + array = [value]; + } + return array.map(primToStringish); +} +function cartesianProduct(arrs, start, end) { + const size = end - start; + let resultLength = 1; + for (let i = start; i < end; i++) { + resultLength *= arrs[i].length; + } + if (resultLength > 1e6) { + throw new RangeError("Far too many elements to interpolate"); + } + const result = new Array(resultLength); + const indices = new Array(size).fill(0); + for (let i = 0; i < resultLength; i++) { + const value = result[i] = new Array(size); + for (let j = 0; j < size; j++) { + value[j] = arrs[j + start][indices[j]]; + } + for (let j = size - 1; j >= 0; j--) { + if (++indices[j] < arrs[j + start].length) break; + indices[j] = 0; + } + } + return result; +} +const unquotedSpace = new 
ShellStringUnquoted(' '); + +/** + * A ShellString represents a shell command after it has been interpolated, but + * before it has been formatted for a particular platform. ShellStrings are + * useful if you want to prepare a command for a different platform than the + * current one, for instance. + * + * To create a ShellString, use `ShellString.sh` the same way you would use + * top-level `sh`. + */ +class ShellString { + /** @hideconstructor */ + constructor(children) { + this.children = children; + } + /** + * `ShellString.sh` is a template tag just like `sh`; the only difference is + * that this function returns a ShellString which has not yet been formatted + * into a String. + * @returns {ShellString} + * @function sh + * @static + * @memberof ShellString + */ + static sh(templateSpans) { + for (var _len = arguments.length, values = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { + values[_key - 1] = arguments[_key]; + } + return new ShellString(evaluate(parse(templateSpans), values)); + } + /** + * A method to format a ShellString into a regular String formatted for a + * particular platform. + * + * @param {String} [platform] a value that `process.platform` might take: + * `'win32'`, `'linux'`, etc.; determines how the string is to be formatted. + * When omitted, effectively the same as `process.platform`. + * @returns {String} + */ + toString(platform) { + return this[formatSymbol](Formatter.for(platform)); + } + [formatSymbol](formatter) { + let context = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : formatter.createContext(this); + return this.children.map(child => child[formatSymbol](formatter, context)).join(''); + } + [preformatSymbol](context) { + const children = this.children; + for (let i = 0, iMax = children.length; i < iMax; i++) { + const child = children[i]; + if (preformatSymbol in child) { + child[preformatSymbol](context); + } + } + } +} + +/** + * A Windows-specific version of {@link quoteForShell}. + * @param {String} text to be quoted + * @param {Boolean} [forceQuote] whether to always add quotes even if the string + * is already safe. Defaults to `false`. + */ + +/** + * A Unix-specific version of {@link quoteForShell}. + * @param {String} text to be quoted + * @param {Boolean} [forceQuote] whether to always add quotes even if the string + * is already safe. Defaults to `false`. + */ + +/** + * Quotes a string for injecting into a shell command. + * + * This function is exposed for some hypothetical case when the `sh` DSL simply + * won't do; `sh` is expected to be the more convenient option almost always. + * Compare: + * + * ```javascript + * console.log('cmd' + args.map(a => ' ' + quoteForShell(a)).join('')); + * console.log(sh`cmd ${args}`); // same as above + * + * console.log('cmd' + args.map(a => ' ' + quoteForShell(a, true)).join('')); + * console.log(sh`cmd "${args}"`); // same as above + * ``` + * + * Additionally, on Windows, `sh` checks the entire command string for pipes, + * which subtly change how arguments need to be quoted. If your commands may + * involve pipes, you are strongly encouraged to use `sh` and not try to roll + * your own with `quoteForShell`. + * + * @param {String} text to be quoted + * @param {Boolean} [forceQuote] whether to always add quotes even if the string + * is already safe. Defaults to `false`. + * @param {String} [platform] a value that `process.platform` might take: + * `'win32'`, `'linux'`, etc.; determines how the string is to be formatted. 
+ * When omitted, effectively the same as `process.platform`. + * + * @returns {String} a string that is safe for the current (or specified) + * platform. + */ +function quoteForShell(text, forceQuote, platform) { + return Formatter.for(platform).quote(text, forceQuote); +} + +/** + * A string template tag for safely constructing cross-platform shell commands. + * + * An `sh` template is not actually treated as a literal string to be + * interpolated; instead, it is a tiny DSL designed to make working with shell + * strings safe, simple, and straightforward. To get started quickly, see the + * examples below. {@link #the-sh-dsl More detailed documentation} is available + * further down. + * + * @name sh + * @example + * const title = '"this" & "that"'; + * sh`script --title=${title}`; // => "script '--title=\"this\" & \"that\"'" + * // Note: these examples show results for non-Windows platforms. + * // On Windows, the above would instead be + * // 'script ^^^"--title=\\^^^"this\\^^^" ^^^& \\^^^"that\\^^^"^^^"'. + * + * const names = ['file1', 'file 2']; + * sh`rimraf ${names}.txt`; // => "rimraf file1.txt 'file 2.txt'" + * + * const cmd1 = ['cat', 'file 1.txt', 'file 2.txt']; + * const cmd2 = ['use-input', '-abc']; + * sh`${cmd1}|${cmd2}`; // => "cat 'file 1.txt' 'file 2.txt'|use-input -abc" + * + * @returns {String} - a string formatted for the platform Node is currently + * running on. + */ +const sh = function () { + return ShellString.sh.apply(ShellString, arguments).toString(); +}; + +/** + * This function permits raw strings to be interpolated into a `sh` template. + * + * **IMPORTANT**: If you're using Puka due to security concerns, make sure you + * don't pass any untrusted content to `unquoted`. This may be obvious, but + * stray punctuation in an `unquoted` section can compromise the safety of the + * entire shell command. + * + * @param value - any value (it will be treated as a string) + * + * @example + * const both = true; + * sh`foo ${unquoted(both ? 
'&&' : '||')} bar`; // => 'foo && bar' + */ +const unquoted = value => new ShellStringUnquoted(value); + +exports.Formatter = Formatter; +exports.ShellString = ShellString; +exports.ShellStringText = ShellStringText; +exports.ShellStringUnquoted = ShellStringUnquoted; +exports.quoteForCmd = quoteForCmd; +exports.quoteForSh = quoteForSh; +exports.quoteForShell = quoteForShell; +exports.sh = sh; +exports.shellStringSemicolon = shellStringSemicolon; +exports.formatSymbol = formatSymbol; +exports.preformatSymbol = preformatSymbol; +exports.unquoted = unquoted; diff --git a/deps/npm/node_modules/puka/package.json b/deps/npm/node_modules/puka/package.json new file mode 100644 index 00000000000000..41798dc2493b85 --- /dev/null +++ b/deps/npm/node_modules/puka/package.json @@ -0,0 +1,38 @@ +{ + "name": "puka", + "version": "1.0.1", + "description": "A cross-platform library for safely passing strings through shells", + "keywords": [ + "args", + "arguments", + "cmd", + "command", + "command-line", + "cross-platform", + "escape", + "escaping", + "exec", + "linux", + "mac", + "macos", + "osx", + "quote", + "quoting", + "sh", + "shell", + "spawn", + "unix", + "win", + "win32", + "windows" + ], + "homepage": "https://gitlab.com/rhendric/puka", + "bugs": "https://gitlab.com/rhendric/puka/issues", + "license": "MIT", + "author": "Ryan Hendrickson ", + "repository": "gitlab:rhendric/puka", + "dependencies": {}, + "engines": { + "node": ">=4" + } +} \ No newline at end of file diff --git a/deps/npm/package.json b/deps/npm/package.json index 5dfcd6807f2653..aa6e5a7d34bfd9 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "7.0.11", + "version": "7.0.12", "name": "npm", "description": "a package manager for JavaScript", "keywords": [ @@ -45,7 +45,7 @@ "@npmcli/arborist": "^1.0.11", "@npmcli/ci-detect": "^1.2.0", "@npmcli/config": "^1.2.1", - "@npmcli/run-script": "^1.7.5", + "@npmcli/run-script": "^1.8.0", "abbrev": "~1.1.1", "ansicolors": 
"~0.3.2", "ansistyles": "~0.1.3", @@ -189,7 +189,7 @@ "jsdom": "^16.4.0", "marked-man": "^0.7.0", "require-inject": "^1.4.4", - "tap": "^14.10.8", + "tap": "^14.11.0", "yaml": "^1.10.0" }, "scripts": { @@ -205,9 +205,11 @@ "posttest": "npm run lint", "eslint": "eslint", "lint": "npm run eslint -- \"lib/**/*.js\"", + "linttest": "npm run eslint -- test/lib test/bin --fix", "lintfix": "npm run lint -- --fix", "prelint": "rimraf test/npm_cache*", - "resetdeps": "bash scripts/resetdeps.sh" + "resetdeps": "bash scripts/resetdeps.sh", + "prepublishOnly": "npm run lint && npm run linttest" }, "//": [ "XXX temporarily only run unit tests while v7 beta is in progress", diff --git a/deps/npm/tap-snapshots/test-lib-init.js-TAP.test.js b/deps/npm/tap-snapshots/test-lib-init.js-TAP.test.js new file mode 100644 index 00000000000000..25015aab65cb68 --- /dev/null +++ b/deps/npm/tap-snapshots/test-lib-init.js-TAP.test.js @@ -0,0 +1,19 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/init.js TAP classic npm init no args > should print helper info 1`] = ` +This utility will walk you through creating a package.json file. +It only covers the most common items, and tries to guess sensible defaults. + +See \`npm help init\` for definitive documentation on these fields +and exactly what they do. + +Use \`npm install \` afterwards to install a package and +save it as a dependency in the package.json file. + +Press ^C at any time to quit. 
+` diff --git a/deps/npm/tap-snapshots/test-lib-utils-reify-finish.js-TAP.test.js b/deps/npm/tap-snapshots/test-lib-utils-reify-finish.js-TAP.test.js new file mode 100644 index 00000000000000..a82905a399679a --- /dev/null +++ b/deps/npm/tap-snapshots/test-lib-utils-reify-finish.js-TAP.test.js @@ -0,0 +1,15 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/utils/reify-finish.js TAP should write if everything above passes > written config 1`] = ` +hasBuiltinConfig=true +x=y + +[nested] +foo=bar + +` diff --git a/deps/npm/test/bin/npm-cli.js b/deps/npm/test/bin/npm-cli.js index b68d29185873ae..bcca99c8c8fe1f 100644 --- a/deps/npm/test/bin/npm-cli.js +++ b/deps/npm/test/bin/npm-cli.js @@ -5,6 +5,6 @@ t.test('loading the bin calls the implementation', t => { '../../lib/cli.js': proc => { t.equal(proc, process, 'called implementation with process object') t.end() - } + }, }) }) diff --git a/deps/npm/test/bin/npx-cli.js b/deps/npm/test/bin/npx-cli.js index fc85f636683822..2b7b488297cab2 100644 --- a/deps/npm/test/bin/npx-cli.js +++ b/deps/npm/test/bin/npx-cli.js @@ -64,7 +64,7 @@ t.test('use a bunch of deprecated switches and options', t => { '--shell-auto-fallback', '--ignore-existing', '-q', - 'foobar' + 'foobar', ] const expect = [ @@ -78,18 +78,18 @@ t.test('use a bunch of deprecated switches and options', t => { '--loglevel', 'warn', '--', - 'foobar' + 'foobar', ] requireInject(npx, { [cli]: () => {} }) t.strictSame(process.argv, expect) t.strictSame(logs, [ - [ 'npx: the --npm argument has been removed.' ], - [ 'npx: the --node-arg argument has been removed.' ], - [ 'npx: the --n argument has been removed.' ], - [ 'npx: the --always-spawn argument has been removed.' 
], - [ 'npx: the --shell-auto-fallback argument has been removed.' ], - [ 'npx: the --ignore-existing argument has been removed.' ], - [ 'See `npm help exec` for more information' ] + ['npx: the --npm argument has been removed.'], + ['npx: the --node-arg argument has been removed.'], + ['npx: the --n argument has been removed.'], + ['npx: the --always-spawn argument has been removed.'], + ['npx: the --shell-auto-fallback argument has been removed.'], + ['npx: the --ignore-existing argument has been removed.'], + ['See `npm help exec` for more information'], ]) t.end() }) diff --git a/deps/npm/test/lib/access.js b/deps/npm/test/lib/access.js index 5d5a910f94e2fc..3063b6c53263f2 100644 --- a/deps/npm/test/lib/access.js +++ b/deps/npm/test/lib/access.js @@ -3,8 +3,8 @@ const requireInject = require('require-inject') const emptyMock = requireInject('../../lib/access.js', { '../../lib/npm.js': { - flatOptions: {} - } + flatOptions: {}, + }, }) test('completion', t => { @@ -27,7 +27,7 @@ test('completion', t => { 'ls-collaborators', 'edit', '2fa-required', - '2fa-not-required' + '2fa-not-required', ]) testComp(['npm', 'access', 'grant'], ['read-only', 'read-write']) @@ -75,7 +75,7 @@ test('edit', (t) => { access([ 'edit', - '@scoped/another' + '@scoped/another', ], (err) => { t.match( err, @@ -89,14 +89,14 @@ test('edit', (t) => { test('access public on unscoped package', (t) => { const prefix = t.testdir({ 'package.json': JSON.stringify({ - name: 'npm-access-public-pkg' - }) + name: 'npm-access-public-pkg', + }), }) const access = requireInject('../../lib/access.js', { - '../../lib/npm.js': { prefix } + '../../lib/npm.js': { prefix }, }) access([ - 'public' + 'public', ], (err) => { t.match( err, @@ -111,10 +111,10 @@ test('access public on scoped package', (t) => { t.plan(4) const name = '@scoped/npm-access-public-pkg' const prefix = t.testdir({ - 'package.json': JSON.stringify({ name }) + 'package.json': JSON.stringify({ name }), }) const access = 
requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { public: (pkg, { registry }) => { t.equal(pkg, name, 'should use pkg name ref') t.equal( @@ -123,17 +123,17 @@ test('access public on scoped package', (t) => { 'should forward correct options' ) return true - } + }, }, '../../lib/npm.js': { flatOptions: { - registry: 'https://registry.npmjs.org' + registry: 'https://registry.npmjs.org', }, - prefix - } + prefix, + }, }) access([ - 'public' + 'public', ], (err) => { t.ifError(err, 'npm access') t.ok('should successfully access public on scoped package') @@ -142,13 +142,13 @@ test('access public on scoped package', (t) => { test('access public on missing package.json', (t) => { const prefix = t.testdir({ - 'node_modules': {} + node_modules: {}, }) const access = requireInject('../../lib/access.js', { - '../../lib/npm.js': { prefix } + '../../lib/npm.js': { prefix }, }) access([ - 'public' + 'public', ], (err) => { t.match( err, @@ -162,13 +162,13 @@ test('access public on missing package.json', (t) => { test('access public on invalid package.json', (t) => { const prefix = t.testdir({ 'package.json': '{\n', - 'node_modules': {} + node_modules: {}, }) const access = requireInject('../../lib/access.js', { - '../../lib/npm.js': { prefix } + '../../lib/npm.js': { prefix }, }) access([ - 'public' + 'public', ], (err) => { t.match( err, @@ -182,14 +182,14 @@ test('access public on invalid package.json', (t) => { test('access restricted on unscoped package', (t) => { const prefix = t.testdir({ 'package.json': JSON.stringify({ - name: 'npm-access-restricted-pkg' - }) + name: 'npm-access-restricted-pkg', + }), }) const access = requireInject('../../lib/access.js', { - '../../lib/npm.js': { prefix } + '../../lib/npm.js': { prefix }, }) access([ - 'restricted' + 'restricted', ], (err) => { t.match( err, @@ -204,10 +204,10 @@ test('access restricted on scoped package', (t) => { t.plan(4) const name = '@scoped/npm-access-restricted-pkg' const prefix = 
t.testdir({ - 'package.json': JSON.stringify({ name }) + 'package.json': JSON.stringify({ name }), }) const access = requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { restricted: (pkg, { registry }) => { t.equal(pkg, name, 'should use pkg name ref') t.equal( @@ -216,17 +216,17 @@ test('access restricted on scoped package', (t) => { 'should forward correct options' ) return true - } + }, }, '../../lib/npm.js': { flatOptions: { - registry: 'https://registry.npmjs.org' + registry: 'https://registry.npmjs.org', }, - prefix - } + prefix, + }, }) access([ - 'restricted' + 'restricted', ], (err) => { t.ifError(err, 'npm access') t.ok('should successfully access restricted on scoped package') @@ -235,13 +235,13 @@ test('access restricted on scoped package', (t) => { test('access restricted on missing package.json', (t) => { const prefix = t.testdir({ - 'node_modules': {} + node_modules: {}, }) const access = requireInject('../../lib/access.js', { - '../../lib/npm.js': { prefix } + '../../lib/npm.js': { prefix }, }) access([ - 'restricted' + 'restricted', ], (err) => { t.match( err, @@ -255,13 +255,13 @@ test('access restricted on missing package.json', (t) => { test('access restricted on invalid package.json', (t) => { const prefix = t.testdir({ 'package.json': '{\n', - 'node_modules': {} + node_modules: {}, }) const access = requireInject('../../lib/access.js', { - '../../lib/npm.js': { prefix } + '../../lib/npm.js': { prefix }, }) access([ - 'restricted' + 'restricted', ], (err) => { t.match( err, @@ -275,21 +275,21 @@ test('access restricted on invalid package.json', (t) => { test('access grant read-only', (t) => { t.plan(5) const access = requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { grant: (spec, team, permissions) => { t.equal(spec, '@scoped/another', 'should use expected spec') t.equal(team, 'myorg:myteam', 'should use expected team') t.equal(permissions, 'read-only', 'should forward permissions') return true 
- } + }, }, - '../../lib/npm.js': {} + '../../lib/npm.js': {}, }) access([ 'grant', 'read-only', 'myorg:myteam', - '@scoped/another' + '@scoped/another', ], (err) => { t.ifError(err, 'npm access') t.ok('should successfully access grant read-only') @@ -299,21 +299,21 @@ test('access grant read-only', (t) => { test('access grant read-write', (t) => { t.plan(5) const access = requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { grant: (spec, team, permissions) => { t.equal(spec, '@scoped/another', 'should use expected spec') t.equal(team, 'myorg:myteam', 'should use expected team') t.equal(permissions, 'read-write', 'should forward permissions') return true - } + }, }, - '../../lib/npm.js': {} + '../../lib/npm.js': {}, }) access([ 'grant', 'read-write', 'myorg:myteam', - '@scoped/another' + '@scoped/another', ], (err) => { t.ifError(err, 'npm access') t.ok('should successfully access grant read-write') @@ -324,24 +324,24 @@ test('access grant current cwd', (t) => { t.plan(5) const prefix = t.testdir({ 'package.json': JSON.stringify({ - name: 'yargs' - }) + name: 'yargs', + }), }) const access = requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { grant: (spec, team, permissions) => { t.equal(spec, 'yargs', 'should use expected spec') t.equal(team, 'myorg:myteam', 'should use expected team') t.equal(permissions, 'read-write', 'should forward permissions') return true - } + }, }, - '../../lib/npm.js': { prefix } + '../../lib/npm.js': { prefix }, }) access([ 'grant', 'read-write', - 'myorg:myteam' + 'myorg:myteam', ], (err) => { t.ifError(err, 'npm access') t.ok('should successfully access grant current cwd') @@ -355,7 +355,7 @@ test('access grant others', (t) => { 'grant', 'rerere', 'myorg:myteam', - '@scoped/another' + '@scoped/another', ], (err) => { t.match( err, @@ -373,7 +373,7 @@ test('access grant missing team args', (t) => { 'grant', 'read-only', undefined, - '@scoped/another' + '@scoped/another', ], (err) => { 
t.match( err, @@ -391,7 +391,7 @@ test('access grant malformed team arg', (t) => { 'grant', 'read-only', 'foo', - '@scoped/another' + '@scoped/another', ], (err) => { t.match( err, @@ -403,11 +403,9 @@ test('access grant malformed team arg', (t) => { }) test('access 2fa-required/2fa-not-required', t => { - let pkg - t.plan(2) const access = requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { tfaRequired: (spec) => { t.equal(spec, '@scope/pkg', 'should use expected spec') return true @@ -415,9 +413,9 @@ test('access 2fa-required/2fa-not-required', t => { tfaNotRequired: (spec) => { t.equal(spec, 'unscoped-pkg', 'should use expected spec') return true - } + }, }, - '../../lib/npm.js': {} + '../../lib/npm.js': {}, }) access(['2fa-required', '@scope/pkg'], er => { @@ -434,19 +432,19 @@ test('access 2fa-required/2fa-not-required', t => { test('access revoke', (t) => { t.plan(4) const access = requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { revoke: (spec, team) => { t.equal(spec, '@scoped/another', 'should use expected spec') t.equal(team, 'myorg:myteam', 'should use expected team') return true - } + }, }, - '../../lib/npm.js': {} + '../../lib/npm.js': {}, }) access([ 'revoke', 'myorg:myteam', - '@scoped/another' + '@scoped/another', ], (err) => { t.ifError(err, 'npm access') t.ok('should successfully access revoke') @@ -459,7 +457,7 @@ test('access revoke missing team args', (t) => { access([ 'revoke', undefined, - '@scoped/another' + '@scoped/another', ], (err) => { t.match( err, @@ -476,7 +474,7 @@ test('access revoke malformed team arg', (t) => { access([ 'revoke', 'foo', - '@scoped/another' + '@scoped/another', ], (err) => { t.match( err, @@ -490,18 +488,18 @@ test('access revoke malformed team arg', (t) => { test('npm access ls-packages with no team', (t) => { t.plan(3) const access = requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { lsPackages: (entity) => { t.equal(entity, 'foo', 
'should use expected entity') return {} - } + }, }, '../../lib/utils/get-identity.js': () => Promise.resolve('foo'), '../../lib/utils/output.js': () => null, - '../../lib/npm.js': {} + '../../lib/npm.js': {}, }) access([ - 'ls-packages' + 'ls-packages', ], (err) => { t.ifError(err, 'npm access') t.ok('should successfully access ls-packages with no team') @@ -511,18 +509,18 @@ test('npm access ls-packages with no team', (t) => { test('access ls-packages on team', (t) => { t.plan(3) const access = requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { lsPackages: (entity) => { t.equal(entity, 'myorg:myteam', 'should use expected entity') return {} - } + }, }, '../../lib/utils/output.js': () => null, - '../../lib/npm.js': {} + '../../lib/npm.js': {}, }) access([ 'ls-packages', - 'myorg:myteam' + 'myorg:myteam', ], (err) => { t.ifError(err, 'npm access') t.ok('should successfully access ls-packages on team') @@ -533,21 +531,21 @@ test('access ls-collaborators on current', (t) => { t.plan(3) const prefix = t.testdir({ 'package.json': JSON.stringify({ - name: 'yargs' - }) + name: 'yargs', + }), }) const access = requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { lsCollaborators: (spec) => { t.equal(spec, 'yargs', 'should use expected spec') return {} - } + }, }, '../../lib/utils/output.js': () => null, - '../../lib/npm.js': { prefix } + '../../lib/npm.js': { prefix }, }) access([ - 'ls-collaborators' + 'ls-collaborators', ], (err) => { t.ifError(err, 'npm access') t.ok('should successfully access ls-collaborators on current') @@ -557,18 +555,18 @@ test('access ls-collaborators on current', (t) => { test('access ls-collaborators on spec', (t) => { t.plan(3) const access = requireInject('../../lib/access.js', { - 'libnpmaccess': { + libnpmaccess: { lsCollaborators: (spec) => { t.equal(spec, 'yargs', 'should use expected spec') return {} - } + }, }, '../../lib/utils/output.js': () => null, - '../../lib/npm.js': {} + 
'../../lib/npm.js': {}, }) access([ 'ls-collaborators', - 'yargs' + 'yargs', ], (err) => { t.ifError(err, 'npm access') t.ok('should successfully access ls-packages with no team') diff --git a/deps/npm/test/lib/adduser.js b/deps/npm/test/lib/adduser.js index 22c7c49cfaeafd..4e6a56fc199d60 100644 --- a/deps/npm/test/lib/adduser.js +++ b/deps/npm/test/lib/adduser.js @@ -8,7 +8,7 @@ let result = '' const _flatOptions = { authType: 'legacy', registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', } let failSave = false @@ -21,14 +21,14 @@ const authDummy = () => Promise.resolve({ username: 'u', password: 'p', email: 'u@npmjs.org', - alwaysAuth: false - } + alwaysAuth: false, + }, }) const deleteMock = (key, where) => { deletedConfig = { ...deletedConfig, - [key]: where + [key]: where, } } const adduser = requireInject('../../lib/adduser.js', { @@ -43,30 +43,30 @@ const adduser = requireInject('../../lib/adduser.js', { config: { delete: deleteMock, get (key, where) { - if (!where || where === 'user') { + if (!where || where === 'user') return _flatOptions[key] - } }, getCredentialsByURI, async save () { - if (failSave) { + if (failSave) throw new Error('error saving user config') - } }, set (key, value, where) { setConfig = { ...setConfig, [key]: { value, - where - } + where, + }, } }, - setCredentialsByURI - } + setCredentialsByURI, + }, + }, + '../../lib/utils/output.js': msg => { + result = msg }, - '../../lib/utils/output.js': msg => { result = msg }, - '../../lib/auth/legacy.js': authDummy + '../../lib/auth/legacy.js': authDummy, }) test('simple login', (t) => { @@ -90,7 +90,7 @@ test('simple login', (t) => { _authtoken: 'user', _authToken: 'user', '//registry.npmjs.org/:-authtoken': undefined, - '//registry.npmjs.org/:_authToken': 'user' + '//registry.npmjs.org/:_authToken': 'user', }, 'should delete token in user config' ) @@ -101,7 +101,7 @@ test('simple login', (t) => { '//registry.npmjs.org/:_password': { value: 'cA==', where: 'user' }, 
'//registry.npmjs.org/:username': { value: 'u', where: 'user' }, '//registry.npmjs.org/:email': { value: 'u@npmjs.org', where: 'user' }, - '//registry.npmjs.org/:always-auth': { value: false, where: 'user' } + '//registry.npmjs.org/:always-auth': { value: false, where: 'user' }, }, 'should set expected user configs' ) diff --git a/deps/npm/test/lib/audit.js b/deps/npm/test/lib/audit.js index 4918cb2fc27110..cc7379394b2adb 100644 --- a/deps/npm/test/lib/audit.js +++ b/deps/npm/test/lib/audit.js @@ -5,7 +5,7 @@ const audit = require('../../lib/audit.js') t.test('should audit using Arborist', t => { let ARB_ARGS = null let AUDIT_CALLED = false - let REIFY_OUTPUT_CALLED = false + let REIFY_FINISH_CALLED = false let AUDIT_REPORT_CALLED = false let OUTPUT_CALLED = false let ARB_OBJ = null @@ -14,14 +14,14 @@ t.test('should audit using Arborist', t => { '../../lib/npm.js': { prefix: 'foo', flatOptions: { - json: false + json: false, }, }, 'npm-audit-report': () => { AUDIT_REPORT_CALLED = true return { report: 'there are vulnerabilities', - exitCode: 0 + exitCode: 0, } }, '@npmcli/arborist': function (args) { @@ -32,15 +32,15 @@ t.test('should audit using Arborist', t => { this.auditReport = {} } }, - '../../lib/utils/reify-output.js': arb => { - if (arb !== ARB_OBJ) { + '../../lib/utils/reify-finish.js': arb => { + if (arb !== ARB_OBJ) throw new Error('got wrong object passed to reify-output') - } - REIFY_OUTPUT_CALLED = true + + REIFY_FINISH_CALLED = true }, '../../lib/utils/output.js': () => { OUTPUT_CALLED = true - } + }, }) t.test('audit', t => { @@ -55,7 +55,7 @@ t.test('should audit using Arborist', t => { t.test('audit fix', t => { audit(['fix'], () => { - t.equal(REIFY_OUTPUT_CALLED, true, 'called reify output') + t.equal(REIFY_FINISH_CALLED, true, 'called reify output') t.end() }) }) @@ -68,12 +68,12 @@ t.test('should audit - json', t => { '../../lib/npm.js': { prefix: 'foo', flatOptions: { - json: true + json: true, }, }, 'npm-audit-report': () => ({ report: 
'there are vulnerabilities', - exitCode: 0 + exitCode: 0, }), '@npmcli/arborist': function () { this.audit = () => { @@ -81,7 +81,7 @@ t.test('should audit - json', t => { } }, '../../lib/utils/reify-output.js': () => {}, - '../../lib/utils/output.js': () => {} + '../../lib/utils/output.js': () => {}, }) audit([], (err) => { @@ -100,11 +100,11 @@ t.test('report endpoint error', t => { prefix: 'foo', command: 'audit', flatOptions: { - json + json, }, log: { - warn: (...warning) => LOGS.push(warning) - } + warn: (...warning) => LOGS.push(warning), + }, }, 'npm-audit-report': () => { throw new Error('should not call audit report when there are errors') @@ -117,25 +117,25 @@ t.test('report endpoint error', t => { method: 'POST', uri: 'https://example.com/', headers: { - head: ['ers'] + head: ['ers'], }, statusCode: 420, body: json ? { nope: 'lol' } - : Buffer.from('i had a vuln but i eated it lol') - } + : Buffer.from('i had a vuln but i eated it lol'), + }, } } }, '../../lib/utils/reify-output.js': () => {}, '../../lib/utils/output.js': (...msg) => { OUTPUT.push(msg) - } + }, } // have to pass mocks to both to get the npm and output set right const auditError = requireInject('../../lib/utils/audit-error.js', mocks) const audit = requireInject('../../lib/audit.js', { ...mocks, - '../../lib/utils/audit-error.js': auditError + '../../lib/utils/audit-error.js': auditError, }) audit([], (err) => { @@ -156,8 +156,8 @@ t.test('report endpoint error', t => { ' "nope": "lol"\n' + ' }\n' + '}' - : 'i had a vuln but i eated it lol' - ] + : 'i had a vuln but i eated it lol', + ], ]) t.strictSame(LOGS, [['audit', 'hello, this didnt work']]) t.end() @@ -170,8 +170,10 @@ t.test('report endpoint error', t => { t.test('completion', t => { t.test('fix', t => { audit.completion({ - conf: { argv: { remain: ['npm', 'audit'] } } + conf: { argv: { remain: ['npm', 'audit'] } }, }, (err, res) => { + if (err) + throw err const subcmd = res.pop() t.equals('fix', subcmd, 'completes to fix') 
t.end() @@ -180,16 +182,17 @@ t.test('completion', t => { t.test('subcommand fix', t => { audit.completion({ - conf: { argv: { remain: ['npm', 'audit', 'fix'] } } + conf: { argv: { remain: ['npm', 'audit', 'fix'] } }, }, (err) => { - t.notOk(err, 'no errors') + if (err) + throw err t.end() }) }) t.test('subcommand not recognized', t => { audit.completion({ - conf: { argv: { remain: ['npm', 'audit', 'repare'] } } + conf: { argv: { remain: ['npm', 'audit', 'repare'] } }, }, (err) => { t.ok(err, 'not recognized') t.end() diff --git a/deps/npm/test/lib/auth/legacy.js b/deps/npm/test/lib/auth/legacy.js index 1607641d8390eb..f926ae13063eae 100644 --- a/deps/npm/test/lib/auth/legacy.js +++ b/deps/npm/test/lib/auth/legacy.js @@ -10,20 +10,20 @@ const legacy = requireInject('../../../lib/auth/legacy.js', { npmlog: { info: (...msgs) => { log += msgs.join(' ') - } + }, }, 'npm-profile': profile, '../../../lib/utils/open-url.js': (url, msg, cb) => { - if (url) { + if (url) cb() - } else { + else { cb(Object.assign( new Error('failed open url'), { code: 'ERROR' } )) } }, - '../../../lib/utils/read-user-info.js': read + '../../../lib/utils/read-user-info.js': read, }) test('login using username/password with token result', async (t) => { @@ -33,16 +33,16 @@ test('login using username/password with token result', async (t) => { const { message, - newCreds + newCreds, } = await legacy({ creds: { username: 'u', password: 'p', email: 'u@npmjs.org', - alwaysAuth: false + alwaysAuth: false, }, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }) t.equal( @@ -74,16 +74,16 @@ test('login using username/password with user info result', async (t) => { const { message, - newCreds + newCreds, } = await legacy({ creds: { username: 'u', password: 'p', email: 'u@npmjs.org', - alwaysAuth: false + alwaysAuth: false, }, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }) t.equal( @@ -98,7 +98,7 @@ test('login using username/password with user info result', async 
(t) => { username: 'u', password: 'p', email: 'u@npmjs.org', - alwaysAuth: false + alwaysAuth: false, }, 'should return used credentials' ) @@ -125,16 +125,16 @@ test('login otp requested', async (t) => { const { message, - newCreds + newCreds, } = await legacy({ creds: { username: 'u', password: 'p', email: 'u@npmjs.org', - alwaysAuth: false + alwaysAuth: false, }, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }) t.equal( @@ -165,10 +165,10 @@ test('login missing basic credential info', async (t) => { legacy({ creds: { username: 'u', - password: 'p' + password: 'p', }, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }), { code: 'ERROR' }, 'should throw server response error' @@ -195,16 +195,16 @@ test('create new user when user not found', async (t) => { const { message, - newCreds + newCreds, } = await legacy({ creds: { username: 'u', password: 'p', email: 'u@npmjs.org', - alwaysAuth: false + alwaysAuth: false, }, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }) t.equal( @@ -245,13 +245,13 @@ test('prompts for user info if required', async (t) => { const { message, - newCreds + newCreds, } = await legacy({ creds: { - alwaysAuth: true + alwaysAuth: true, }, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }) t.equal( @@ -272,7 +272,7 @@ test('prompts for user info if required', async (t) => { username: 'foo', password: 'pass', email: 'foo@npmjs.org', - alwaysAuth: true + alwaysAuth: true, }, 'should return result from profile.login containing prompt info' ) @@ -309,10 +309,10 @@ test('request otp when creating new user', async (t) => { username: 'u', password: 'p', email: 'u@npmjs.org', - alwaysAuth: false + alwaysAuth: false, }, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }) log = '' @@ -338,10 +338,10 @@ test('unknown error during user creation', async (t) => { username: 'u', password: 'p', email: 'u@npmjs.org', - alwaysAuth: false + alwaysAuth: false, }, registry: 
'https://registry.npmjs.org/', - scope: '' + scope: '', }), { code: 'ERROR' }, 'should throw unknown error' @@ -353,16 +353,18 @@ test('unknown error during user creation', async (t) => { }) test('open url error', async (t) => { - profile.login = async (opener, prompt, opts) => { await opener() } + profile.login = async (opener, prompt, opts) => { + await opener() + } await t.rejects( legacy({ creds: { username: 'u', - password: 'p' + password: 'p', }, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }), { message: 'failed open url', code: 'ERROR' }, 'should throw unknown error' @@ -380,10 +382,10 @@ test('login no credentials provided', async (t) => { username: undefined, password: undefined, email: undefined, - alwaysAuth: undefined + alwaysAuth: undefined, }, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }) t.equal( @@ -404,10 +406,10 @@ test('scoped login', async (t) => { username: 'u', password: 'p', email: 'u@npmjs.org', - alwaysAuth: false + alwaysAuth: false, }, registry: 'https://diff-registry.npmjs.org/', - scope: 'myscope' + scope: 'myscope', }) t.equal( diff --git a/deps/npm/test/lib/auth/oauth.js b/deps/npm/test/lib/auth/oauth.js index a8461d235e5e59..82d478b52c7cc9 100644 --- a/deps/npm/test/lib/auth/oauth.js +++ b/deps/npm/test/lib/auth/oauth.js @@ -6,7 +6,7 @@ test('oauth login', (t) => { const oauthOpts = { creds: {}, registry: 'https://diff-registry.npmjs.org/', - scope: 'myscope' + scope: 'myscope', } const oauth = requireInject('../../../lib/auth/oauth.js', { @@ -18,9 +18,9 @@ test('oauth login', (t) => { set: (key, value) => { t.equal(key, 'sso-type', 'should define sso-type') t.equal(value, 'oauth', 'should set sso-type to oauth') - } - } - } + }, + }, + }, }) oauth(oauthOpts) diff --git a/deps/npm/test/lib/auth/saml.js b/deps/npm/test/lib/auth/saml.js index 3e0015bf39be38..87fa6688b57ea7 100644 --- a/deps/npm/test/lib/auth/saml.js +++ b/deps/npm/test/lib/auth/saml.js @@ -6,7 +6,7 @@ test('saml login', (t) 
=> { const samlOpts = { creds: {}, registry: 'https://diff-registry.npmjs.org/', - scope: 'myscope' + scope: 'myscope', } const saml = requireInject('../../../lib/auth/saml.js', { @@ -18,9 +18,9 @@ test('saml login', (t) => { set: (key, value) => { t.equal(key, 'sso-type', 'should define sso-type') t.equal(value, 'saml', 'should set sso-type to saml') - } - } - } + }, + }, + }, }) saml(samlOpts) diff --git a/deps/npm/test/lib/auth/sso.js b/deps/npm/test/lib/auth/sso.js index 0e04309c82bf79..1fc04c64cd3cca 100644 --- a/deps/npm/test/lib/auth/sso.js +++ b/deps/npm/test/lib/auth/sso.js @@ -5,7 +5,7 @@ let log = '' let warn = '' const _flatOptions = { - ssoType: 'oauth' + ssoType: 'oauth', } const token = '24528a24f240' const SSO_URL = 'https://registry.npmjs.org/{SSO_URL}' @@ -18,17 +18,17 @@ const sso = requireInject('../../../lib/auth/sso.js', { }, warn: (...msgs) => { warn += msgs.join(' ') - } + }, }, 'npm-profile': profile, 'npm-registry-fetch': npmFetch, '../../../lib/npm.js': { - flatOptions: _flatOptions + flatOptions: _flatOptions, }, '../../../lib/utils/open-url.js': (url, msg, cb) => { - if (url) { + if (url) cb() - } else { + else { cb(Object.assign( new Error('failed open url'), { code: 'ERROR' } @@ -36,15 +36,15 @@ const sso = requireInject('../../../lib/auth/sso.js', { } }, '../../../lib/utils/otplease.js': (opts, fn) => { - if (opts) { + if (opts) return fn({ ...opts, otp: '1234' }) - } else { + else { throw Object.assign( new Error('failed retrieving otp'), { code: 'ERROR' } ) } - } + }, }) test('empty login', async (t) => { @@ -80,7 +80,7 @@ test('simple login', async (t) => { otp: '1234', registry: 'https://registry.npmjs.org/', scope: '', - ssoType: 'oauth' + ssoType: 'oauth', }, 'should use dummy password' ) @@ -91,11 +91,11 @@ test('simple login', async (t) => { const { message, - newCreds + newCreds, } = await sso({ creds: {}, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }) t.equal( @@ -160,7 +160,7 @@ test('polling retry', 
async (t) => { await sso({ creds: {}, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }) log = '' @@ -180,7 +180,7 @@ test('polling error', async (t) => { sso({ creds: {}, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }), { message: 'unknown error', code: 'ERROR' }, 'should throw unknown error' @@ -199,7 +199,7 @@ test('no token retrieved from loginCouch', async (t) => { sso({ creds: {}, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }), { message: 'no SSO token returned' }, 'should throw no SSO token returned error' @@ -217,7 +217,7 @@ test('no sso url retrieved from loginCouch', async (t) => { sso({ creds: {}, registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', }), { message: 'no SSO URL returned by services' }, 'should throw no SSO url returned error' @@ -234,11 +234,11 @@ test('scoped login', async (t) => { const { message, - newCreds + newCreds, } = await sso({ creds: {}, registry: 'https://diff-registry.npmjs.org/', - scope: 'myscope' + scope: 'myscope', }) t.equal( diff --git a/deps/npm/test/lib/bin.js b/deps/npm/test/lib/bin.js index 05fc1e21e05d4c..c5ed2a91b98310 100644 --- a/deps/npm/test/lib/bin.js +++ b/deps/npm/test/lib/bin.js @@ -9,7 +9,7 @@ test('bin', (t) => { '../../lib/npm.js': { bin: dir, flatOptions: { global: false } }, '../../lib/utils/output.js': (output) => { t.equal(output, dir, 'prints the correct directory') - } + }, }) bin([], (err) => { @@ -35,7 +35,7 @@ test('bin -g', (t) => { '../../lib/utils/path.js': [dir], '../../lib/utils/output.js': (output) => { t.equal(output, dir, 'prints the correct directory') - } + }, }) bin([], (err) => { @@ -61,7 +61,7 @@ test('bin -g (not in path)', (t) => { '../../lib/utils/path.js': ['/not/my/dir'], '../../lib/utils/output.js': (output) => { t.equal(output, dir, 'prints the correct directory') - } + }, }) bin([], (err) => { diff --git a/deps/npm/test/lib/birthday.js b/deps/npm/test/lib/birthday.js index 
35255f97aa34bb..21b60b4c79620f 100644 --- a/deps/npm/test/lib/birthday.js +++ b/deps/npm/test/lib/birthday.js @@ -38,6 +38,7 @@ test('birthday (nope again)', (t) => { const d = new D() return d[B[f]('Z2V0RnVsbFllYXI=', _6)[l]()]() + 1 } + [B[f]('Z2V0VVRDTW9udGg=', _6)[l]()] () { return 9 } @@ -66,6 +67,7 @@ test('birthday (yup)', (t) => { [B[f]('Z2V0VVRDTW9udGg=', _6)[l]()] () { return 8 } + [B[f]('Z2V0VVRDRGF0ZQ==', _6)[l]()] () { return 29 } diff --git a/deps/npm/test/lib/bugs.js b/deps/npm/test/lib/bugs.js index 79d5089724a50b..df64349878e619 100644 --- a/deps/npm/test/lib/bugs.js +++ b/deps/npm/test/lib/bugs.js @@ -5,40 +5,40 @@ const pacote = { manifest: async (spec, options) => { return spec === 'nobugs' ? { name: 'nobugs', - version: '1.2.3' - } - : spec === 'bugsurl' ? { - name: 'bugsurl', - version: '1.2.3', - bugs: 'https://bugzilla.localhost/bugsurl' - } - : spec === 'bugsobj' ? { - name: 'bugsobj', - version: '1.2.3', - bugs: { url: 'https://bugzilla.localhost/bugsobj' } - } - : spec === 'bugsobj-nourl' ? { - name: 'bugsobj-nourl', - version: '1.2.3', - bugs: { no: 'url here' } - } - : spec === 'repourl' ? { - name: 'repourl', - version: '1.2.3', - repository: 'https://github.com/foo/repourl' - } - : spec === 'repoobj' ? { - name: 'repoobj', version: '1.2.3', - repository: { url: 'https://github.com/foo/repoobj' } } - : spec === '.' ? { - name: 'thispkg', - version: '1.2.3', - bugs: 'https://example.com' - } - : null - } + : spec === 'bugsurl' ? { + name: 'bugsurl', + version: '1.2.3', + bugs: 'https://bugzilla.localhost/bugsurl', + } + : spec === 'bugsobj' ? { + name: 'bugsobj', + version: '1.2.3', + bugs: { url: 'https://bugzilla.localhost/bugsobj' }, + } + : spec === 'bugsobj-nourl' ? { + name: 'bugsobj-nourl', + version: '1.2.3', + bugs: { no: 'url here' }, + } + : spec === 'repourl' ? { + name: 'repourl', + version: '1.2.3', + repository: 'https://github.com/foo/repourl', + } + : spec === 'repoobj' ? 
{ + name: 'repoobj', + version: '1.2.3', + repository: { url: 'https://github.com/foo/repoobj' }, + } + : spec === '.' ? { + name: 'thispkg', + version: '1.2.3', + bugs: 'https://example.com', + } + : null + }, } // keep a tally of which urls got opened @@ -51,7 +51,7 @@ const openUrl = (url, errMsg, cb) => { const bugs = requireInject('../../lib/bugs.js', { pacote, - '../../lib/utils/open-url.js': openUrl + '../../lib/utils/open-url.js': openUrl, }) t.test('completion', t => { @@ -70,7 +70,7 @@ t.test('open bugs urls', t => { bugsobj: 'https://bugzilla.localhost/bugsobj', repourl: 'https://github.com/foo/repourl/issues', repoobj: 'https://github.com/foo/repoobj/issues', - '.': 'https://example.com' + '.': 'https://example.com', } const keys = Object.keys(expect) t.plan(keys.length) diff --git a/deps/npm/test/lib/cache.js b/deps/npm/test/lib/cache.js index 9c27386ed8fe19..2e9ad346bb59bc 100644 --- a/deps/npm/test/lib/cache.js +++ b/deps/npm/test/lib/cache.js @@ -5,12 +5,12 @@ const path = require('path') const usageUtil = () => 'usage instructions' const flatOptions = { - force: false + force: false, } const npm = { flatOptions, - cache: '/fake/path' + cache: '/fake/path', } let rimrafPath = '' @@ -23,22 +23,22 @@ let logOutput = [] const npmlog = { silly: (...args) => { logOutput.push(['silly', ...args]) - } + }, } let tarballStreamSpec = '' let tarballStreamOpts = {} const pacote = { tarball: { - stream: (spec, cb, opts) => { + stream: (spec, handler, opts) => { tarballStreamSpec = spec tarballStreamOpts = opts - return cb({ + return handler({ resume: () => {}, - promise: () => Promise.resolve() + promise: () => Promise.resolve(), }) - } - } + }, + }, } let outputOutput = [] @@ -46,18 +46,16 @@ const output = (msg) => { outputOutput.push(msg) } -let cacacheVerifyPath = '' const cacacheVerifyStats = { keptSize: 100, verifiedContent: 1, totalEntries: 1, - runTime: { total: 2000 } + runTime: { total: 2000 }, } const cacache 
= { verify: (path) => { - cacacheVerifyPath = path return cacacheVerifyStats - } + }, } const mocks = { @@ -67,7 +65,7 @@ const mocks = { rimraf, '../../lib/npm.js': npm, '../../lib/utils/output.js': output, - '../../lib/utils/usage.js': usageUtil + '../../lib/utils/usage.js': usageUtil, } const cache = requireInject('../../lib/cache.js', mocks) @@ -132,7 +130,7 @@ t.test('cache add pkg only', t => { t.ifError(err) t.strictSame(logOutput, [ ['silly', 'cache add', 'args', ['mypkg']], - ['silly', 'cache add', 'spec', 'mypkg'] + ['silly', 'cache add', 'spec', 'mypkg'], ], 'logs correctly') t.equal(tarballStreamSpec, 'mypkg', 'passes the correct spec to pacote') t.same(tarballStreamOpts, flatOptions, 'passes the correct options to pacote') @@ -151,7 +149,7 @@ t.test('cache add pkg w/ spec modifier', t => { t.ifError(err) t.strictSame(logOutput, [ ['silly', 'cache add', 'args', ['mypkg', 'latest']], - ['silly', 'cache add', 'spec', 'mypkg@latest'] + ['silly', 'cache add', 'spec', 'mypkg@latest'], ], 'logs correctly') t.equal(tarballStreamSpec, 'mypkg@latest', 'passes the correct spec to pacote') t.same(tarballStreamOpts, flatOptions, 'passes the correct options to pacote') @@ -162,7 +160,6 @@ t.test('cache add pkg w/ spec modifier', t => { t.test('cache verify', t => { t.teardown(() => { outputOutput = [] - cacacheVerifyPath = '' }) cache(['verify'], err => { @@ -171,7 +168,7 @@ t.test('cache verify', t => { `Cache verified and compressed (${path.join(npm.cache, '_cacache')})`, 'Content verified: 1 (100 bytes)', 'Index entries: 1', - 'Finished in 2s' + 'Finished in 2s', ], 'prints correct output') t.end() }) @@ -186,7 +183,6 @@ t.test('cache verify w/ extra output', t => { t.teardown(() => { npm.cache = '/fake/path' outputOutput = [] - cacacheVerifyPath = '' delete cacacheVerifyStats.badContentCount delete cacacheVerifyStats.reclaimedCount delete cacacheVerifyStats.reclaimedSize @@ -202,7 +198,7 @@ t.test('cache verify w/ extra output', t => { 'Content 
garbage-collected: 2 (200 bytes)', 'Missing content: 3', 'Index entries: 1', - 'Finished in 2s' + 'Finished in 2s', ], 'prints correct output') t.end() }) @@ -221,7 +217,7 @@ t.test('cache completion', t => { testComp(['npm', 'cache'], [ 'add', 'clean', - 'verify' + 'verify', ]) testComp(['npm', 'cache', 'add'], []) diff --git a/deps/npm/test/lib/ci.js b/deps/npm/test/lib/ci.js index 43ad2783b02d76..8ddb8f8aad23ca 100644 --- a/deps/npm/test/lib/ci.js +++ b/deps/npm/test/lib/ci.js @@ -3,7 +3,6 @@ const util = require('util') const readdir = util.promisify(fs.readdir) const { test } = require('tap') -const { resolve } = require('path') const requireInject = require('require-inject') @@ -12,9 +11,10 @@ test('should use Arborist', (t) => { '../../lib/npm.js': { prefix: 'foo', flatOptions: { - global: false - } + global: false, + }, }, + '../../lib/utils/reify-finish.js': async () => {}, '@npmcli/arborist': function (args) { t.ok(args, 'gets options object') this.loadVirtual = () => { @@ -25,19 +25,21 @@ test('should use Arborist', (t) => { t.ok(true, 'reify is called') } }, - 'util': { - 'inherits': () => {}, - 'promisify': (fn) => fn + util: { + inherits: () => {}, + promisify: (fn) => fn, }, - 'rimraf': (path) => { + rimraf: (path) => { t.ok(path, 'rimraf called with path') return Promise.resolve(true) }, '../../lib/utils/reify-output.js': function (arb) { t.ok(arb, 'gets arborist tree') - } + }, }) - ci(null, () => { + ci(null, er => { + if (er) + throw er t.end() }) }) @@ -47,37 +49,42 @@ test('should pass flatOptions to Arborist.reify', (t) => { '../../lib/npm.js': { prefix: 'foo', flatOptions: { - production: true - } + production: true, + }, }, + '../../lib/utils/reify-finish.js': async () => {}, '@npmcli/arborist': function () { this.loadVirtual = () => Promise.resolve(true) this.reify = async (options) => { t.equal(options.production, true, 'should pass flatOptions to Arborist.reify') t.end() } - } + }, + }) + ci(null, er => { + if (er) + throw er }) - 
ci(null, () => {}) }) test('should throw if package-lock.json or npm-shrinkwrap missing', (t) => { const testDir = t.testdir({ 'index.js': 'some contents', - 'package.json': 'some info' + 'package.json': 'some info', }) const ci = requireInject('../../lib/ci.js', { '../../lib/npm.js': { prefix: testDir, flatOptions: { - global: false - } + global: false, + }, }, - 'npmlog': { + '../../lib/utils/reify-finish.js': async () => {}, + npmlog: { verbose: () => { t.ok(true, 'log fn called') - } + }, }, }) ci(null, (err, res) => { @@ -92,9 +99,10 @@ test('should throw ECIGLOBAL', (t) => { '../../lib/npm.js': { prefix: 'foo', flatOptions: { - global: true - } - } + global: true, + }, + }, + '../../lib/utils/reify-finish.js': async () => {}, }) ci(null, (err, res) => { t.equals(err.code, 'ECIGLOBAL', 'throws error with global packages') @@ -105,18 +113,19 @@ test('should throw ECIGLOBAL', (t) => { test('should remove existing node_modules before installing', (t) => { const testDir = t.testdir({ - 'node_modules': { - 'some-file': 'some contents' - } + node_modules: { + 'some-file': 'some contents', + }, }) const ci = requireInject('../../lib/ci.js', { '../../lib/npm.js': { prefix: testDir, flatOptions: { - global: false - } + global: false, + }, }, + '../../lib/utils/reify-finish.js': async () => {}, '@npmcli/arborist': function () { this.loadVirtual = () => Promise.resolve(true) this.reify = async (options) => { @@ -127,8 +136,11 @@ test('should remove existing node_modules before installing', (t) => { t.same(nodeModules, ['node_modules'], 'should only have the node_modules directory') t.end() } - } + }, }) - ci(null, () => {}) + ci(null, er => { + if (er) + throw er + }) }) diff --git a/deps/npm/test/lib/cli.js b/deps/npm/test/lib/cli.js index 0d9b6ad6a5a8a6..b5441be1e44d87 100644 --- a/deps/npm/test/lib/cli.js +++ b/deps/npm/test/lib/cli.js @@ -8,14 +8,16 @@ const npmock = { config: { settings: {}, get: (k) => npmock.config.settings[k], - set: (k, v) => { 
npmock.config.settings[k] = v }, + set: (k, v) => { + npmock.config.settings[k] = v + }, }, - commands: {} + commands: {}, } const unsupportedMock = { checkForBrokenNode: () => {}, - checkForUnsupportedNode: () => {} + checkForUnsupportedNode: () => {}, } let errorHandlerCalled = null @@ -31,7 +33,7 @@ const logs = [] const npmlogMock = { pause: () => logs.push('pause'), verbose: (...msg) => logs.push(['verbose', ...msg]), - info: (...msg) => logs.push(['info', ...msg]) + info: (...msg) => logs.push(['info', ...msg]), } const requireInject = require('require-inject') @@ -39,7 +41,7 @@ const cli = requireInject.installGlobally('../../lib/cli.js', { '../../lib/npm.js': npmock, '../../lib/utils/unsupported.js': unsupportedMock, '../../lib/utils/error-handler.js': errorHandlerMock, - npmlog: npmlogMock + npmlog: npmlogMock, }) t.test('print the version, and treat npm_g to npm -g', t => { @@ -50,7 +52,7 @@ t.test('print the version, and treat npm_g to npm -g', t => { const proc = { argv: ['node', 'npm_g', '-v'], version: '420.69.lol', - on: () => {} + on: () => {}, } process.argv = proc.argv npmock.config.settings.version = true @@ -58,14 +60,14 @@ t.test('print the version, and treat npm_g to npm -g', t => { cli(proc) t.strictSame(npmock.argv, []) - t.strictSame(proc.argv, [ 'node', 'npm', '-g', '-v' ]) + t.strictSame(proc.argv, ['node', 'npm', '-g', '-v']) t.strictSame(logs, [ 'pause', - [ 'verbose', 'cli', [ 'node', 'npm', '-g', '-v' ] ], - [ 'info', 'using', 'npm@%s', '99.99.99' ], - [ 'info', 'using', 'node@%s', '420.69.lol' ] + ['verbose', 'cli', ['node', 'npm', '-g', '-v']], + ['info', 'using', 'npm@%s', '99.99.99'], + ['info', 'using', 'node@%s', '420.69.lol'], ]) - t.strictSame(consoleLogs, [ [ '99.99.99' ] ]) + t.strictSame(consoleLogs, [['99.99.99']]) t.strictSame(errorHandlerExitCalled, 0) delete npmock.config.settings.version @@ -87,7 +89,7 @@ t.test('calling with --versions calls npm version with no args', t => { const processArgv = process.argv const proc 
= { argv: ['node', 'npm', 'install', 'or', 'whatever', '--versions'], - on: () => {} + on: () => {}, } process.argv = proc.argv npmock.config.set('versions', true) @@ -107,12 +109,12 @@ t.test('calling with --versions calls npm version with no args', t => { npmock.commands.version = (args, cb) => { t.equal(proc.title, 'npm') t.strictSame(npmock.argv, []) - t.strictSame(proc.argv, [ 'node', 'npm', 'install', 'or', 'whatever', '--versions' ]) + t.strictSame(proc.argv, ['node', 'npm', 'install', 'or', 'whatever', '--versions']) t.strictSame(logs, [ 'pause', - [ 'verbose', 'cli', [ 'node', 'npm', 'install', 'or', 'whatever', '--versions' ] ], - [ 'info', 'using', 'npm@%s', '99.99.99' ], - [ 'info', 'using', 'node@%s', undefined ] + ['verbose', 'cli', ['node', 'npm', 'install', 'or', 'whatever', '--versions']], + ['info', 'using', 'npm@%s', '99.99.99'], + ['info', 'using', 'node@%s', undefined], ]) t.strictSame(consoleLogs, []) @@ -131,7 +133,7 @@ t.test('print usage if -h provided', t => { console.log = (...msg) => consoleLogs.push(msg) const proc = { argv: ['node', 'npm', 'asdf'], - on: () => {} + on: () => {}, } npmock.argv = ['asdf'] @@ -150,12 +152,12 @@ t.test('print usage if -h provided', t => { t.equal(proc.title, 'npm') t.strictSame(args, ['asdf']) t.strictSame(npmock.argv, ['asdf']) - t.strictSame(proc.argv, [ 'node', 'npm', 'asdf' ]) + t.strictSame(proc.argv, ['node', 'npm', 'asdf']) t.strictSame(logs, [ 'pause', - [ 'verbose', 'cli', [ 'node', 'npm', 'asdf' ] ], - [ 'info', 'using', 'npm@%s', '99.99.99' ], - [ 'info', 'using', 'node@%s', undefined ] + ['verbose', 'cli', ['node', 'npm', 'asdf']], + ['info', 'using', 'npm@%s', '99.99.99'], + ['info', 'using', 'node@%s', undefined], ]) t.strictSame(consoleLogs, []) t.strictSame(errorHandlerExitCalled, null) @@ -170,11 +172,10 @@ t.test('load error calls error handler', t => { LOAD_ERROR = er const proc = { argv: ['node', 'npm', 'asdf'], - on: () => {} + on: () => {}, } cli(proc) t.strictSame(errorHandlerCalled, 
[er]) LOAD_ERROR = null t.end() }) - diff --git a/deps/npm/test/lib/config.js b/deps/npm/test/lib/config.js index 890d65731a88c7..8a11a40c813370 100644 --- a/deps/npm/test/lib/config.js +++ b/deps/npm/test/lib/config.js @@ -25,20 +25,20 @@ const types = { 'init-author-name': String, 'init-version': String, 'init.author.name': String, - 'init.version': String + 'init.version': String, } const defaults = { 'init-author-name': '', 'init-version': '1.0.0', 'init.author.name': '', - 'init.version': '1.0.0' + 'init.version': '1.0.0', } const flatOptions = { editor: 'vi', json: false, long: false, - global: false + global: false, } const npm = { @@ -46,17 +46,21 @@ const npm = { log: { info: () => null, enableProgress: () => null, - disableProgress: () => null + disableProgress: () => null, }, config: { data: new Map(Object.entries({ default: { data: defaults, source: 'default values' }, global: { data: {}, source: '/etc/npmrc' }, - cli: { data: flatOptions, source: 'command line options' } + cli: { data: flatOptions, source: 'command line options' }, })), - get (key) { return flatOptions[key] }, - validate () { return true } - } + get (key) { + return flatOptions[key] + }, + validate () { + return true + }, + }, } const usageUtil = () => 'usage instructions' @@ -64,8 +68,10 @@ const usageUtil = () => 'usage instructions' const mocks = { '../../lib/utils/config.js': { defaults, types }, '../../lib/npm.js': npm, - '../../lib/utils/output.js': msg => { result = msg }, - '../../lib/utils/usage.js': usageUtil + '../../lib/utils/output.js': msg => { + result = msg + }, + '../../lib/utils/usage.js': usageUtil, } const config = requireInject('../../lib/config.js', mocks) @@ -99,9 +105,9 @@ t.test('config list overrides', t => { npm.config.data.set('user', { data: { 'init.author.name': 'Foo', - '//private-reg.npmjs.org/:_authThoken': 'f00ba1' + '//private-reg.npmjs.org/:_authThoken': 'f00ba1', }, - source: '~/.npmrc' + source: '~/.npmrc', }) flatOptions['init.author.name'] = 
'Bar' npm.config.find = () => 'cli' @@ -144,7 +150,7 @@ t.test('config list --json', t => { result = '' npm.config.list = [{ '//private-reg.npmjs.org/:_authThoken': 'f00ba1', - ...npm.config.data.get('cli').data + ...npm.config.data.get('cli').data, }] const npmConfigGet = npm.config.get npm.config.get = key => npm.config.list[0][key] @@ -164,7 +170,7 @@ t.test('config list --json', t => { editor: 'vi', json: true, long: false, - global: false + global: false, }, 'should list configs usin json' ) @@ -413,7 +419,7 @@ t.test('config edit', t => { init.author.name=Foo sign-git-commit=true` npm.config.data.set('user', { - source: '~/.npmrc' + source: '~/.npmrc', }) npm.config.save = async where => { t.equal(where, 'user', 'should save to user config by default') @@ -422,25 +428,29 @@ sign-git-commit=true` ...mocks, 'mkdirp-infer-owner': async () => null, fs: { - readFile (path, encoding, cb) { cb(null, npmrc) }, + readFile (path, encoding, cb) { + cb(null, npmrc) + }, writeFile (file, data, encoding, cb) { t.equal(file, '~/.npmrc', 'should save to expected file location') t.matchSnapshot(data, 'should write config file') cb() - } + }, }, editor: (file, { editor }, cb) => { t.equal(file, '~/.npmrc', 'should match user source data') t.equal(editor, 'vi', 'should use default editor') cb() - } + }, } const config = requireInject('../../lib/config.js', editMocks) config(['edit'], (err) => { t.ifError(err, 'npm config edit') // test no config file result - editMocks.fs.readFile = (p, e, cb) => { cb(new Error('ERR')) } + editMocks.fs.readFile = (p, e, cb) => { + cb(new Error('ERR')) + } const config = requireInject('../../lib/config.js', editMocks) config(['edit'], (err) => { t.ifError(err, 'npm config edit') @@ -459,7 +469,7 @@ t.test('config edit --global', t => { flatOptions.global = true const npmrc = 'init.author.name=Foo' npm.config.data.set('global', { - source: '/etc/npmrc' + source: '/etc/npmrc', }) npm.config.save = async where => { t.equal(where, 'global', 'should 
save to global config') @@ -468,18 +478,20 @@ t.test('config edit --global', t => { ...mocks, 'mkdirp-infer-owner': async () => null, fs: { - readFile (path, encoding, cb) { cb(null, npmrc) }, + readFile (path, encoding, cb) { + cb(null, npmrc) + }, writeFile (file, data, encoding, cb) { t.equal(file, '/etc/npmrc', 'should save to global file location') t.matchSnapshot(data, 'should write global config file') cb() - } + }, }, editor: (file, { editor }, cb) => { t.equal(file, '/etc/npmrc', 'should match global source data') t.equal(editor, 'vi', 'should use default editor') cb() - } + }, } const config = requireInject('../../lib/config.js', editMocks) config(['edit'], (err) => { @@ -524,7 +536,7 @@ t.test('completion', t => { 'ls', 'rm', 'edit', - 'list' + 'list', ]) testComp(['npm', 'config', 'set', 'foo'], []) const possibleConfigKeys = [...Object.keys(types)] @@ -539,10 +551,10 @@ t.test('completion', t => { completion({ conf: { argv: { - remain: ['npm', 'config'] - } + remain: ['npm', 'config'], + }, }, - partialWord: 'l' + partialWord: 'l', }, (er, res) => { t.ifError(er) t.strictSame(res, [ @@ -551,7 +563,7 @@ t.test('completion', t => { 'delete', 'ls', 'rm', - 'edit' + 'edit', ], 'npm config') }) diff --git a/deps/npm/test/lib/dedupe.js b/deps/npm/test/lib/dedupe.js index a88c41f6e9c674..ff2d2be5340524 100644 --- a/deps/npm/test/lib/dedupe.js +++ b/deps/npm/test/lib/dedupe.js @@ -1,5 +1,4 @@ const { test } = require('tap') -const dedupe = require('../../lib/dedupe.js') const requireInject = require('require-inject') test('should remove dupes using Arborist', (t) => { @@ -7,8 +6,8 @@ test('should remove dupes using Arborist', (t) => { '../../lib/npm.js': { prefix: 'foo', flatOptions: { - 'dryRun': 'false' - } + dryRun: 'false', + }, }, '@npmcli/arborist': function (args) { t.ok(args, 'gets options object') @@ -18,11 +17,13 @@ test('should remove dupes using Arborist', (t) => { t.ok(true, 'dedupe is called') } }, - '../../lib/utils/reify-output.js': (arb) => { 
+ '../../lib/utils/reify-finish.js': (arb) => { t.ok(arb, 'gets arborist tree') - } + }, }) - dedupe({ dryRun: true }, () => { + dedupe({ dryRun: true }, er => { + if (er) + throw er t.ok(true, 'callback is called') t.end() }) @@ -33,17 +34,16 @@ test('should remove dupes using Arborist - no arguments', (t) => { '../../lib/npm.js': { prefix: 'foo', flatOptions: { - 'dryRun': 'true' - } + dryRun: 'true', + }, }, '@npmcli/arborist': function (args) { t.ok(args.dryRun, 'gets dryRun from flatOptions') this.dedupe = () => {} }, - '../../lib/utils/reify-output.js': () => {} + '../../lib/utils/reify-output.js': () => {}, }) dedupe(null, () => { t.end() }) }) - diff --git a/deps/npm/test/lib/dist-tag.js b/deps/npm/test/lib/dist-tag.js index ad08c2be13f635..e9dde48062dc27 100644 --- a/deps/npm/test/lib/dist-tag.js +++ b/deps/npm/test/lib/dist-tag.js @@ -8,32 +8,33 @@ let log = '' // these declared opts are used in ./utils/read-local-package.js const _flatOptions = { global: false, - get prefix () { return prefix } + get prefix () { + return prefix + }, } const routeMap = { '/-/package/@scoped%2fpkg/dist-tags': { latest: '1.0.0', a: '0.0.1', - b: '0.5.0' + b: '0.5.0', }, '/-/package/@scoped%2fanother/dist-tags': { latest: '2.0.0', a: '0.0.2', - b: '0.6.0' + b: '0.6.0', }, '/-/package/@scoped%2fanother/dist-tags/c': { latest: '7.7.7', a: '0.0.2', b: '0.6.0', - c: '7.7.7' - } + c: '7.7.7', + }, } let npmRegistryFetchMock = (url, opts) => { - if (url === '/-/package/foo/dist-tags') { + if (url === '/-/package/foo/dist-tags') throw new Error('no package found') - } return routeMap[url] } @@ -41,9 +42,9 @@ let npmRegistryFetchMock = (url, opts) => { npmRegistryFetchMock.json = async (url, opts) => routeMap[url] const logger = (...msgs) => { - for (const msg of [...msgs]) { + for (const msg of [...msgs]) log += msg + ' ' - } + log += '\n' } @@ -52,25 +53,29 @@ const distTag = requireInject('../../lib/dist-tag.js', { error: logger, info: logger, verbose: logger, - warn: logger + 
warn: logger, + }, + get 'npm-registry-fetch' () { + return npmRegistryFetchMock }, - get 'npm-registry-fetch' () { return npmRegistryFetchMock }, '../../lib/npm.js': { flatOptions: _flatOptions, config: { get (key) { return _flatOptions[key] - } - } + }, + }, + }, + '../../lib/utils/output.js': msg => { + result = msg }, - '../../lib/utils/output.js': msg => { result = msg } }) test('ls in current package', (t) => { prefix = t.testdir({ 'package.json': JSON.stringify({ - name: '@scoped/pkg' - }) + name: '@scoped/pkg', + }), }) distTag(['ls'], (err) => { t.ifError(err, 'npm dist-tags ls') @@ -87,8 +92,8 @@ test('ls in current package', (t) => { test('no args in current package', (t) => { prefix = t.testdir({ 'package.json': JSON.stringify({ - name: '@scoped/pkg' - }) + name: '@scoped/pkg', + }), }) distTag([], (err) => { t.ifError(err, 'npm dist-tags ls') @@ -146,8 +151,8 @@ test('ls on missing package', (t) => { test('ls on missing name in current package', (t) => { prefix = t.testdir({ 'package.json': JSON.stringify({ - version: '1.0.0' - }) + version: '1.0.0', + }), }) distTag(['ls'], (err) => { t.matchSnapshot( @@ -294,9 +299,9 @@ test('completion', t => { completion({ conf: { argv: { - remain: ['npm', 'dist-tag'] - } - } + remain: ['npm', 'dist-tag'], + }, + }, }, (err, res) => { t.ifError(err, 'npm dist-tags completion') @@ -305,7 +310,7 @@ test('completion', t => { [ 'add', 'rm', - 'ls' + 'ls', ], 'should list npm dist-tag commands for completion' ) @@ -314,9 +319,9 @@ test('completion', t => { completion({ conf: { argv: { - remain: ['npm', 'dist-tag', 'foobar'] - } - } + remain: ['npm', 'dist-tag', 'foobar'], + }, + }, }, (err) => { t.notOk(err, 'should ignore any unkown name') }) diff --git a/deps/npm/test/lib/docs.js b/deps/npm/test/lib/docs.js index 48ba9a3b57f442..b4ede873167d4f 100644 --- a/deps/npm/test/lib/docs.js +++ b/deps/npm/test/lib/docs.js @@ -5,30 +5,30 @@ const pacote = { manifest: async (spec, options) => { return spec === 'nodocs' ? 
{ name: 'nodocs', - version: '1.2.3' - } - : spec === 'docsurl' ? { - name: 'docsurl', - version: '1.2.3', - homepage: 'https://bugzilla.localhost/docsurl' - } - : spec === 'repourl' ? { - name: 'repourl', - version: '1.2.3', - repository: 'https://github.com/foo/repourl' - } - : spec === 'repoobj' ? { - name: 'repoobj', - version: '1.2.3', - repository: { url: 'https://github.com/foo/repoobj' } - } - : spec === '.' ? { - name: 'thispkg', version: '1.2.3', - homepage: 'https://example.com' } - : null - } + : spec === 'docsurl' ? { + name: 'docsurl', + version: '1.2.3', + homepage: 'https://bugzilla.localhost/docsurl', + } + : spec === 'repourl' ? { + name: 'repourl', + version: '1.2.3', + repository: 'https://github.com/foo/repourl', + } + : spec === 'repoobj' ? { + name: 'repoobj', + version: '1.2.3', + repository: { url: 'https://github.com/foo/repoobj' }, + } + : spec === '.' ? { + name: 'thispkg', + version: '1.2.3', + homepage: 'https://example.com', + } + : null + }, } // keep a tally of which urls got opened @@ -41,7 +41,7 @@ const openUrl = (url, errMsg, cb) => { const docs = requireInject('../../lib/docs.js', { pacote, - '../../lib/utils/open-url.js': openUrl + '../../lib/utils/open-url.js': openUrl, }) t.test('completion', t => { @@ -58,7 +58,7 @@ t.test('open docs urls', t => { docsurl: 'https://bugzilla.localhost/docsurl', repourl: 'https://github.com/foo/repourl#readme', repoobj: 'https://github.com/foo/repoobj#readme', - '.': 'https://example.com' + '.': 'https://example.com', } const keys = Object.keys(expect) t.plan(keys.length) diff --git a/deps/npm/test/lib/exec.js b/deps/npm/test/lib/exec.js index 08592353ce36c2..c65f916428d968 100644 --- a/deps/npm/test/lib/exec.js +++ b/deps/npm/test/lib/exec.js @@ -10,9 +10,11 @@ class Arborist { ARB_CTOR.push(options) this.path = options.path } + async loadActual () { return ARB_ACTUAL_TREE[this.path] } + async reify (options) 
{ ARB_REIFY.push(options) } @@ -26,18 +28,18 @@ const npm = { yes: true, call: '', package: [], - legacyPeerDeps: false + legacyPeerDeps: false, }, localPrefix: 'local-prefix', localBin: 'local-bin', globalBin: 'global-bin', config: { get: k => { - if (k !== 'cache') { + if (k !== 'cache') throw new Error('unexpected config get') - } + return 'cache-dir' - } + }, }, log: { disableProgress: () => { @@ -48,23 +50,22 @@ const npm = { }, warn: (...args) => { LOG_WARN.push(args) - } - } + }, + }, } const RUN_SCRIPTS = [] const runScript = async opt => { RUN_SCRIPTS.push(opt) - if (!PROGRESS_IGNORED && PROGRESS_ENABLED) { + if (!PROGRESS_IGNORED && PROGRESS_ENABLED) throw new Error('progress not disabled during run script!') - } } const MANIFESTS = {} const pacote = { manifest: async (spec, options) => { return MANIFESTS[spec] - } + }, } const MKDIRPS = [] @@ -89,7 +90,7 @@ const mocks = { '../../lib/npm.js': npm, pacote, read, - 'mkdirp-infer-owner': mkdirp + 'mkdirp-infer-owner': mkdirp, } const exec = requireInject('../../lib/exec.js', mocks) @@ -113,7 +114,7 @@ t.afterEach(cb => { t.test('npx foo, bin already exists locally', async t => { const path = t.testdir({ - foo: 'just some file' + foo: 'just some file', }) PROGRESS_IGNORED = true @@ -129,15 +130,15 @@ t.test('npx foo, bin already exists locally', async t => { stdioString: true, event: 'npx', env: { - PATH: [path, ...PATH].join(delimiter) + PATH: [path, ...PATH].join(delimiter), }, - stdio: 'inherit' + stdio: 'inherit', }]) }) t.test('npx foo, bin already exists globally', async t => { const path = t.testdir({ - foo: 'just some file' + foo: 'just some file', }) PROGRESS_IGNORED = true @@ -153,9 +154,9 @@ t.test('npx foo, bin already exists globally', async t => { stdioString: true, event: 'npx', env: { - PATH: [path, ...PATH].join(delimiter) + PATH: [path, ...PATH].join(delimiter), }, - stdio: 'inherit' + stdio: 'inherit', }]) }) @@ -163,23 +164,22 @@ t.test('npm exec foo, already present locally', async t => 
{ const path = t.testdir() npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]) + children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } await exec(['foo'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [ { package: ['foo'], path } ]) + t.match(ARB_CTOR, [{ package: ['foo'], path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.match(RUN_SCRIPTS, [{ @@ -189,7 +189,7 @@ t.test('npm exec foo, already present locally', async t => { stdioString: true, event: 'npx', env: { PATH: process.env.PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) }) @@ -198,26 +198,25 @@ t.test('npm exec foo, not present locally or in central loc', async t => { const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map() + children: new Map(), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } await exec(['foo'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [ { package: ['foo'], path } ]) + t.match(ARB_CTOR, [{ package: ['foo'], path }]) t.match(ARB_REIFY, [{add: ['foo@'], legacyPeerDeps: false}], 'need to install foo@') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -228,7 +227,7 @@ t.test('npm exec foo, not present locally or in central loc', async t => { stdioString: true, event: 'npx', env: { PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) }) @@ 
-237,26 +236,25 @@ t.test('npm exec foo, not present locally but in central loc', async t => { const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]) + children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } await exec(['foo'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [ { package: ['foo'], path } ]) + t.match(ARB_CTOR, [{ package: ['foo'], path }]) t.match(ARB_REIFY, [], 'no need to install again, already there') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -267,7 +265,7 @@ t.test('npm exec foo, not present locally but in central loc', async t => { stdioString: true, event: 'npx', env: { PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) }) @@ -276,26 +274,25 @@ t.test('npm exec foo, present locally but wrong version', async t => { const installDir = resolve('cache-dir/_npx/2badf4630f1cfaad') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]) + children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), } MANIFESTS['foo@2.x'] = { name: 'foo', version: '2.3.4', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@2.x' + _from: 'foo@2.x', } await exec(['foo@2.x'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [ { package: ['foo'], path } ]) + t.match(ARB_CTOR, [{ package: ['foo'], path }]) t.match(ARB_REIFY, [{ add: 
['foo@2.x'], legacyPeerDeps: false }], 'need to add foo@2.x') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -306,7 +303,7 @@ t.test('npm exec foo, present locally but wrong version', async t => { stdioString: true, event: 'npx', env: { PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) }) @@ -314,24 +311,23 @@ t.test('npm exec --package=foo bar', async t => { const path = t.testdir() npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]) + children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } npm.flatOptions.package = ['foo'] await exec(['bar'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [ { package: ['foo'], path } ]) + t.match(ARB_CTOR, [{ package: ['foo'], path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.match(RUN_SCRIPTS, [{ @@ -341,7 +337,7 @@ t.test('npm exec --package=foo bar', async t => { stdioString: true, event: 'npx', env: { PATH: process.env.PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) }) @@ -351,28 +347,27 @@ t.test('npm exec @foo/bar -- --some=arg, locally installed', async t => { version: '1.2.3', bin: { foo: 'foo', - bar: 'bar' - } + bar: 'bar', + }, } const path = t.testdir({ node_modules: { '@foo/bar': { - 'package.json': JSON.stringify(foobarManifest) - } - } + 'package.json': JSON.stringify(foobarManifest), + }, + }, }) npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]) + children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]), } MANIFESTS['@foo/bar'] = foobarManifest await exec(['@foo/bar', '--some=arg'], 
er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [ { package: ['@foo/bar'], path } ]) + t.match(ARB_CTOR, [{ package: ['@foo/bar'], path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.match(RUN_SCRIPTS, [{ @@ -382,7 +377,7 @@ t.test('npm exec @foo/bar -- --some=arg, locally installed', async t => { stdioString: true, event: 'npx', env: { PATH: process.env.PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) }) @@ -394,27 +389,26 @@ t.test('npm exec @foo/bar, with same bin alias and no unscoped named bin, locall baz: 'corge', // pick the first one qux: 'corge', quux: 'corge', - } + }, } const path = t.testdir({ node_modules: { '@foo/bar': { - 'package.json': JSON.stringify(foobarManifest) - } - } + 'package.json': JSON.stringify(foobarManifest), + }, + }, }) npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]) + children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]), } MANIFESTS['@foo/bar'] = foobarManifest await exec(['@foo/bar'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [ { package: ['@foo/bar'], path } ]) + t.match(ARB_CTOR, [{ package: ['@foo/bar'], path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.match(RUN_SCRIPTS, [{ @@ -424,7 +418,7 @@ t.test('npm exec @foo/bar, with same bin alias and no unscoped named bin, locall stdioString: true, event: 'npx', env: { PATH: process.env.PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) }) @@ -432,7 +426,7 @@ t.test('npm exec @foo/bar, with different bin alias and no unscoped named bin, l const path = t.testdir() npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]) + children: 
new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]), } MANIFESTS['@foo/bar'] = { name: '@foo/bar', @@ -443,15 +437,14 @@ t.test('npm exec @foo/bar, with different bin alias and no unscoped named bin, l baz: 'quux', }, _from: 'foo@', - _id: '@foo/bar@1.2.3' + _id: '@foo/bar@1.2.3', } return t.rejects(exec(['@foo/bar'], er => { - if (er) { + if (er) throw er - } }), { message: 'could not determine executable to run', - pkgid: '@foo/bar@1.2.3' + pkgid: '@foo/bar@1.2.3', }) }) @@ -468,34 +461,33 @@ t.test('run command with 2 packages, need install, verify sort', t => { const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map() + children: new Map(), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } MANIFESTS.bar = { name: 'bar', version: '1.2.3', bin: { - bar: 'bar' + bar: 'bar', }, - _from: 'bar@' + _from: 'bar@', } await exec(['foobar'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [ { package: packages, path } ]) + t.match(ARB_CTOR, [{ package: packages, path }]) t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -506,7 +498,7 @@ t.test('run command with 2 packages, need install, verify sort', t => { stdioString: true, event: 'npx', env: { PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) }) } @@ -516,21 +508,20 @@ t.test('npm exec foo, no bin in package', t => { const path = t.testdir() npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]) + children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), } 
MANIFESTS.foo = { name: 'foo', version: '1.2.3', _from: 'foo@', - _id: 'foo@1.2.3' + _id: 'foo@1.2.3', } return t.rejects(exec(['foo'], er => { - if (er) { + if (er) throw er - } }), { message: 'could not determine executable to run', - pkgid: 'foo@1.2.3' + pkgid: 'foo@1.2.3', }) }) @@ -538,25 +529,24 @@ t.test('npm exec foo, many bins in package, none named foo', t => { const path = t.testdir() npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]) + children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { bar: 'bar', - baz: 'baz' + baz: 'baz', }, _from: 'foo@', - _id: 'foo@1.2.3' + _id: 'foo@1.2.3', } return t.rejects(exec(['foo'], er => { - if (er) { + if (er) throw er - } }), { message: 'could not determine executable to run', - pkgid: 'foo@1.2.3' + pkgid: 'foo@1.2.3', }) }) @@ -566,20 +556,19 @@ t.test('npm exec -p foo -c "ls -laF"', async t => { npm.flatOptions.package = ['foo'] npm.flatOptions.call = 'ls -laF' ARB_ACTUAL_TREE[path] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]) + children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', - _from: 'foo@' + _from: 'foo@', } await exec([], er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [ { package: ['foo'], path } ]) + t.match(ARB_CTOR, [{ package: ['foo'], path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.match(RUN_SCRIPTS, [{ @@ -589,7 +578,7 @@ t.test('npm exec -p foo -c "ls -laF"', async t => { stdioString: true, event: 'npx', env: { PATH: process.env.PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) }) @@ -621,34 +610,33 @@ t.test('prompt when installs are needed if not already present and shell is a TT const installDir = 
resolve('cache-dir/_npx/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map() + children: new Map(), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } MANIFESTS.bar = { name: 'bar', version: '1.2.3', bin: { - bar: 'bar' + bar: 'bar', }, - _from: 'bar@' + _from: 'bar@', } await exec(['foobar'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [ { package: packages, path } ]) + t.match(ARB_CTOR, [{ package: packages, path }]) t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -659,11 +647,11 @@ t.test('prompt when installs are needed if not already present and shell is a TT stdioString: true, event: 'npx', env: { PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) t.strictSame(READ, [{ prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? 
', - default: 'y' + default: 'y', }]) }) @@ -690,34 +678,33 @@ t.test('skip prompt when installs are needed if not already present and shell is const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map() + children: new Map(), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } MANIFESTS.bar = { name: 'bar', version: '1.2.3', bin: { - bar: 'bar' + bar: 'bar', }, - _from: 'bar@' + _from: 'bar@', } await exec(['foobar'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [ { package: packages, path } ]) + t.match(ARB_CTOR, [{ package: packages, path }]) t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -728,7 +715,7 @@ t.test('skip prompt when installs are needed if not already present and shell is stdioString: true, event: 'npx', env: { PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) t.strictSame(READ, [], 'should not have prompted') t.strictSame(LOG_WARN, [['exec', 'The following packages were not found and will be installed: bar, foo']], 'should have printed a warning') @@ -757,26 +744,25 @@ t.test('skip prompt when installs are needed if not already present and shell is const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map() + children: new Map(), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } await exec(['foobar'], er => { - if (er) { + if (er) throw er 
- } }) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [ { package: packages, path } ]) + t.match(ARB_CTOR, [{ package: packages, path }]) t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install the package') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` @@ -787,7 +773,7 @@ t.test('skip prompt when installs are needed if not already present and shell is stdioString: true, event: 'npx', env: { PATH }, - stdio: 'inherit' + stdio: 'inherit', }]) t.strictSame(READ, [], 'should not have prompted') t.strictSame(LOG_WARN, [['exec', 'The following package was not found and will be installed: foo']], 'should have printed a warning') @@ -811,43 +797,42 @@ t.test('abort if prompt rejected', async t => { npm.flatOptions.package = packages npm.flatOptions.yes = undefined - const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b)) const path = t.testdir() const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map() + children: new Map(), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } MANIFESTS.bar = { name: 'bar', version: '1.2.3', bin: { - bar: 'bar' + bar: 'bar', }, - _from: 'bar@' + _from: 'bar@', } await exec(['foobar'], er => { t.equal(er, 'canceled', 'should be canceled') }) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [ { package: packages, path } ]) + t.match(ARB_CTOR, [{ package: packages, path }]) t.strictSame(ARB_REIFY, [], 'no install performed') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.strictSame(RUN_SCRIPTS, []) t.strictSame(READ, [{ prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? 
', - default: 'y' + default: 'y', }]) }) @@ -869,43 +854,42 @@ t.test('abort if prompt false', async t => { npm.flatOptions.package = packages npm.flatOptions.yes = undefined - const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b)) const path = t.testdir() const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map() + children: new Map(), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } MANIFESTS.bar = { name: 'bar', version: '1.2.3', bin: { - bar: 'bar' + bar: 'bar', }, - _from: 'bar@' + _from: 'bar@', } await exec(['foobar'], er => { t.equal(er, 'canceled', 'should be canceled') }) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [ { package: packages, path } ]) + t.match(ARB_CTOR, [{ package: packages, path }]) t.strictSame(ARB_REIFY, [], 'no install performed') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.strictSame(RUN_SCRIPTS, []) t.strictSame(READ, [{ prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? 
', - default: 'y' + default: 'y', }]) }) @@ -926,37 +910,36 @@ t.test('abort if -n provided', async t => { npm.flatOptions.package = packages npm.flatOptions.yes = false - const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b)) const path = t.testdir() const installDir = resolve('cache-dir/_npx/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map() + children: new Map(), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } MANIFESTS.bar = { name: 'bar', version: '1.2.3', bin: { - bar: 'bar' + bar: 'bar', }, - _from: 'bar@' + _from: 'bar@', } await exec(['foobar'], er => { t.equal(er, 'canceled', 'should be canceled') }) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [ { package: packages, path } ]) + t.match(ARB_CTOR, [{ package: packages, path }]) t.strictSame(ARB_REIFY, [], 'no install performed') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.strictSame(RUN_SCRIPTS, []) @@ -968,25 +951,24 @@ t.test('forward legacyPeerDeps opt', async t => { const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map() + children: new Map(), } ARB_ACTUAL_TREE[installDir] = { - children: new Map() + children: new Map(), } MANIFESTS.foo = { name: 'foo', version: '1.2.3', bin: { - foo: 'foo' + foo: 'foo', }, - _from: 'foo@' + _from: 'foo@', } npm.flatOptions.yes = true npm.flatOptions.legacyPeerDeps = true await exec(['foo'], er => { - if (er) { + if (er) throw er - } }) t.match(ARB_REIFY, [{add: ['foo@'], legacyPeerDeps: true}], 'need to install foo@ using legacyPeerDeps opt') }) diff --git a/deps/npm/test/lib/explain.js b/deps/npm/test/lib/explain.js index a9db344f8b20ca..1eeca8c4c4f5d0 100644 --- a/deps/npm/test/lib/explain.js +++ b/deps/npm/test/lib/explain.js @@ 
-3,7 +3,7 @@ const requireInject = require('require-inject') const npm = { prefix: null, color: true, - flatOptions: {} + flatOptions: {}, } const { resolve } = require('path') @@ -20,8 +20,8 @@ const explain = requireInject('../../lib/explain.js', { '../../lib/utils/explain-dep.js': { explainNode: (expl, depth, color) => { return `${expl.name}@${expl.version} depth=${depth} color=${color}` - } - } + }, + }, }) t.test('no args throws usage', async t => { @@ -68,15 +68,15 @@ t.test('explain some nodes', async t => { name: 'foo', version: '1.2.3', dependencies: { - bar: '*' - } - }) + bar: '*', + }, + }), }, bar: { 'package.json': JSON.stringify({ name: 'bar', - version: '1.2.3' - }) + version: '1.2.3', + }), }, baz: { 'package.json': JSON.stringify({ @@ -84,40 +84,39 @@ t.test('explain some nodes', async t => { version: '1.2.3', dependencies: { foo: '*', - bar: '2' - } + bar: '2', + }, }), node_modules: { bar: { 'package.json': JSON.stringify({ name: 'bar', - version: '2.3.4' - }) + version: '2.3.4', + }), }, extra: { 'package.json': JSON.stringify({ name: 'extra', version: '99.9999.999999', - description: 'extraneous package' - }) - } - } - } + description: 'extraneous package', + }), + }, + }, + }, }, 'package.json': JSON.stringify({ dependencies: { - baz: '1' - } - }) + baz: '1', + }, + }), }) // works with either a full actual path or the location const p = 'node_modules/foo' for (const path of [p, resolve(npm.prefix, p)]) { await explain([path], er => { - if (er) { + if (er) throw er - } }) t.strictSame(OUTPUT, [['foo@1.2.3 depth=Infinity color=true']]) OUTPUT.length = 0 @@ -125,44 +124,40 @@ t.test('explain some nodes', async t => { // finds all nodes by name await explain(['bar'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(OUTPUT, [[ 'bar@1.2.3 depth=Infinity color=true\n\n' + - 'bar@2.3.4 depth=Infinity color=true' + 'bar@2.3.4 depth=Infinity color=true', ]]) OUTPUT.length = 0 // finds only nodes that match the spec await explain(['bar@1'], 
er => { - if (er) { + if (er) throw er - } }) t.strictSame(OUTPUT, [['bar@1.2.3 depth=Infinity color=true']]) OUTPUT.length = 0 // finds extraneous nodes await explain(['extra'], er => { - if (er) { + if (er) throw er - } }) t.strictSame(OUTPUT, [['extra@99.9999.999999 depth=Infinity color=true']]) OUTPUT.length = 0 npm.flatOptions.json = true await explain(['node_modules/foo'], er => { - if (er) { + if (er) throw er - } }) t.match(JSON.parse(OUTPUT[0][0]), [{ name: 'foo', version: '1.2.3', - dependents: Array + dependents: Array, }]) OUTPUT.length = 0 npm.flatOptions.json = false @@ -174,4 +169,3 @@ t.test('explain some nodes', async t => { }) }) }) - diff --git a/deps/npm/test/lib/explore.js b/deps/npm/test/lib/explore.js index 03ad230489d17d..64c70bcce7ef62 100644 --- a/deps/npm/test/lib/explore.js +++ b/deps/npm/test/lib/explore.js @@ -13,12 +13,12 @@ let SPAWN_EXIT_CODE = 0 let SPAWN_SHELL_EXEC = null let SPAWN_SHELL_ARGS = null const mockSpawn = (sh, shellArgs, opts) => { - if (sh !== 'shell-command') { + if (sh !== 'shell-command') throw new Error('got wrong shell command') - } - if (SPAWN_ERROR) { + + if (SPAWN_ERROR) return Promise.reject(SPAWN_ERROR) - } + SPAWN_SHELL_EXEC = sh SPAWN_SHELL_ARGS = shellArgs return Promise.resolve({ code: SPAWN_EXIT_CODE }) @@ -29,28 +29,28 @@ let ERROR_HANDLER_CALLED = null const getExplore = windows => requireInject('../../lib/explore.js', { '../../lib/utils/is-windows.js': windows, '../../lib/utils/escape-arg.js': requireInject('../../lib/utils/escape-arg.js', { - '../../lib/utils/is-windows.js': windows + '../../lib/utils/is-windows.js': windows, }), path: require('path')[windows ? 
'win32' : 'posix'], '../../lib/utils/escape-exec-path.js': requireInject('../../lib/utils/escape-arg.js', { - '../../lib/utils/is-windows.js': windows + '../../lib/utils/is-windows.js': windows, }), '../../lib/utils/error-handler.js': er => { ERROR_HANDLER_CALLED = er }, fs: { - stat: mockStat + stat: mockStat, }, '../../lib/npm.js': { dir: windows ? 'c:\\npm\\dir' : '/npm/dir', flatOptions: { - shell: 'shell-command' - } + shell: 'shell-command', + }, }, '@npmcli/promise-spawn': mockSpawn, '../../lib/utils/output.js': out => { output.push(out) - } + }, }) const windowsExplore = getExplore(true) @@ -63,42 +63,42 @@ t.test('basic interactive', t => { }) t.test('windows', t => windowsExplore(['pkg'], er => { - if (er) { + if (er) throw er - } + t.strictSame({ ERROR_HANDLER_CALLED, STAT_CALLED, SPAWN_SHELL_EXEC, - SPAWN_SHELL_ARGS + SPAWN_SHELL_ARGS, }, { ERROR_HANDLER_CALLED: null, STAT_CALLED: 'c:\\npm\\dir\\pkg', SPAWN_SHELL_EXEC: 'shell-command', - SPAWN_SHELL_ARGS: [] + SPAWN_SHELL_ARGS: [], }) t.strictSame(output, [ - "\nExploring c:\\npm\\dir\\pkg\nType 'exit' or ^D when finished\n" + "\nExploring c:\\npm\\dir\\pkg\nType 'exit' or ^D when finished\n", ]) })) t.test('posix', t => posixExplore(['pkg'], er => { - if (er) { + if (er) throw er - } + t.strictSame({ ERROR_HANDLER_CALLED, STAT_CALLED, SPAWN_SHELL_EXEC, - SPAWN_SHELL_ARGS + SPAWN_SHELL_ARGS, }, { ERROR_HANDLER_CALLED: null, STAT_CALLED: '/npm/dir/pkg', SPAWN_SHELL_EXEC: 'shell-command', - SPAWN_SHELL_ARGS: [] + SPAWN_SHELL_ARGS: [], }) t.strictSame(output, [ - "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n" + "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n", ]) })) @@ -120,43 +120,43 @@ t.test('interactive tracks exit code', t => { }) t.test('windows', t => windowsExplore(['pkg'], er => { - if (er) { + if (er) throw er - } + t.strictSame({ ERROR_HANDLER_CALLED, STAT_CALLED, SPAWN_SHELL_EXEC, - SPAWN_SHELL_ARGS + SPAWN_SHELL_ARGS, }, { ERROR_HANDLER_CALLED: null, STAT_CALLED: 
'c:\\npm\\dir\\pkg', SPAWN_SHELL_EXEC: 'shell-command', - SPAWN_SHELL_ARGS: [] + SPAWN_SHELL_ARGS: [], }) t.strictSame(output, [ - "\nExploring c:\\npm\\dir\\pkg\nType 'exit' or ^D when finished\n" + "\nExploring c:\\npm\\dir\\pkg\nType 'exit' or ^D when finished\n", ]) t.equal(process.exitCode, 99) })) t.test('posix', t => posixExplore(['pkg'], er => { - if (er) { + if (er) throw er - } + t.strictSame({ ERROR_HANDLER_CALLED, STAT_CALLED, SPAWN_SHELL_EXEC, - SPAWN_SHELL_ARGS + SPAWN_SHELL_ARGS, }, { ERROR_HANDLER_CALLED: null, STAT_CALLED: '/npm/dir/pkg', SPAWN_SHELL_EXEC: 'shell-command', - SPAWN_SHELL_ARGS: [] + SPAWN_SHELL_ARGS: [], }) t.strictSame(output, [ - "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n" + "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n", ]) t.equal(process.exitCode, 99) })) @@ -166,14 +166,14 @@ t.test('interactive tracks exit code', t => { SPAWN_ERROR = null }) SPAWN_ERROR = Object.assign(new Error('glorb'), { - code: 33 + code: 33, }) return posixExplore(['pkg'], er => { - if (er) { + if (er) throw er - } + t.strictSame(output, [ - "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n" + "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n", ]) t.equal(process.exitCode, 33) }) @@ -189,14 +189,14 @@ t.test('basic non-interactive', t => { }) t.test('windows', t => windowsExplore(['pkg', 'ls'], er => { - if (er) { + if (er) throw er - } + t.strictSame({ ERROR_HANDLER_CALLED, STAT_CALLED, SPAWN_SHELL_EXEC, - SPAWN_SHELL_ARGS + SPAWN_SHELL_ARGS, }, { ERROR_HANDLER_CALLED: null, STAT_CALLED: 'c:\\npm\\dir\\pkg', @@ -206,25 +206,25 @@ t.test('basic non-interactive', t => { '/s', '/c', '"ls"', - ] + ], }) t.strictSame(output, []) })) t.test('posix', t => posixExplore(['pkg', 'ls'], er => { - if (er) { + if (er) throw er - } + t.strictSame({ ERROR_HANDLER_CALLED, STAT_CALLED, SPAWN_SHELL_EXEC, - SPAWN_SHELL_ARGS + SPAWN_SHELL_ARGS, }, { ERROR_HANDLER_CALLED: null, STAT_CALLED: '/npm/dir/pkg', 
SPAWN_SHELL_EXEC: 'shell-command', - SPAWN_SHELL_ARGS: ['-c', 'ls'] + SPAWN_SHELL_ARGS: ['-c', 'ls'], }) t.strictSame(output, []) })) @@ -239,19 +239,19 @@ t.test('usage if no pkg provided', t => { }) t.plan(1) posixExplore([], er => { - if (er) { + if (er) throw er - } + t.strictSame({ ERROR_HANDLER_CALLED: null, STAT_CALLED, SPAWN_SHELL_EXEC, - SPAWN_SHELL_ARGS + SPAWN_SHELL_ARGS, }, { ERROR_HANDLER_CALLED: null, STAT_CALLED: '/npm/dir/pkg', SPAWN_SHELL_EXEC: 'shell-command', - SPAWN_SHELL_ARGS: ['-c', 'ls'] + SPAWN_SHELL_ARGS: ['-c', 'ls'], }) }).catch(er => t.equal(er, 'npm explore [ -- ]')) }) @@ -261,19 +261,19 @@ t.test('pkg not installed', t => { t.plan(1) posixExplore(['pkg', 'ls'], er => { - if (er) { + if (er) throw er - } + t.strictSame({ ERROR_HANDLER_CALLED, STAT_CALLED, SPAWN_SHELL_EXEC, - SPAWN_SHELL_ARGS + SPAWN_SHELL_ARGS, }, { ERROR_HANDLER_CALLED: null, STAT_CALLED: '/npm/dir/pkg', SPAWN_SHELL_EXEC: 'shell-command', - SPAWN_SHELL_ARGS: ['-c', 'ls'] + SPAWN_SHELL_ARGS: ['-c', 'ls'], }) t.strictSame(output, []) }).catch(er => { diff --git a/deps/npm/test/lib/find-dupes.js b/deps/npm/test/lib/find-dupes.js index 2f6272b90c1296..73c8fa2dc2793f 100644 --- a/deps/npm/test/lib/find-dupes.js +++ b/deps/npm/test/lib/find-dupes.js @@ -1,5 +1,4 @@ const { test } = require('tap') -const findDupes = require('../../lib/find-dupes.js') const requireInject = require('require-inject') test('should run dedupe in dryRun mode', (t) => { @@ -7,11 +6,10 @@ test('should run dedupe in dryRun mode', (t) => { '../../lib/dedupe.js': function (args, cb) { t.ok(args.dryRun, 'dryRun is true') cb() - } + }, }) findDupes(null, () => { t.ok(true, 'callback is called') t.end() }) }) - diff --git a/deps/npm/test/lib/fund.js b/deps/npm/test/lib/fund.js index fc6a63aa17752a..a23fc88ced89e3 100644 --- a/deps/npm/test/lib/fund.js +++ b/deps/npm/test/lib/fund.js @@ -6,7 +6,7 @@ const requireInject = require('require-inject') const version = '1.0.0' const funding = { type: 
'individual', - url: 'http://example.com/donate' + url: 'http://example.com/donate', } const maintainerOwnsAllDeps = { @@ -16,8 +16,8 @@ const maintainerOwnsAllDeps = { funding, dependencies: { 'dep-foo': '*', - 'dep-bar': '*' - } + 'dep-bar': '*', + }, }), node_modules: { 'dep-foo': { @@ -26,27 +26,27 @@ const maintainerOwnsAllDeps = { version, funding, dependencies: { - 'dep-sub-foo': '*' - } + 'dep-sub-foo': '*', + }, }), node_modules: { 'dep-sub-foo': { 'package.json': JSON.stringify({ name: 'dep-sub-foo', version, - funding - }) - } - } + funding, + }), + }, + }, }, 'dep-bar': { 'package.json': JSON.stringify({ name: 'dep-bar', version, - funding - }) - } - } + funding, + }), + }, + }, } const nestedNoFundingPackages = { @@ -54,11 +54,11 @@ const nestedNoFundingPackages = { name: 'nested-no-funding-packages', version, dependencies: { - foo: '*' + foo: '*', }, devDependencies: { - lorem: '*' - } + lorem: '*', + }, }), node_modules: { foo: { @@ -66,38 +66,38 @@ const nestedNoFundingPackages = { name: 'foo', version, dependencies: { - bar: '*' - } + bar: '*', + }, }), node_modules: { bar: { 'package.json': JSON.stringify({ name: 'bar', version, - funding + funding, }), node_modules: { 'sub-bar': { 'package.json': JSON.stringify({ name: 'sub-bar', version, - funding: 'https://example.com/sponsor' - }) - } - } - } - } + funding: 'https://example.com/sponsor', + }), + }, + }, + }, + }, }, lorem: { 'package.json': JSON.stringify({ name: 'lorem', version, funding: { - url: 'https://example.com/lorem' - } - }) - } - } + url: 'https://example.com/lorem', + }, + }), + }, + }, } const nestedMultipleFundingPackages = { @@ -106,14 +106,14 @@ const nestedMultipleFundingPackages = { version, funding: [ 'https://one.example.com', - 'https://two.example.com' + 'https://two.example.com', ], dependencies: { - foo: '*' + foo: '*', }, devDependencies: { - bar: '*' - } + bar: '*', + }, }), node_modules: { foo: { @@ -123,9 +123,9 @@ const nestedMultipleFundingPackages = { funding: [ 
'http://example.com', { url: 'http://sponsors.example.com/me' }, - 'http://collective.example.com' - ] - }) + 'http://collective.example.com', + ], + }), }, bar: { 'package.json': JSON.stringify({ @@ -133,11 +133,11 @@ const nestedMultipleFundingPackages = { version, funding: [ 'http://collective.example.com', - { url: 'http://sponsors.example.com/you' } - ] - }) - } - } + { url: 'http://sponsors.example.com/you' }, + ], + }), + }, + }, } const conflictingFundingPackages = { @@ -145,19 +145,19 @@ const conflictingFundingPackages = { name: 'conflicting-funding-packages', version, dependencies: { - foo: '1.0.0' + foo: '1.0.0', }, devDependencies: { - bar: '1.0.0' - } + bar: '1.0.0', + }, }), node_modules: { foo: { 'package.json': JSON.stringify({ name: 'foo', version: '1.0.0', - funding: 'http://example.com/1' - }) + funding: 'http://example.com/1', + }), }, bar: { node_modules: { @@ -165,19 +165,19 @@ const conflictingFundingPackages = { 'package.json': JSON.stringify({ name: 'foo', version: '2.0.0', - funding: 'http://example.com/2' - }) - } + funding: 'http://example.com/2', + }), + }, }, 'package.json': JSON.stringify({ name: 'bar', version: '1.0.0', dependencies: { - foo: '2.0.0' - } - }) - } - } + foo: '2.0.0', + }, + }), + }, + }, } let result = '' @@ -188,7 +188,7 @@ const _flatOptions = { global: false, prefix: undefined, unicode: false, - which: undefined + which: undefined, } const openUrl = (url, msg, cb) => { if (url === 'http://npmjs.org') { @@ -198,35 +198,39 @@ const openUrl = (url, msg, cb) => { if (_flatOptions.json) { printUrl = JSON.stringify({ title: msg, - url: url + url: url, }) - } else { + } else printUrl = `${msg}:\n ${url}` - } + cb() } const fund = requireInject('../../lib/fund.js', { '../../lib/npm.js': { flatOptions: _flatOptions, - get prefix () { return _flatOptions.prefix } + get prefix () { + return _flatOptions.prefix + }, }, '../../lib/utils/open-url.js': openUrl, - '../../lib/utils/output.js': msg => { result += msg + '\n' }, + 
'../../lib/utils/output.js': msg => { + result += msg + '\n' + }, pacote: { manifest: (arg) => arg.name === 'ntl' ? Promise.resolve({ - funding: 'http://example.com/pacote' + funding: 'http://example.com/pacote', }) - : Promise.reject(new Error('ERROR')) - } + : Promise.reject(new Error('ERROR')), + }, }) test('fund with no package containing funding', t => { _flatOptions.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'no-funding-package', - version: '0.0.0' - }) + version: '0.0.0', + }), }) fund([], (err) => { @@ -264,7 +268,7 @@ test('fund in which same maintainer owns all its deps, using --json option', t = dependencies: { 'dep-bar': { version: '1.0.0', - funding: { type: 'individual', url: 'http://example.com/donate' } + funding: { type: 'individual', url: 'http://example.com/donate' }, }, 'dep-foo': { version: '1.0.0', @@ -272,11 +276,11 @@ test('fund in which same maintainer owns all its deps, using --json option', t = dependencies: { 'dep-sub-foo': { version: '1.0.0', - funding: { type: 'individual', url: 'http://example.com/donate' } - } - } - } - } + funding: { type: 'individual', url: 'http://example.com/donate' }, + }, + }, + }, + }, }, 'should print stack packages together' ) @@ -317,13 +321,13 @@ test('fund containing multi-level nested deps with no funding, using --json opti dependencies: { lorem: { version: '1.0.0', - funding: { url: 'https://example.com/lorem' } + funding: { url: 'https://example.com/lorem' }, }, bar: { version: '1.0.0', - funding: { type: 'individual', url: 'http://example.com/donate' } - } - } + funding: { type: 'individual', url: 'http://example.com/donate' }, + }, + }, }, 'should omit dependencies with no funding declared in json output' ) @@ -348,39 +352,39 @@ test('fund containing multi-level nested deps with no funding, using --json opti version: '1.0.0', funding: [ { - url: 'https://one.example.com' + url: 'https://one.example.com', }, { - url: 'https://two.example.com' - } + url: 'https://two.example.com', + }, 
], dependencies: { bar: { version: '1.0.0', funding: [ { - url: 'http://collective.example.com' + url: 'http://collective.example.com', }, { - url: 'http://sponsors.example.com/you' - } - ] + url: 'http://sponsors.example.com/you', + }, + ], }, foo: { version: '1.0.0', funding: [ { - url: 'http://example.com' + url: 'http://example.com', }, { - url: 'http://sponsors.example.com/me' + url: 'http://sponsors.example.com/me', }, { - url: 'http://collective.example.com' - } - ] - } - } + url: 'http://collective.example.com', + }, + ], + }, + }, }, 'should list multiple funding entries in json output' ) @@ -440,8 +444,8 @@ test('fund using string shorthand', t => { 'package.json': JSON.stringify({ name: 'funding-string-shorthand', version: '0.0.0', - funding: 'https://example.com/sponsor' - }) + funding: 'https://example.com/sponsor', + }), }) fund(['.'], (err) => { @@ -469,18 +473,18 @@ test('fund using symlink ref', t => { _flatOptions.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'using-symlink-ref', - version: '1.0.0' + version: '1.0.0', }), a: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.0', - funding: 'http://example.com/a' - }) + funding: 'http://example.com/a', + }), }, node_modules: { - a: t.fixture('symlink', '../a') - } + a: t.fixture('symlink', '../a'), + }, }) // using symlinked ref @@ -515,33 +519,33 @@ test('fund using data from actual tree', t => { _flatOptions.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'using-actual-tree', - version: '1.0.0' + version: '1.0.0', }), node_modules: { a: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.0', - funding: 'http://example.com/a' - }) + funding: 'http://example.com/a', + }), }, b: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.0', - funding: 'http://example.com/b' + funding: 'http://example.com/b', }), node_modules: { a: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.1', - funding: 'http://example.com/_AAA' - }) - } - } - } - } + 
funding: 'http://example.com/_AAA', + }), + }, + }, + }, + }, }) // using symlinked ref @@ -595,7 +599,7 @@ test('fund using package argument with no browser, using --json option', t => { JSON.parse(printUrl), { title: 'individual funding available at the following URL', - url: 'http://example.com/donate' + url: 'http://example.com/donate', }, 'should open funding url using json output' ) @@ -676,8 +680,8 @@ test('fund pkg missing version number', t => { _flatOptions.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'foo', - funding: 'http://example.com/foo' - }) + funding: 'http://example.com/foo', + }), }) fund([], (err) => { @@ -693,8 +697,8 @@ test('fund a package throws on openUrl', t => { 'package.json': JSON.stringify({ name: 'foo', version: '1.0.0', - funding: 'http://npmjs.org' - }) + funding: 'http://npmjs.org', + }), }) fund(['.'], (err) => { @@ -711,14 +715,14 @@ test('fund a package with type and multiple sources', t => { funding: [ { type: 'Foo', - url: 'http://example.com/foo' + url: 'http://example.com/foo', }, { type: 'Lorem', - url: 'http://example.com/foo-lorem' - } - ] - }) + url: 'http://example.com/foo-lorem', + }, + ], + }), }) fund(['.'], (err) => { @@ -738,16 +742,16 @@ test('fund colors', t => { dependencies: { a: '^1.0.0', b: '^1.0.0', - c: '^1.0.0' - } + c: '^1.0.0', + }, }), node_modules: { a: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.0', - funding: 'http://example.com/a' - }) + funding: 'http://example.com/a', + }), }, b: { 'package.json': JSON.stringify({ @@ -756,32 +760,32 @@ test('fund colors', t => { funding: 'http://example.com/b', dependencies: { d: '^1.0.0', - e: '^1.0.0' - } - }) + e: '^1.0.0', + }, + }), }, c: { 'package.json': JSON.stringify({ name: 'c', version: '1.0.0', - funding: 'http://example.com/b' - }) + funding: 'http://example.com/b', + }), }, d: { 'package.json': JSON.stringify({ name: 'd', version: '1.0.0', - funding: 'http://example.com/d' - }) + funding: 'http://example.com/d', + }), 
}, e: { 'package.json': JSON.stringify({ name: 'e', version: '1.0.0', - funding: 'http://example.com/e' - }) - } - } + funding: 'http://example.com/e', + }), + }, + }, }) _flatOptions.color = true @@ -802,8 +806,8 @@ test('sub dep with fund info and a parent with no funding info', t => { version: '1.0.0', dependencies: { a: '^1.0.0', - b: '^1.0.0' - } + b: '^1.0.0', + }, }), node_modules: { a: { @@ -811,16 +815,16 @@ test('sub dep with fund info and a parent with no funding info', t => { name: 'a', version: '1.0.0', dependencies: { - c: '^1.0.0' - } - }) + c: '^1.0.0', + }, + }), }, b: { 'package.json': JSON.stringify({ name: 'b', version: '1.0.0', - funding: 'http://example.com/b' - }) + funding: 'http://example.com/b', + }), }, c: { 'package.json': JSON.stringify({ @@ -828,11 +832,11 @@ test('sub dep with fund info and a parent with no funding info', t => { version: '1.0.0', funding: [ 'http://example.com/c', - 'http://example.com/c-other' - ] - }) - } - } + 'http://example.com/c-other', + ], + }), + }, + }, }) fund([], (err) => { diff --git a/deps/npm/test/lib/get.js b/deps/npm/test/lib/get.js index 5f2f29bd922ae1..5260c00bae795c 100644 --- a/deps/npm/test/lib/get.js +++ b/deps/npm/test/lib/get.js @@ -9,9 +9,9 @@ test('should retrieve values from npm.commands.config', (t) => { t.equal(action, 'get', 'should use config get action') t.equal(arg, 'foo', 'should use expected key') t.end() - } - } - } + }, + }, + }, }) get(['foo']) diff --git a/deps/npm/test/lib/init.js b/deps/npm/test/lib/init.js new file mode 100644 index 00000000000000..cb15eac8fc2eb8 --- /dev/null +++ b/deps/npm/test/lib/init.js @@ -0,0 +1,211 @@ +const t = require('tap') +const requireInject = require('require-inject') + +let result = '' +const npmLog = { + disableProgress: () => null, + enableProgress: () => null, + info: () => null, + pause: () => null, + resume: () => null, + silly: () => null, +} +const npm = { + config: { set () {} }, + flatOptions: {}, + log: npmLog, +} +const mocks = { + 
'init-package-json': (dir, initFile, config, cb) => cb(null, 'data'), + '../../lib/npm.js': npm, + '../../lib/utils/usage.js': () => 'usage instructions', + '../../lib/utils/output.js': (...msg) => { + result += msg.join('\n') + }, +} +const init = requireInject('../../lib/init.js', mocks) + +t.afterEach(cb => { + result = '' + npm.config = { get: () => '', set () {} } + npm.commands = {} + npm.flatOptions = {} + npm.log = npmLog + cb() +}) + +t.test('classic npm init no args', t => { + npm.config = { + get () { + return '~/.npm-init.js' + }, + } + init([], err => { + t.ifError(err, 'npm init no args') + t.matchSnapshot(result, 'should print helper info') + t.end() + }) +}) + +t.test('classic npm init -y', t => { + t.plan(7) + npm.config = { + get: () => '~/.npm-init.js', + } + npm.flatOptions = { + yes: true, + } + npm.log = { ...npm.log } + npm.log.silly = (title, msg) => { + t.equal(title, 'package data', 'should print title') + t.equal(msg, 'data', 'should print pkg data info') + } + npm.log.resume = () => { + t.ok('should resume logs') + } + npm.log.info = (title, msg) => { + t.equal(title, 'init', 'should print title') + t.equal(msg, 'written successfully', 'should print done info') + } + init([], err => { + t.ifError(err, 'npm init -y') + t.equal(result, '') + }) +}) + +t.test('npm init ', t => { + t.plan(4) + npm.config = { + set (key, val) { + t.equal(key, 'package', 'should set package key') + t.deepEqual(val, [], 'should set empty array value') + }, + } + npm.commands.exec = (arr, cb) => { + t.deepEqual( + arr, + ['create-react-app'], + 'should npx with listed packages' + ) + cb() + } + init(['react-app'], err => { + t.ifError(err, 'npm init react-app') + }) +}) + +t.test('npm init @scope/name', t => { + t.plan(2) + npm.commands.exec = (arr, cb) => { + t.deepEqual( + arr, + ['@npmcli/create-something'], + 'should npx with scoped packages' + ) + cb() + } + init(['@npmcli/something'], err => { + t.ifError(err, 'npm init init @scope/name') + }) +}) + 
+t.test('npm init git spec', t => { + t.plan(2) + npm.commands.exec = (arr, cb) => { + t.deepEqual( + arr, + ['npm/create-something'], + 'should npx with git-spec packages' + ) + cb() + } + init(['npm/something'], err => { + t.ifError(err, 'npm init init @scope/name') + }) +}) + +t.test('npm init @scope', t => { + t.plan(2) + npm.commands.exec = (arr, cb) => { + t.deepEqual( + arr, + ['@npmcli/create'], + 'should npx with @scope/create pkgs' + ) + cb() + } + init(['@npmcli'], err => { + t.ifError(err, 'npm init init @scope/create') + }) +}) + +t.test('npm init tgz', t => { + init(['something.tgz'], err => { + t.match( + err, + /Error: Unrecognized initializer: something.tgz/, + 'should throw error when using an unsupported spec' + ) + t.end() + }) +}) + +t.test('npm init @next', t => { + t.plan(2) + npm.commands.exec = (arr, cb) => { + t.deepEqual( + arr, + ['create-something@next'], + 'should npx with something@next' + ) + cb() + } + init(['something@next'], err => { + t.ifError(err, 'npm init init something@next') + }) +}) + +t.test('npm init exec error', t => { + npm.commands.exec = (arr, cb) => { + cb(new Error('ERROR')) + } + init(['something@next'], err => { + t.match( + err, + /ERROR/, + 'should exit with exec error' + ) + t.end() + }) +}) + +t.test('npm init cancel', t => { + t.plan(3) + const init = requireInject('../../lib/init.js', { + ...mocks, + 'init-package-json': (dir, initFile, config, cb) => cb( + new Error('canceled') + ), + }) + npm.log = { ...npm.log } + npm.log.warn = (title, msg) => { + t.equal(title, 'init', 'should have init title') + t.equal(msg, 'canceled', 'should log canceled') + } + init([], err => { + t.ifError(err, 'npm init cancel') + }) +}) + +t.test('npm init error', t => { + const init = requireInject('../../lib/init.js', { + ...mocks, + 'init-package-json': (dir, initFile, config, cb) => cb( + new Error('Unknown Error') + ), + }) + init([], err => { + t.match(err, /Unknown Error/, 'should throw error') + t.end() + }) +}) diff 
--git a/deps/npm/test/lib/install.js b/deps/npm/test/lib/install.js index 1650dcb8c0a32e..7e243e7ff35f28 100644 --- a/deps/npm/test/lib/install.js +++ b/deps/npm/test/lib/install.js @@ -14,17 +14,17 @@ test('should install using Arborist', (t) => { globalDir: 'path/to/node_modules/', prefix: 'foo', flatOptions: { - global: false + global: false, }, config: { - get: () => true - } + get: () => true, + }, }, '@npmcli/run-script': ({ event }) => { SCRIPTS.push(event) }, - 'npmlog': { - warn: () => {} + npmlog: { + warn: () => {}, }, '@npmcli/arborist': function (args) { ARB_ARGS = args @@ -33,15 +33,16 @@ test('should install using Arborist', (t) => { REIFY_CALLED = true } }, - '../../lib/utils/reify-output.js': arb => { - if (arb !== ARB_OBJ) { - throw new Error('got wrong object passed to reify-output') - } - } + '../../lib/utils/reify-finish.js': arb => { + if (arb !== ARB_OBJ) + throw new Error('got wrong object passed to reify-finish') + }, }) t.test('with args', t => { - install(['fizzbuzz'], () => { + install(['fizzbuzz'], er => { + if (er) + throw er t.match(ARB_ARGS, { global: false, path: 'foo' }) t.equal(REIFY_CALLED, true, 'called reify') t.strictSame(SCRIPTS, [], 'no scripts when adding dep') @@ -50,7 +51,9 @@ test('should install using Arborist', (t) => { }) t.test('just a local npm install', t => { - install([], () => { + install([], er => { + if (er) + throw er t.match(ARB_ARGS, { global: false, path: 'foo' }) t.equal(REIFY_CALLED, true, 'called reify') t.strictSame(SCRIPTS, [ @@ -60,7 +63,7 @@ test('should install using Arborist', (t) => { 'prepublish', 'preprepare', 'prepare', - 'postprepare' + 'postprepare', ], 'exec scripts when doing local build') t.end() }) @@ -71,42 +74,45 @@ test('should install using Arborist', (t) => { test('should install globally using Arborist', (t) => { const install = requireInject('../../lib/install.js', { + '../../lib/utils/reify-finish.js': async () => {}, '../../lib/npm.js': { globalDir: 'path/to/node_modules/', 
prefix: 'foo', flatOptions: { - 'global': 'true', + global: 'true', }, config: { - get: () => false - } + get: () => false, + }, }, '@npmcli/arborist': function () { this.reify = () => {} }, }) - install([], () => { + install([], er => { + if (er) + throw er t.end() }) }) test('completion to folder', (t) => { const install = requireInject('../../lib/install.js', { - 'util': { - 'promisify': (fn) => fn + '../../lib/utils/reify-finish.js': async () => {}, + util: { + promisify: (fn) => fn, }, - 'fs': { - 'readdir': (path) => { - if (path === '/') { + fs: { + readdir: (path) => { + if (path === '/') return ['arborist'] - } else { + else return ['package.json'] - } - } - } + }, + }, }) install.completion({ - partialWord: '/ar' + partialWord: '/ar', }, (er, res) => { t.equal(er, null) const expect = process.platform === 'win32' ? '\\arborist' : '/arborist' @@ -117,17 +123,18 @@ test('completion to folder', (t) => { test('completion to folder - invalid dir', (t) => { const install = requireInject('../../lib/install.js', { - 'util': { - 'promisify': (fn) => fn + '../../lib/utils/reify-finish.js': async () => {}, + util: { + promisify: (fn) => fn, }, - 'fs': { - 'readdir': () => { + fs: { + readdir: () => { throw new Error('EONT') - } - } + }, + }, }) install.completion({ - partialWord: 'path/to/folder' + partialWord: 'path/to/folder', }, (er, res) => { t.equal(er, null) t.strictSame(res, [], 'invalid dir: no matching') @@ -137,17 +144,18 @@ test('completion to folder - invalid dir', (t) => { test('completion to folder - no matches', (t) => { const install = requireInject('../../lib/install.js', { - 'util': { - 'promisify': (fn) => fn + '../../lib/utils/reify-finish.js': async () => {}, + util: { + promisify: (fn) => fn, }, - 'fs': { - 'readdir': (path) => { + fs: { + readdir: (path) => { return ['foobar'] - } - } + }, + }, }) install.completion({ - partialWord: '/pa' + partialWord: '/pa', }, (er, res) => { t.equal(er, null) t.strictSame(res, [], 'no name match') @@ 
-157,21 +165,21 @@ test('completion to folder - no matches', (t) => { test('completion to folder - match is not a package', (t) => { const install = requireInject('../../lib/install.js', { - 'util': { - 'promisify': (fn) => fn + '../../lib/utils/reify-finish.js': async () => {}, + util: { + promisify: (fn) => fn, }, - 'fs': { - 'readdir': (path) => { - if (path === '/') { + fs: { + readdir: (path) => { + if (path === '/') return ['arborist'] - } else { + else throw new Error('EONT') - } - } - } + }, + }, }) install.completion({ - partialWord: '/ar' + partialWord: '/ar', }, (er, res) => { t.equal(er, null) t.strictSame(res, [], 'no name match') @@ -181,7 +189,7 @@ test('completion to folder - match is not a package', (t) => { test('completion to url', (t) => { install.completion({ - partialWord: 'http://path/to/url' + partialWord: 'http://path/to/url', }, (er, res) => { t.equal(er, null) t.strictSame(res, []) @@ -191,7 +199,7 @@ test('completion to url', (t) => { test('completion', (t) => { install.completion({ - partialWord: 'toto' + partialWord: 'toto', }, (er, res) => { t.notOk(er) t.notOk(res) diff --git a/deps/npm/test/lib/link.js b/deps/npm/test/lib/link.js index aafdb8188e85cf..9b7c5df642178f 100644 --- a/deps/npm/test/lib/link.js +++ b/deps/npm/test/lib/link.js @@ -20,8 +20,10 @@ const npm = { prefix: null, flatOptions: {}, config: { - get () { return false } - } + get () { + return false + }, + }, } const printLinks = async (opts) => { let res = '' @@ -30,16 +32,15 @@ const printLinks = async (opts) => { const linkedItems = [...tree.inventory.values()] .sort((a, b) => a.pkgid.localeCompare(b.pkgid)) for (const item of linkedItems) { - if (item.target) { + if (item.target) res += `${item.path} -> ${item.target.path}\n` - } } return res } const mocks = { '../../lib/npm.js': npm, - '../../lib/utils/reify-output.js': () => reifyOutput() + '../../lib/utils/reify-output.js': () => reifyOutput(), } const link = requireInject('../../lib/link.js', mocks) @@ -54,18 
+55,18 @@ t.test('link to globalDir when in current working dir of pkg and no args', (t) = a: { 'package.json': JSON.stringify({ name: 'a', - version: '1.0.0' - }) - } - } - } + version: '1.0.0', + }), + }, + }, + }, }, 'test-pkg-link': { 'package.json': JSON.stringify({ name: 'test-pkg-link', - version: '1.0.0' - }) - } + version: '1.0.0', + }), + }, }) npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') npm.prefix = resolve(testdir, 'test-pkg-link') @@ -75,7 +76,7 @@ t.test('link to globalDir when in current working dir of pkg and no args', (t) = const links = await printLinks({ path: resolve(npm.globalDir, '..'), - global: true + global: true, }) t.matchSnapshot(links, 'should create a global link to current pkg') @@ -97,68 +98,68 @@ t.test('link global linked pkg to local nm when using args', (t) => { foo: { 'package.json': JSON.stringify({ name: '@myscope/foo', - version: '1.0.0' - }) + version: '1.0.0', + }), }, bar: { 'package.json': JSON.stringify({ name: '@myscope/bar', - version: '1.0.0' - }) + version: '1.0.0', + }), }, - linked: t.fixture('symlink', '../../../../scoped-linked') + linked: t.fixture('symlink', '../../../../scoped-linked'), }, a: { 'package.json': JSON.stringify({ name: 'a', - version: '1.0.0' - }) + version: '1.0.0', + }), }, b: { 'package.json': JSON.stringify({ name: 'b', - version: '1.0.0' - }) + version: '1.0.0', + }), }, - 'test-pkg-link': t.fixture('symlink', '../../../test-pkg-link') - } - } + 'test-pkg-link': t.fixture('symlink', '../../../test-pkg-link'), + }, + }, }, 'test-pkg-link': { 'package.json': JSON.stringify({ name: 'test-pkg-link', - version: '1.0.0' - }) + version: '1.0.0', + }), }, 'link-me-too': { 'package.json': JSON.stringify({ name: 'link-me-too', - version: '1.0.0' - }) + version: '1.0.0', + }), }, 'scoped-linked': { 'package.json': JSON.stringify({ name: '@myscope/linked', - version: '1.0.0' - }) + version: '1.0.0', + }), }, 'my-project': { 'package.json': JSON.stringify({ name: 
'my-project', version: '1.0.0', dependencies: { - foo: '^1.0.0' - } + foo: '^1.0.0', + }, }), node_modules: { foo: { 'package.json': JSON.stringify({ name: 'foo', - version: '1.0.0' - }) - } - } - } + version: '1.0.0', + }), + }, + }, + }, }) npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') npm.prefix = resolve(testdir, 'my-project') @@ -171,7 +172,7 @@ t.test('link global linked pkg to local nm when using args', (t) => { process.chdir(_cwd) const links = await printLinks({ - path: npm.prefix + path: npm.prefix, }) t.matchSnapshot(links, 'should create a local symlink to global pkg') @@ -188,7 +189,7 @@ t.test('link global linked pkg to local nm when using args', (t) => { '@myscope/linked', '@myscope/bar', 'a', - 'file:../link-me-too' + 'file:../link-me-too', ], (err) => { t.ifError(err, 'should not error out') }) @@ -202,23 +203,23 @@ t.test('link pkg already in global space', (t) => { lib: { node_modules: { '@myscope': { - linked: t.fixture('symlink', '../../../../scoped-linked') - } - } - } + linked: t.fixture('symlink', '../../../../scoped-linked'), + }, + }, + }, }, 'scoped-linked': { 'package.json': JSON.stringify({ name: '@myscope/linked', - version: '1.0.0' - }) + version: '1.0.0', + }), }, 'my-project': { 'package.json': JSON.stringify({ name: 'my-project', - version: '1.0.0' - }) - } + version: '1.0.0', + }), + }, }) npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') npm.prefix = resolve(testdir, 'my-project') @@ -231,7 +232,7 @@ t.test('link pkg already in global space', (t) => { process.chdir(_cwd) const links = await printLinks({ - path: npm.prefix + path: npm.prefix, }) t.matchSnapshot(links, 'should create a local symlink to global pkg') @@ -256,10 +257,10 @@ t.test('completion', (t) => { foo: {}, bar: {}, lorem: {}, - ipsum: {} - } - } - } + ipsum: {}, + }, + }, + }, }) npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') @@ -276,7 +277,9 @@ t.test('completion', (t) => { 
t.test('--global option', (t) => { const _config = npm.config - npm.config = { get () { return true } } + npm.config = { get () { + return true + } } link([], (err) => { npm.config = _config diff --git a/deps/npm/test/lib/ll.js b/deps/npm/test/lib/ll.js index 989800944f6cb3..7d4e2b94f2b7e2 100644 --- a/deps/npm/test/lib/ll.js +++ b/deps/npm/test/lib/ll.js @@ -7,15 +7,15 @@ const ll = requireInject('../../lib/ll.js', { config: { set: (k, v) => { configs[k] = v - } + }, }, commands: { ls: (args, cb) => { lsCalled = true cb() - } - } - } + }, + }, + }, }) const ls = require('../../lib/ls.js') diff --git a/deps/npm/test/lib/load-all-commands.js b/deps/npm/test/lib/load-all-commands.js index 1669f435e9b097..fa73b8a78d8fa2 100644 --- a/deps/npm/test/lib/load-all-commands.js +++ b/deps/npm/test/lib/load-all-commands.js @@ -4,9 +4,8 @@ const t = require('tap') const { cmdList } = require('../../lib/utils/cmd-list.js') t.test('load npm', t => npm.load(er => { - if (er) { + if (er) throw er - } })) t.test('load each command', t => { diff --git a/deps/npm/test/lib/load-all.js b/deps/npm/test/lib/load-all.js index 72879c2c4448aa..02736c18ccc387 100644 --- a/deps/npm/test/lib/load-all.js +++ b/deps/npm/test/lib/load-all.js @@ -4,9 +4,9 @@ const { resolve } = require('path') const full = process.env.npm_lifecycle_event === 'check-coverage' -if (!full) { +if (!full) t.pass('nothing to do here, not checking for full coverage') -} else { +else { // some files do config.get() on load, so have to load npm first const npm = require('../../lib/npm.js') t.test('load npm first', t => npm.load(t.end)) diff --git a/deps/npm/test/lib/logout.js b/deps/npm/test/lib/logout.js index 0d00422dc83366..96b1bcc7fe8c41 100644 --- a/deps/npm/test/lib/logout.js +++ b/deps/npm/test/lib/logout.js @@ -3,7 +3,7 @@ const { test } = require('tap') const _flatOptions = { registry: 'https://registry.npmjs.org/', - scope: '' + scope: '', } const config = {} @@ -19,8 +19,8 @@ const mocks = { 
'npm-registry-fetch': npmFetch, '../../lib/npm.js': { flatOptions: _flatOptions, - config - } + config, + }, } const logout = requireInject('../../lib/logout.js', mocks) @@ -64,8 +64,8 @@ test('token logout', async (t) => { scope: '', token: '@foo/', method: 'DELETE', - ignoreBody: true - } + ignoreBody: true, + }, }, 'should call npm-registry-fetch with expected values' ) @@ -134,8 +134,8 @@ test('token scoped logout', async (t) => { scope: '@myscope', token: '@foo/', method: 'DELETE', - ignoreBody: true - } + ignoreBody: true, + }, }, 'should call npm-registry-fetch with expected values' ) @@ -241,8 +241,8 @@ test('ignore invalid scoped registry config', async (t) => { '@myscope:registry': '', token: '@foo/', method: 'DELETE', - ignoreBody: true - } + ignoreBody: true, + }, }, 'should call npm-registry-fetch with expected values' ) diff --git a/deps/npm/test/lib/ls.js b/deps/npm/test/lib/ls.js index 6a91e8c3520cd0..256ebf3534302e 100644 --- a/deps/npm/test/lib/ls.js +++ b/deps/npm/test/lib/ls.js @@ -7,28 +7,28 @@ t.cleanSnapshot = str => str.split(/\r\n/).join('\n') const simpleNmFixture = { node_modules: { - 'foo': { + foo: { 'package.json': JSON.stringify({ name: 'foo', version: '1.0.0', dependencies: { - 'bar': '^1.0.0' - } - }) + bar: '^1.0.0', + }, + }), }, - 'bar': { + bar: { 'package.json': JSON.stringify({ name: 'bar', - version: '1.0.0' - }) + version: '1.0.0', + }), }, - 'lorem': { + lorem: { 'package.json': JSON.stringify({ name: 'lorem', - version: '1.0.0' - }) - } - } + version: '1.0.0', + }), + }, + }, } const diffDepTypesNmFixture = { @@ -39,9 +39,9 @@ const diffDepTypesNmFixture = { description: 'A DEV dep kind of dep', version: '1.0.0', dependencies: { - 'foo': '^1.0.0' - } - }) + foo: '^1.0.0', + }, + }), }, 'prod-dep': { 'package.json': JSON.stringify({ @@ -49,35 +49,35 @@ const diffDepTypesNmFixture = { description: 'A PROD dep kind of dep', version: '1.0.0', dependencies: { - 'bar': '^2.0.0' - } + bar: '^2.0.0', + }, }), node_modules: { bar: 
{ 'package.json': JSON.stringify({ name: 'bar', description: 'A dep that bars', - version: '2.0.0' - }) - } - } + version: '2.0.0', + }), + }, + }, }, 'optional-dep': { 'package.json': JSON.stringify({ name: 'optional-dep', description: 'Maybe a dep?', - version: '1.0.0' - }) + version: '1.0.0', + }), }, 'peer-dep': { 'package.json': JSON.stringify({ name: 'peer-dep', description: 'Peer-dep description here', - version: '1.0.0' - }) + version: '1.0.0', + }), }, - ...simpleNmFixture.node_modules - } + ...simpleNmFixture.node_modules, + }, } let prefix @@ -95,24 +95,32 @@ const _flatOptions = { link: false, only: null, parseable: false, - get prefix () { return prefix }, - production: false + get prefix () { + return prefix + }, + production: false, } const ls = requireInject('../../lib/ls.js', { '../../lib/npm.js': { flatOptions: _flatOptions, limit: { - fetch: 3 + fetch: 3, + }, + get prefix () { + return _flatOptions.prefix + }, + get globalDir () { + return globalDir }, - get prefix () { return _flatOptions.prefix }, - get globalDir () { return globalDir }, config: { get (key) { return _flatOptions[key] - } - } + }, + }, + }, + '../../lib/utils/output.js': msg => { + result = msg }, - '../../lib/utils/output.js': msg => { result = msg } }) const redactCwd = res => @@ -136,10 +144,10 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -150,7 +158,7 @@ t.test('ls', (t) => { t.test('missing package.json', (t) => { prefix = t.testdir({ - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') @@ -169,10 +177,10 @@ t.test('ls', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - foo: '^1.0.0' - } + foo: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.equal(err.code, 'ELSPROBLEMS', 
'should have error code') @@ -194,10 +202,10 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls(['lorem'], (err) => { t.ifError(err, 'npm ls') @@ -216,10 +224,10 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - ipsum: '^1.0.0' - } + ipsum: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls(['.'], (err) => { t.ifError(err, 'should not throw on missing dep above current level') @@ -237,10 +245,10 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls(['bar'], (err) => { t.ifError(err, 'npm ls') @@ -257,18 +265,18 @@ t.test('ls', (t) => { dependencies: { foo: '^1.0.0', lorem: '^1.0.0', - ipsum: '^1.0.0' - } + ipsum: '^1.0.0', + }, }), node_modules: { ...simpleNmFixture.node_modules, ipsum: { 'package.json': JSON.stringify({ name: 'ipsum', - version: '1.0.0' - }) - } - } + version: '1.0.0', + }), + }, + }, }) ls(['bar@*', 'lorem@1.0.0'], (err) => { t.ifError(err, 'npm ls') @@ -284,10 +292,10 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls(['notadep'], (err) => { t.ifError(err, 'npm ls') @@ -311,10 +319,10 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -334,10 +342,10 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -357,8 +365,8 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { a: '^1.0.0', - e: '^1.0.0' - } + e: '^1.0.0', + }, 
}), node_modules: { a: { @@ -366,9 +374,9 @@ t.test('ls', (t) => { name: 'a', version: '1.0.0', dependencies: { - b: '^1.0.0' - } - }) + b: '^1.0.0', + }, + }), }, b: { 'package.json': JSON.stringify({ @@ -376,29 +384,29 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { c: '^1.0.0', - d: '*' - } - }) + d: '*', + }, + }), }, c: { 'package.json': JSON.stringify({ name: 'c', - version: '1.0.0' - }) + version: '1.0.0', + }), }, d: { 'package.json': JSON.stringify({ name: 'd', - version: '1.0.0' - }) + version: '1.0.0', + }), }, e: { 'package.json': JSON.stringify({ name: 'e', - version: '1.0.0' - }) - } - } + version: '1.0.0', + }), + }, + }, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -416,10 +424,10 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { foo: '^2.0.0', - ipsum: '^1.0.0' - } + ipsum: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.equal(err.code, 'ELSPROBLEMS', 'should have error code') @@ -443,10 +451,10 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { foo: '^2.0.0', - ipsum: '^1.0.0' - } + ipsum: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.equal(err.code, 'ELSPROBLEMS', 'should have error code') @@ -464,19 +472,19 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing dev deps') @@ -493,19 +501,19 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 
'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing only development deps') @@ -522,29 +530,29 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0', - 'linked-dep': '^1.0.0' + lorem: '^1.0.0', + 'linked-dep': '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), 'linked-dep': { 'package.json': JSON.stringify({ name: 'linked-dep', - version: '1.0.0' - }) + version: '1.0.0', + }), }, node_modules: { 'linked-dep': t.fixture('symlink', '../linked-dep'), - ...diffDepTypesNmFixture.node_modules - } + ...diffDepTypesNmFixture.node_modules, + }, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps') @@ -559,15 +567,15 @@ t.test('ls', (t) => { name: 'print-deduped-symlinks', version: '1.0.0', dependencies: { - 'a': '^1.0.0', - 'b': '^1.0.0' - } + a: '^1.0.0', + b: '^1.0.0', + }, }), - 'b': { + b: { 'package.json': JSON.stringify({ name: 'b', - version: '1.0.0' - }) + version: '1.0.0', + }), }, node_modules: { a: { @@ -575,12 +583,12 @@ t.test('ls', (t) => { name: 'a', version: '1.0.0', dependencies: { - b: '^1.0.0' - } - }) + b: '^1.0.0', + }, + }), }, - 'b': t.fixture('symlink', '../b') - } + b: t.fixture('symlink', '../b'), + }, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps') @@ -597,19 +605,19 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' 
+ 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing production deps') @@ -626,19 +634,19 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing only prod deps') @@ -655,19 +663,19 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree info with descriptions') @@ -686,19 +694,19 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing top-level deps with descriptions') @@ -711,7 +719,7 @@ t.test('ls', (t) => { t.test('json read problems', (t) => { prefix = t.testdir({ - 'package.json': '{broken json' + 
'package.json': '{broken json', }) ls([], (err) => { t.match(err, { code: 'EJSONPARSE' }, 'should throw EJSONPARSE error') @@ -736,19 +744,19 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^2.0.0' // mismatching version # - } + 'peer-dep': '^2.0.0', // mismatching version # + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree signaling mismatching peer dep in problems') @@ -763,9 +771,9 @@ t.test('ls', (t) => { name: 'invalid-deduped-dep', version: '1.0.0', dependencies: { - 'a': '^1.0.0', - 'b': '^2.0.0' - } + a: '^1.0.0', + b: '^2.0.0', + }, }), node_modules: { a: { @@ -773,17 +781,17 @@ t.test('ls', (t) => { name: 'a', version: '1.0.0', dependencies: { - b: '^2.0.0' - } - }) + b: '^2.0.0', + }, + }), }, b: { 'package.json': JSON.stringify({ name: 'b', - version: '1.0.0' - }) - } - } + version: '1.0.0', + }), + }, + }, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree signaling mismatching peer dep in problems') @@ -798,9 +806,9 @@ t.test('ls', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'a': '^1.0.0', - 'b': '^1.0.0' - } + a: '^1.0.0', + b: '^1.0.0', + }, }), node_modules: { a: { @@ -808,11 +816,11 @@ t.test('ls', (t) => { name: 'a', version: '1.0.0', dependencies: { - b: '^1.0.0' - } - }) - } - } + b: '^1.0.0', + }, + }), + }, + }, }) ls([], (err) => { t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') @@ -828,9 +836,9 @@ t.test('ls', (t) => { name: 'test-npm-ls', version: '1.0.0', peerDependencies: { - 'peer-dep': '*' - } - }) + 'peer-dep': '*', + }, + }), }) ls([], (err) => { t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') @@ -848,20 
+856,20 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { 'missing-optional-dep': '^1.0.0', - 'optional-dep': '^2.0.0' // mismatching version # + 'optional-dep': '^2.0.0', // mismatching version # }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], (err) => { t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') @@ -878,29 +886,29 @@ t.test('ls', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'a': '^1.0.0' - } + a: '^1.0.0', + }, }), node_modules: { - 'a': { + a: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.0', dependencies: { - b: '^1.0.0' - } - }) + b: '^1.0.0', + }, + }), }, - 'b': { + b: { 'package.json': JSON.stringify({ name: 'b', version: '1.0.0', dependencies: { - a: '^1.0.0' - } - }) - } - } + a: '^1.0.0', + }, + }), + }, + }, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -916,29 +924,29 @@ t.test('ls', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'a': '^1.0.0' - } + a: '^1.0.0', + }, }), node_modules: { - 'a': { + a: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.0', dependencies: { - b: '^1.0.0' - } - }) + b: '^1.0.0', + }, + }), }, - 'b': { + b: { 'package.json': JSON.stringify({ name: 'b', version: '1.0.0', dependencies: { - a: '^1.0.0' - } - }) - } - } + a: '^1.0.0', + }, + }), + }, + }, }) ls(['a'], (err) => { t.ifError(err, 'npm ls') @@ -956,37 +964,37 @@ t.test('ls', (t) => { dependencies: { '@npmcli/a': '^1.0.0', '@npmcli/b': '^1.0.0', - '@npmcli/c': '^1.0.0' - } + '@npmcli/c': '^1.0.0', + }, }), node_modules: { '@npmcli': { - 'a': { + a: { 'package.json': JSON.stringify({ name: '@npmcli/a', version: '1.0.0', dependencies: { - '@npmcli/b': '^1.0.0' - } - }) + '@npmcli/b': '^1.0.0', + }, + }), }, - 'b': { + 
b: { 'package.json': JSON.stringify({ name: '@npmcli/b', - version: '1.1.2' - }) + version: '1.1.2', + }), }, - 'c': { + c: { 'package.json': JSON.stringify({ name: '@npmcli/c', version: '1.0.0', dependencies: { - '@npmcli/b': '^1.0.0' - } - }) - } - } - } + '@npmcli/b': '^1.0.0', + }, + }), + }, + }, + }, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -1005,37 +1013,37 @@ t.test('ls', (t) => { dependencies: { '@npmcli/a': '^1.0.0', '@npmcli/b': '^1.0.0', - '@npmcli/c': '^1.0.0' - } + '@npmcli/c': '^1.0.0', + }, }), node_modules: { '@npmcli': { - 'a': { + a: { 'package.json': JSON.stringify({ name: '@npmcli/a', version: '1.0.0', dependencies: { - '@npmcli/b': '^1.0.0' - } - }) + '@npmcli/b': '^1.0.0', + }, + }), }, - 'b': { + b: { 'package.json': JSON.stringify({ name: '@npmcli/b', - version: '1.1.2' - }) + version: '1.1.2', + }), }, - 'c': { + c: { 'package.json': JSON.stringify({ name: '@npmcli/c', version: '1.0.0', dependencies: { - '@npmcli/b': '^1.0.0' - } - }) - } - } - } + '@npmcli/b': '^1.0.0', + }, + }), + }, + }, + }, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -1055,37 +1063,37 @@ t.test('ls', (t) => { dependencies: { '@npmcli/a': '^1.0.0', '@npmcli/b': '^1.0.0', - '@npmcli/c': '^1.0.0' - } + '@npmcli/c': '^1.0.0', + }, }), node_modules: { '@npmcli': { - 'a': { + a: { 'package.json': JSON.stringify({ name: '@npmcli/a', version: '1.0.0', dependencies: { - '@npmcli/b': '^1.0.0' - } - }) + '@npmcli/b': '^1.0.0', + }, + }), }, - 'b': { + b: { 'package.json': JSON.stringify({ name: '@npmcli/b', - version: '1.1.2' - }) + version: '1.1.2', + }), }, - 'c': { + c: { 'package.json': JSON.stringify({ name: '@npmcli/c', version: '1.0.0', dependencies: { - '@npmcli/b': '^1.0.0' - } - }) - } - } - } + '@npmcli/b': '^1.0.0', + }, + }), + }, + }, + }, }) ls(['@npmcli/b'], (err) => { t.ifError(err, 'npm ls') @@ -1103,37 +1111,37 @@ t.test('ls', (t) => { dependencies: { '@npmcli/a': '^1.0.0', '@npmcli/b': '^1.0.0', - '@npmcli/c': '^1.0.0' - } + '@npmcli/c': 
'^1.0.0', + }, }), node_modules: { '@npmcli': { - 'a': { + a: { 'package.json': JSON.stringify({ name: '@npmcli/a', version: '1.0.0', dependencies: { - '@npmcli/c': '^1.0.0' - } - }) + '@npmcli/c': '^1.0.0', + }, + }), }, - 'b': { + b: { 'package.json': JSON.stringify({ name: '@npmcli/b', version: '1.1.2', dependencies: { - '@npmcli/c': '^1.0.0' - } - }) + '@npmcli/c': '^1.0.0', + }, + }), }, - 'c': { + c: { 'package.json': JSON.stringify({ name: '@npmcli/c', - version: '1.0.0' - }) - } - } - } + version: '1.0.0', + }), + }, + }, + }, }) ls(['@npmcli/c'], (err) => { t.ifError(err, 'npm ls') @@ -1148,22 +1156,22 @@ t.test('ls', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - a: 'npm:b@1.0.0' - } + a: 'npm:b@1.0.0', + }, }), node_modules: { - 'a': { + a: { 'package.json': JSON.stringify({ name: 'b', version: '1.0.0', _from: 'a@npm:b', _resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', _requested: { - type: 'alias' - } - }) - } - } + type: 'alias', + }, + }), + }, + }, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing aliases') @@ -1177,11 +1185,11 @@ t.test('ls', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'abbrev': 'git+https://github.com/isaacs/abbrev-js.git' - } + abbrev: 'git+https://github.com/isaacs/abbrev-js.git', + }, }), node_modules: { - 'abbrev': { + abbrev: { 'package.json': JSON.stringify({ name: 'abbrev', version: '1.1.1', @@ -1194,11 +1202,11 @@ t.test('ls', (t) => { rawSpec: 'git+https:github.com/isaacs/abbrev-js.git', saveSpec: 'git+https://github.com/isaacs/abbrev-js.git', fetchSpec: 'https://github.com/isaacs/abbrev-js.git', - gitCommittish: null - } - }) - } - } + gitCommittish: null, + }, + }), + }, + }, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -1213,36 +1221,36 @@ t.test('ls', (t) => { a: { 'package.json': JSON.stringify({ name: 'a', - version: '1.0.1' - }) - } + version: '1.0.1', + 
}), + }, }, 'package-lock.json': JSON.stringify({ - 'name': 'npm-broken-resolved-field-test', - 'version': '1.0.0', - 'lockfileVersion': 2, - 'requires': true, - 'packages': { + name: 'npm-broken-resolved-field-test', + version: '1.0.0', + lockfileVersion: 2, + requires: true, + packages: { '': { - 'name': 'a', - 'version': '1.0.1' - } + name: 'a', + version: '1.0.1', + }, }, - 'dependencies': { + dependencies: { a: { - 'version': '1.0.1', - 'resolved': 'foo@bar://b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', - 'integrity': 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==' - } - } - }), - 'package.json': JSON.stringify({ - 'name': 'npm-broken-resolved-field-test', - 'version': '1.0.0', - 'dependencies': { - 'a': '^1.0.1' - } - }) + version: '1.0.1', + resolved: 'foo@bar://b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + }, + }, + }), + 'package.json': JSON.stringify({ + name: 'npm-broken-resolved-field-test', + version: '1.0.0', + dependencies: { + a: '^1.0.1', + }, + }), }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -1257,8 +1265,8 @@ t.test('ls', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'simple-output': '^2.0.0' - } + 'simple-output': '^2.0.0', + }, }), node_modules: { 'simple-output': { @@ -1276,17 +1284,17 @@ t.test('ls', (t) => { escapedName: 'simple-output', rawSpec: '', saveSpec: null, - fetchSpec: 'latest' + fetchSpec: 'latest', }, _requiredBy: [ '#USER', - '/' + '/', ], _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', - _spec: 'simple-output' - }) - } - } + _spec: 'simple-output', + }), + }, + }, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should not be printed in tree output') @@ -1301,24 +1309,24 @@ t.test('ls', (t) => { a: { 'package.json': JSON.stringify({ name: 'a', - version: '1.0.0' - }) + version: '1.0.0', + }), }, b: { 'package.json': JSON.stringify({ 
name: 'b', - version: '1.0.0' + version: '1.0.0', }), node_modules: { c: { 'package.json': JSON.stringify({ name: 'c', - version: '1.0.0' - }) - } - } - } - } + version: '1.0.0', + }), + }, + }, + }, + }, }) // mimics lib/npm.js globalDir getter but pointing to fixtures @@ -1338,8 +1346,8 @@ t.test('ls', (t) => { name: 'filter-by-child-of-missing-dep', version: '1.0.0', dependencies: { - 'a': '^1.0.0' - } + a: '^1.0.0', + }, }), node_modules: { b: { @@ -1347,34 +1355,34 @@ t.test('ls', (t) => { name: 'b', version: '1.0.0', dependencies: { - c: '^1.0.0' - } - }) + c: '^1.0.0', + }, + }), }, c: { 'package.json': JSON.stringify({ name: 'c', - version: '1.0.0' - }) + version: '1.0.0', + }), }, d: { 'package.json': JSON.stringify({ name: 'd', version: '1.0.0', dependencies: { - c: '^2.0.0' - } + c: '^2.0.0', + }, }), node_modules: { c: { 'package.json': JSON.stringify({ name: 'c', - version: '2.0.0' - }) - } - } - } - } + version: '2.0.0', + }), + }, + }, + }, + }, }) ls(['c'], (err) => { @@ -1391,8 +1399,8 @@ t.test('ls', (t) => { version: '1.0.0', workspaces: [ './a', - './b' - ] + './b', + ], }), node_modules: { a: t.fixture('symlink', '../a'), @@ -1400,25 +1408,25 @@ t.test('ls', (t) => { c: { 'package.json': JSON.stringify({ name: 'c', - version: '1.0.0' - }) - } + version: '1.0.0', + }), + }, }, a: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.0', dependencies: { - c: '^1.0.0' - } - }) + c: '^1.0.0', + }, + }), }, b: { 'package.json': JSON.stringify({ name: 'b', - version: '1.0.0' - }) - } + version: '1.0.0', + }), + }, }) ls([], (err) => { @@ -1443,44 +1451,44 @@ t.test('ls', (t) => { version: '1.0.0', dependencies: { a: '^1.0.0', - b: '^1.0.0' - } + b: '^1.0.0', + }, }), node_modules: { a: { 'package.json': JSON.stringify({ name: 'a', - version: '1.0.0' - }) + version: '1.0.0', + }), }, b: { 'package.json': JSON.stringify({ name: 'b', version: '1.0.0', dependencies: { - c: '^1.0.0' - } - }) + c: '^1.0.0', + }, + }), }, c: { 'package.json': 
JSON.stringify({ name: 'c', version: '1.0.0', dependencies: { - d: '^1.0.0' - } - }) + d: '^1.0.0', + }, + }), }, d: { 'package.json': JSON.stringify({ name: 'd', version: '1.0.0', dependencies: { - a: '^1.0.0' - } - }) - } - } + a: '^1.0.0', + }, + }), + }, + }, }) t.plan(6) @@ -1521,10 +1529,10 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -1535,7 +1543,7 @@ t.test('ls --parseable', (t) => { t.test('missing package.json', (t) => { prefix = t.testdir({ - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') @@ -1554,10 +1562,10 @@ t.test('ls --parseable', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - foo: '^1.0.0' - } + foo: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.equal(err.code, 'ELSPROBLEMS', 'should have error code') @@ -1573,10 +1581,10 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls(['lorem'], (err) => { t.ifError(err, 'npm ls') @@ -1592,10 +1600,10 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls(['bar'], (err) => { t.ifError(err, 'npm ls') @@ -1612,18 +1620,18 @@ t.test('ls --parseable', (t) => { dependencies: { foo: '^1.0.0', lorem: '^1.0.0', - ipsum: '^1.0.0' - } + ipsum: '^1.0.0', + }, }), node_modules: { ...simpleNmFixture.node_modules, ipsum: { 'package.json': JSON.stringify({ name: 'ipsum', - version: '1.0.0' - }) - } - } + version: '1.0.0', + }), + }, + }, }) ls(['bar@*', 'lorem@1.0.0'], (err) => { t.ifError(err, 'npm ls') @@ -1639,10 +1647,10 @@ t.test('ls 
--parseable', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls(['notadep'], (err) => { t.ifError(err, 'npm ls') @@ -1666,10 +1674,10 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -1689,10 +1697,10 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -1712,10 +1720,10 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -1733,10 +1741,10 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { foo: '^2.0.0', - ipsum: '^1.0.0' - } + ipsum: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems') @@ -1753,19 +1761,19 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing dev deps') @@ -1782,19 +1790,19 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', 
}, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing only development deps') @@ -1811,29 +1819,29 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0', - 'linked-dep': '^1.0.0' + lorem: '^1.0.0', + 'linked-dep': '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), 'linked-dep': { 'package.json': JSON.stringify({ name: 'linked-dep', - version: '1.0.0' - }) + version: '1.0.0', + }), }, node_modules: { 'linked-dep': t.fixture('symlink', '../linked-dep'), - ...diffDepTypesNmFixture.node_modules - } + ...diffDepTypesNmFixture.node_modules, + }, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps') @@ -1850,19 +1858,19 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing production deps') @@ -1879,19 +1887,19 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, 
peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing only prod deps') @@ -1908,19 +1916,19 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree info with descriptions') @@ -1935,10 +1943,10 @@ t.test('ls --parseable', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - foo: '^1.0.0' - } + foo: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.equal(err.code, 'ELSPROBLEMS', 'should have error code') @@ -1956,10 +1964,10 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { foo: '^2.0.0', - ipsum: '^1.0.0' - } + ipsum: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems') @@ -1977,29 +1985,29 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0', - 'linked-dep': '^1.0.0' + lorem: '^1.0.0', + 'linked-dep': '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), 'linked-dep': { 'package.json': JSON.stringify({ name: 'linked-dep', - version: '1.0.0' - }) + version: '1.0.0', + }), }, node_modules: { 'linked-dep': t.fixture('symlink', '../linked-dep'), - 
...diffDepTypesNmFixture.node_modules - } + ...diffDepTypesNmFixture.node_modules, + }, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -2019,19 +2027,19 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing top-level deps with descriptions') @@ -2044,7 +2052,7 @@ t.test('ls --parseable', (t) => { t.test('json read problems', (t) => { prefix = t.testdir({ - 'package.json': '{broken json' + 'package.json': '{broken json', }) ls([], (err) => { t.match(err, { code: 'EJSONPARSE' }, 'should throw EJSONPARSE error') @@ -2069,19 +2077,19 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^2.0.0' // mismatching version # - } + 'peer-dep': '^2.0.0', // mismatching version # + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output parseable signaling missing peer dep in problems') @@ -2096,20 +2104,20 @@ t.test('ls --parseable', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { 'missing-optional-dep': '^1.0.0', - 'optional-dep': '^2.0.0' // mismatching version # + 'optional-dep': '^2.0.0', // mismatching version # }, peerDependencies: { - 'peer-dep': 
'^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], (err) => { t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') @@ -2125,29 +2133,29 @@ t.test('ls --parseable', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'a': '^1.0.0' - } + a: '^1.0.0', + }, }), node_modules: { - 'a': { + a: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.0', dependencies: { - b: '^1.0.0' - } - }) + b: '^1.0.0', + }, + }), }, - 'b': { + b: { 'package.json': JSON.stringify({ name: 'b', version: '1.0.0', dependencies: { - a: '^1.0.0' - } - }) - } - } + a: '^1.0.0', + }, + }), + }, + }, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should print tree output ommiting deduped ref') @@ -2161,22 +2169,22 @@ t.test('ls --parseable', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - a: 'npm:b@1.0.0' - } + a: 'npm:b@1.0.0', + }, }), node_modules: { - 'a': { + a: { 'package.json': JSON.stringify({ name: 'b', version: '1.0.0', _from: 'a@npm:b', _resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', _requested: { - type: 'alias' - } - }) - } - } + type: 'alias', + }, + }), + }, + }, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing aliases') @@ -2190,11 +2198,11 @@ t.test('ls --parseable', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'abbrev': 'git+https://github.com/isaacs/abbrev-js.git' - } + abbrev: 'git+https://github.com/isaacs/abbrev-js.git', + }, }), node_modules: { - 'abbrev': { + abbrev: { 'package.json': JSON.stringify({ name: 'abbrev', version: '1.1.1', @@ -2207,11 +2215,11 @@ t.test('ls --parseable', (t) => { rawSpec: 'git+https:github.com/isaacs/abbrev-js.git', saveSpec: 'git+https://github.com/isaacs/abbrev-js.git', fetchSpec: 'https://github.com/isaacs/abbrev-js.git', - gitCommittish: null - } - }) - } - } + gitCommittish: null, 
+ }, + }), + }, + }, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should output tree containing git refs') @@ -2225,8 +2233,8 @@ t.test('ls --parseable', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'simple-output': '^2.0.0' - } + 'simple-output': '^2.0.0', + }, }), node_modules: { 'simple-output': { @@ -2244,17 +2252,17 @@ t.test('ls --parseable', (t) => { escapedName: 'simple-output', rawSpec: '', saveSpec: null, - fetchSpec: 'latest' + fetchSpec: 'latest', }, _requiredBy: [ '#USER', - '/' + '/', ], _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', - _spec: 'simple-output' - }) - } - } + _spec: 'simple-output', + }), + }, + }, }) ls([], () => { t.matchSnapshot(redactCwd(result), 'should not be printed in tree output') @@ -2269,24 +2277,24 @@ t.test('ls --parseable', (t) => { a: { 'package.json': JSON.stringify({ name: 'a', - version: '1.0.0' - }) + version: '1.0.0', + }), }, b: { 'package.json': JSON.stringify({ name: 'b', - version: '1.0.0' + version: '1.0.0', }), node_modules: { c: { 'package.json': JSON.stringify({ name: 'c', - version: '1.0.0' - }) - } - } - } - } + version: '1.0.0', + }), + }, + }, + }, + }, }) // mimics lib/npm.js globalDir getter but pointing to fixtures @@ -2314,10 +2322,10 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -2326,19 +2334,19 @@ t.test('ls --json', (t) => { { name: 'test-npm-ls', version: '1.0.0', - 'dependencies': { - 'foo': { - 'version': '1.0.0', - 'dependencies': { - 'bar': { - 'version': '1.0.0' - } - } - }, - 'lorem': { - 'version': '1.0.0' - } - } + dependencies: { + foo: { + version: '1.0.0', + dependencies: { + bar: { + version: '1.0.0', + }, + }, + }, + lorem: { + version: '1.0.0', + }, + }, }, 'should output json representation of dependencies structure' ) @@ -2348,46 +2356,46 @@ t.test('ls --json', (t) => { 
t.test('missing package.json', (t) => { prefix = t.testdir({ - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems') t.deepEqual( jsonParse(result), { - 'problems': [ + problems: [ 'extraneous: bar@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/bar', 'extraneous: foo@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/foo', - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/lorem' + 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/lorem', ], - 'dependencies': { - 'bar': { - 'version': '1.0.0', - 'extraneous': true, - 'problems': [ - 'extraneous: bar@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/bar' - ] - }, - 'foo': { - 'version': '1.0.0', - 'extraneous': true, - 'problems': [ - 'extraneous: foo@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/foo' + dependencies: { + bar: { + version: '1.0.0', + extraneous: true, + problems: [ + 'extraneous: bar@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/bar', + ], + }, + foo: { + version: '1.0.0', + extraneous: true, + problems: [ + 'extraneous: foo@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/foo', + ], + dependencies: { + bar: { + version: '1.0.0', + }, + }, + }, + lorem: { + version: '1.0.0', + extraneous: true, + problems: [ + 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/lorem', ], - 'dependencies': { - 'bar': { - 'version': '1.0.0' - } - } - }, - 'lorem': { - 'version': '1.0.0', - 'extraneous': true, - 'problems': [ - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/lorem' - ] - } - } + }, + }, }, 'should output json missing name/version of top-level package' ) @@ -2401,10 +2409,10 @@ t.test('ls --json', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - foo: '^1.0.0' - } + foo: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { 
t.equal( @@ -2422,26 +2430,26 @@ t.test('ls --json', (t) => { { name: 'test-npm-ls', version: '1.0.0', - 'problems': [ - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem' + problems: [ + 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem', ], - 'dependencies': { - 'foo': { - 'version': '1.0.0', - 'dependencies': { - 'bar': { - 'version': '1.0.0' - } - } - }, - 'lorem': { - 'version': '1.0.0', - 'extraneous': true, - 'problems': [ - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem' - ] - } - } + dependencies: { + foo: { + version: '1.0.0', + dependencies: { + bar: { + version: '1.0.0', + }, + }, + }, + lorem: { + version: '1.0.0', + extraneous: true, + problems: [ + 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem', + ], + }, + }, }, 'should output json containing problems info' ) @@ -2456,10 +2464,10 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls(['lorem'], (err) => { t.ifError(err, 'npm ls') @@ -2468,11 +2476,11 @@ t.test('ls --json', (t) => { { name: 'test-npm-ls', version: '1.0.0', - 'dependencies': { - 'lorem': { - 'version': '1.0.0' - } - } + dependencies: { + lorem: { + version: '1.0.0', + }, + }, }, 'should output json contaning only occurences of filtered by package' ) @@ -2492,10 +2500,10 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls(['bar'], (err) => { t.ifError(err, 'npm ls') @@ -2509,11 +2517,11 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { bar: { - version: '1.0.0' - } - } - } - } + version: '1.0.0', + }, + }, + }, + }, }, 'should output json contaning only occurences of filtered by package' ) @@ -2529,39 +2537,39 @@ t.test('ls --json', (t) => { dependencies: { 
foo: '^1.0.0', lorem: '^1.0.0', - ipsum: '^1.0.0' - } + ipsum: '^1.0.0', + }, }), node_modules: { ...simpleNmFixture.node_modules, ipsum: { 'package.json': JSON.stringify({ name: 'ipsum', - version: '1.0.0' - }) - } - } + version: '1.0.0', + }), + }, + }, }) ls(['bar@*', 'lorem@1.0.0'], (err) => { t.ifError(err, 'npm ls') t.deepEqual( jsonParse(result), { - 'version': '1.0.0', - 'name': 'test-npm-ls', - 'dependencies': { - 'foo': { - 'version': '1.0.0', - 'dependencies': { - 'bar': { - 'version': '1.0.0' - } - } - }, - 'lorem': { - 'version': '1.0.0' - } - } + version: '1.0.0', + name: 'test-npm-ls', + dependencies: { + foo: { + version: '1.0.0', + dependencies: { + bar: { + version: '1.0.0', + }, + }, + }, + lorem: { + version: '1.0.0', + }, + }, }, 'should output json contaning only occurences of multiple filtered packages and their ancestors' ) @@ -2576,10 +2584,10 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls(['notadep'], (err) => { t.ifError(err, 'npm ls') @@ -2587,7 +2595,7 @@ t.test('ls --json', (t) => { jsonParse(result), { name: 'test-npm-ls', - version: '1.0.0' + version: '1.0.0', }, 'should output json containing no dependencies info' ) @@ -2610,10 +2618,10 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -2622,14 +2630,14 @@ t.test('ls --json', (t) => { { name: 'test-npm-ls', version: '1.0.0', - 'dependencies': { - 'foo': { - 'version': '1.0.0' + dependencies: { + foo: { + version: '1.0.0', }, - 'lorem': { - 'version': '1.0.0' - } - } + lorem: { + version: '1.0.0', + }, + }, }, 'should output json containing only top-level dependencies' ) @@ -2648,10 +2656,10 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: 
'^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -2660,14 +2668,14 @@ t.test('ls --json', (t) => { { name: 'test-npm-ls', version: '1.0.0', - 'dependencies': { - 'foo': { - 'version': '1.0.0' + dependencies: { + foo: { + version: '1.0.0', }, - 'lorem': { - 'version': '1.0.0' - } - } + lorem: { + version: '1.0.0', + }, + }, }, 'should output json containing only top-level dependencies' ) @@ -2686,10 +2694,10 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { foo: '^1.0.0', - lorem: '^1.0.0' - } + lorem: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.ifError(err, 'npm ls') @@ -2698,19 +2706,19 @@ t.test('ls --json', (t) => { { name: 'test-npm-ls', version: '1.0.0', - 'dependencies': { - 'foo': { - 'version': '1.0.0', - 'dependencies': { - 'bar': { - 'version': '1.0.0' - } - } - }, - 'lorem': { - 'version': '1.0.0' - } - } + dependencies: { + foo: { + version: '1.0.0', + dependencies: { + bar: { + version: '1.0.0', + }, + }, + }, + lorem: { + version: '1.0.0', + }, + }, }, 'should output json containing top-level deps and their deps only' ) @@ -2727,51 +2735,51 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { foo: '^2.0.0', - ipsum: '^1.0.0' - } + ipsum: '^1.0.0', + }, }), - ...simpleNmFixture + ...simpleNmFixture, }) ls([], (err) => { t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems') t.deepEqual( jsonParse(result), { - 'name': 'test-npm-ls', - 'version': '1.0.0', - 'problems': [ + name: 'test-npm-ls', + version: '1.0.0', + problems: [ 'invalid: foo@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/foo', 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/lorem' + 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/lorem', ], - 'dependencies': { - 'foo': { - 'version': 
'1.0.0', - 'invalid': true, - 'problems': [ - 'invalid: foo@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/foo' + dependencies: { + foo: { + version: '1.0.0', + invalid: true, + problems: [ + 'invalid: foo@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/foo', + ], + dependencies: { + bar: { + version: '1.0.0', + }, + }, + }, + lorem: { + version: '1.0.0', + extraneous: true, + problems: [ + 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/lorem', + ], + }, + ipsum: { + required: '^1.0.0', + missing: true, + problems: [ + 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', ], - 'dependencies': { - 'bar': { - 'version': '1.0.0' - } - } - }, - 'lorem': { - 'version': '1.0.0', - 'extraneous': true, - 'problems': [ - 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/lorem' - ] - }, - 'ipsum': { - 'required': '^1.0.0', - 'missing': true, - 'problems': [ - 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0' - ] - } - } + }, + }, }, 'should output json containing top-level deps and their deps only' ) @@ -2787,19 +2795,19 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.deepEqual( @@ -2813,11 +2821,11 @@ t.test('ls --json', (t) => { dependencies: { foo: { version: '1.0.0', - dependencies: { bar: { version: '1.0.0' } } - } - } - } - } + dependencies: { bar: { version: '1.0.0' } }, + }, + }, + }, + }, }, 'should output json containing dev deps' ) @@ -2834,19 +2842,19 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, 
devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.deepEqual( @@ -2860,11 +2868,11 @@ t.test('ls --json', (t) => { dependencies: { foo: { version: '1.0.0', - dependencies: { bar: { version: '1.0.0' } } - } - } - } - } + dependencies: { bar: { version: '1.0.0' } }, + }, + }, + }, + }, }, 'should output json containing only development deps' ) @@ -2881,29 +2889,29 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0', - 'linked-dep': '^1.0.0' + lorem: '^1.0.0', + 'linked-dep': '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), 'linked-dep': { 'package.json': JSON.stringify({ name: 'linked-dep', - version: '1.0.0' - }) + version: '1.0.0', + }), }, node_modules: { 'linked-dep': t.fixture('symlink', '../linked-dep'), - ...diffDepTypesNmFixture.node_modules - } + ...diffDepTypesNmFixture.node_modules, + }, }) ls([], () => { t.deepEqual( @@ -2914,9 +2922,9 @@ t.test('ls --json', (t) => { dependencies: { 'linked-dep': { version: '1.0.0', - resolved: 'file:../linked-dep' - } - } + resolved: 'file:../linked-dep', + }, + }, }, 'should output json containing linked deps' ) @@ -2933,19 +2941,19 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + 
...diffDepTypesNmFixture, }) ls([], () => { t.deepEqual( @@ -2956,8 +2964,8 @@ t.test('ls --json', (t) => { dependencies: { lorem: { version: '1.0.0' }, 'optional-dep': { version: '1.0.0' }, - 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } } - } + 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }, + }, }, 'should output json containing production deps' ) @@ -2974,19 +2982,19 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.deepEqual( @@ -2997,8 +3005,8 @@ t.test('ls --json', (t) => { dependencies: { lorem: { version: '1.0.0' }, 'optional-dep': { version: '1.0.0' }, - 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } } - } + 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }, + }, }, 'should output json containing only prod deps' ) @@ -3014,121 +3022,121 @@ t.test('ls --json', (t) => { 'dedupe-tests-a': { 'package.json': JSON.stringify({ name: '@isaacs/dedupe-tests-a', - version: '1.0.1' + version: '1.0.1', }), node_modules: { '@isaacs': { 'dedupe-tests-b': { name: '@isaacs/dedupe-tests-b', - version: '1.0.0' - } - } - } + version: '1.0.0', + }, + }, + }, }, 'dedupe-tests-b': { 'package.json': JSON.stringify({ name: '@isaacs/dedupe-tests-b', - version: '2.0.0' - }) - } - } + version: '2.0.0', + }), + }, + }, }, 'package-lock.json': JSON.stringify({ - 'name': 'dedupe-lockfile', - 'version': '1.0.0', - 'lockfileVersion': 2, - 'requires': true, - 'packages': { + name: 'dedupe-lockfile', + version: '1.0.0', + lockfileVersion: 2, + requires: true, + packages: { '': { - 'name': 
'dedupe-lockfile', - 'version': '1.0.0', - 'dependencies': { + name: 'dedupe-lockfile', + version: '1.0.0', + dependencies: { '@isaacs/dedupe-tests-a': '1.0.1', - '@isaacs/dedupe-tests-b': '1||2' - } + '@isaacs/dedupe-tests-b': '1||2', + }, }, 'node_modules/@isaacs/dedupe-tests-a': { - 'name': '@isaacs/dedupe-tests-a', - 'version': '1.0.1', - 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', - 'integrity': 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', - 'dependencies': { - '@isaacs/dedupe-tests-b': '1' - } + name: '@isaacs/dedupe-tests-a', + version: '1.0.1', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + dependencies: { + '@isaacs/dedupe-tests-b': '1', + }, }, 'node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b': { - 'name': '@isaacs/dedupe-tests-b', - 'version': '1.0.0', - 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', - 'integrity': 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==' + name: '@isaacs/dedupe-tests-b', + version: '1.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', }, 'node_modules/@isaacs/dedupe-tests-b': { - 'name': '@isaacs/dedupe-tests-b', - 'version': '2.0.0', - 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', - 'integrity': 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==' - } + name: '@isaacs/dedupe-tests-b', + version: '2.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + integrity: 
'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', + }, }, - 'dependencies': { + dependencies: { '@isaacs/dedupe-tests-a': { - 'version': '1.0.1', - 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', - 'integrity': 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', - 'requires': { - '@isaacs/dedupe-tests-b': '1' + version: '1.0.1', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + requires: { + '@isaacs/dedupe-tests-b': '1', }, - 'dependencies': { + dependencies: { '@isaacs/dedupe-tests-b': { - 'version': '1.0.0', - 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', - 'integrity': 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==' - } - } + version: '1.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', + }, + }, }, '@isaacs/dedupe-tests-b': { - 'version': '2.0.0', - 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', - 'integrity': 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==' - } - } + version: '2.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', + }, + }, }), 'package.json': JSON.stringify({ - 'name': 'dedupe-lockfile', - 'version': '1.0.0', - 'dependencies': { + name: 'dedupe-lockfile', + version: '1.0.0', + dependencies: { '@isaacs/dedupe-tests-a': '1.0.1', - '@isaacs/dedupe-tests-b': 
'1||2' - } - }) + '@isaacs/dedupe-tests-b': '1||2', + }, + }), }) ls([], () => { t.deepEqual( jsonParse(result), { - 'version': '1.0.0', - 'name': 'dedupe-lockfile', - 'dependencies': { + version: '1.0.0', + name: 'dedupe-lockfile', + dependencies: { '@isaacs/dedupe-tests-a': { - 'version': '1.0.1', - 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', - 'dependencies': { + version: '1.0.1', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + dependencies: { '@isaacs/dedupe-tests-b': { - 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', - 'extraneous': true, - 'problems': [ - 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/ls-ls-json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b' - ] - } - } + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + extraneous: true, + problems: [ + 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/ls-ls-json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', + ], + }, + }, }, '@isaacs/dedupe-tests-b': { - 'version': '2.0.0', - 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz' - } + version: '2.0.0', + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + }, }, - 'problems': [ - 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/ls-ls-json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b' - ] + problems: [ + 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/ls-ls-json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', + ], }, 'should output json containing only prod deps' ) @@ -3144,19 +3152,19 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': 
'^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.deepEqual( @@ -3174,7 +3182,7 @@ t.test('ls --json', (t) => { peerDependencies: {}, _dependencies: {}, path: '{CWD}/ls-ls-json--long/node_modules/peer-dep', - extraneous: false + extraneous: false, }, 'dev-dep': { name: 'dev-dep', @@ -3193,23 +3201,23 @@ t.test('ls --json', (t) => { peerDependencies: {}, _dependencies: {}, path: '{CWD}/ls-ls-json--long/node_modules/bar', - extraneous: false - } + extraneous: false, + }, }, _id: 'foo@1.0.0', devDependencies: {}, peerDependencies: {}, _dependencies: { bar: '^1.0.0' }, path: '{CWD}/ls-ls-json--long/node_modules/foo', - extraneous: false - } + extraneous: false, + }, }, _id: 'dev-dep@1.0.0', devDependencies: {}, peerDependencies: {}, _dependencies: { foo: '^1.0.0' }, path: '{CWD}/ls-ls-json--long/node_modules/dev-dep', - extraneous: false + extraneous: false, }, lorem: { name: 'lorem', @@ -3219,7 +3227,7 @@ t.test('ls --json', (t) => { peerDependencies: {}, _dependencies: {}, path: '{CWD}/ls-ls-json--long/node_modules/lorem', - extraneous: false + extraneous: false, }, 'optional-dep': { name: 'optional-dep', @@ -3230,7 +3238,7 @@ t.test('ls --json', (t) => { peerDependencies: {}, _dependencies: {}, path: '{CWD}/ls-ls-json--long/node_modules/optional-dep', - extraneous: false + extraneous: false, }, 'prod-dep': { name: 'prod-dep', @@ -3246,16 +3254,16 @@ t.test('ls --json', (t) => { peerDependencies: {}, _dependencies: {}, path: '{CWD}/ls-ls-json--long/node_modules/prod-dep/node_modules/bar', - extraneous: false - } + extraneous: false, + }, }, _id: 'prod-dep@1.0.0', devDependencies: {}, peerDependencies: {}, _dependencies: { bar: '^2.0.0' }, path: '{CWD}/ls-ls-json--long/node_modules/prod-dep', - extraneous: false - } + extraneous: false, + }, }, devDependencies: { 
'dev-dep': '^1.0.0' }, optionalDependencies: { 'optional-dep': '^1.0.0' }, @@ -3263,7 +3271,7 @@ t.test('ls --json', (t) => { _id: 'test-npm-ls@1.0.0', _dependencies: { 'prod-dep': '^1.0.0', lorem: '^1.0.0', 'optional-dep': '^1.0.0' }, path: '{CWD}/ls-ls-json--long', - extraneous: false + extraneous: false, }, 'should output long json info' ) @@ -3282,19 +3290,19 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], () => { t.deepEqual( @@ -3312,7 +3320,7 @@ t.test('ls --json', (t) => { peerDependencies: {}, _dependencies: {}, path: '{CWD}/ls-ls-json--long-depth-0/node_modules/peer-dep', - extraneous: false + extraneous: false, }, 'dev-dep': { name: 'dev-dep', @@ -3323,7 +3331,7 @@ t.test('ls --json', (t) => { peerDependencies: {}, _dependencies: { foo: '^1.0.0' }, path: '{CWD}/ls-ls-json--long-depth-0/node_modules/dev-dep', - extraneous: false + extraneous: false, }, lorem: { name: 'lorem', @@ -3333,7 +3341,7 @@ t.test('ls --json', (t) => { peerDependencies: {}, _dependencies: {}, path: '{CWD}/ls-ls-json--long-depth-0/node_modules/lorem', - extraneous: false + extraneous: false, }, 'optional-dep': { name: 'optional-dep', @@ -3344,7 +3352,7 @@ t.test('ls --json', (t) => { peerDependencies: {}, _dependencies: {}, path: '{CWD}/ls-ls-json--long-depth-0/node_modules/optional-dep', - extraneous: false + extraneous: false, }, 'prod-dep': { name: 'prod-dep', @@ -3355,8 +3363,8 @@ t.test('ls --json', (t) => { peerDependencies: {}, _dependencies: { bar: '^2.0.0' }, path: '{CWD}/ls-ls-json--long-depth-0/node_modules/prod-dep', - extraneous: false - } + extraneous: false, + }, }, devDependencies: { 'dev-dep': 
'^1.0.0' }, optionalDependencies: { 'optional-dep': '^1.0.0' }, @@ -3364,7 +3372,7 @@ t.test('ls --json', (t) => { _id: 'test-npm-ls@1.0.0', _dependencies: { 'prod-dep': '^1.0.0', lorem: '^1.0.0', 'optional-dep': '^1.0.0' }, path: '{CWD}/ls-ls-json--long-depth-0', - extraneous: false + extraneous: false, }, 'should output json containing top-level deps in long format' ) @@ -3377,7 +3385,7 @@ t.test('ls --json', (t) => { t.test('json read problems', (t) => { prefix = t.testdir({ - 'package.json': '{broken json' + 'package.json': '{broken json', }) ls([], (err) => { t.match(err.message, 'Failed to parse root package.json', 'should have missin root package.json msg') @@ -3387,8 +3395,8 @@ t.test('ls --json', (t) => { { invalid: true, problems: [ - 'error in {CWD}/ls-ls-json-json-read-problems: Failed to parse root package.json' - ] + 'error in {CWD}/ls-ls-json-json-read-problems: Failed to parse root package.json', + ], }, 'should print empty json result' ) @@ -3416,19 +3424,19 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { - 'optional-dep': '^1.0.0' + 'optional-dep': '^1.0.0', }, peerDependencies: { - 'peer-dep': '^2.0.0' // mismatching version # - } + 'peer-dep': '^2.0.0', // mismatching version # + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], (err) => { t.match(err.code, 'ELSPROBLEMS', 'Should have ELSPROBLEMS error code') @@ -3438,29 +3446,29 @@ t.test('ls --json', (t) => { name: 'test-npm-ls', version: '1.0.0', problems: [ - 'invalid: peer-dep@1.0.0 {CWD}/ls-ls-json-unmet-peer-dep/node_modules/peer-dep' + 'invalid: peer-dep@1.0.0 {CWD}/ls-ls-json-unmet-peer-dep/node_modules/peer-dep', ], dependencies: { 'peer-dep': { version: '1.0.0', invalid: true, problems: [ - 'invalid: peer-dep@1.0.0 {CWD}/ls-ls-json-unmet-peer-dep/node_modules/peer-dep' - ] + 'invalid: 
peer-dep@1.0.0 {CWD}/ls-ls-json-unmet-peer-dep/node_modules/peer-dep', + ], }, 'dev-dep': { version: '1.0.0', dependencies: { foo: { version: '1.0.0', - dependencies: { bar: { version: '1.0.0' } } - } - } + dependencies: { bar: { version: '1.0.0' } }, + }, + }, }, lorem: { version: '1.0.0' }, 'optional-dep': { version: '1.0.0' }, - 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } } - } + 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }, + }, }, 'should output json signaling missing peer dep in problems' ) @@ -3475,20 +3483,20 @@ t.test('ls --json', (t) => { version: '1.0.0', dependencies: { 'prod-dep': '^1.0.0', - 'lorem': '^1.0.0' + lorem: '^1.0.0', }, devDependencies: { - 'dev-dep': '^1.0.0' + 'dev-dep': '^1.0.0', }, optionalDependencies: { 'missing-optional-dep': '^1.0.0', - 'optional-dep': '^2.0.0' // mismatching version # + 'optional-dep': '^2.0.0', // mismatching version # }, peerDependencies: { - 'peer-dep': '^1.0.0' - } + 'peer-dep': '^1.0.0', + }, }), - ...diffDepTypesNmFixture + ...diffDepTypesNmFixture, }) ls([], (err) => { t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') @@ -3499,32 +3507,32 @@ t.test('ls --json', (t) => { name: 'test-npm-ls', version: '1.0.0', problems: [ - 'invalid: optional-dep@1.0.0 {CWD}/ls-ls-json-unmet-optional-dep/node_modules/optional-dep' // mismatching optional deps get flagged in problems + 'invalid: optional-dep@1.0.0 {CWD}/ls-ls-json-unmet-optional-dep/node_modules/optional-dep', // mismatching optional deps get flagged in problems ], dependencies: { 'optional-dep': { version: '1.0.0', invalid: true, problems: [ - 'invalid: optional-dep@1.0.0 {CWD}/ls-ls-json-unmet-optional-dep/node_modules/optional-dep' - ] + 'invalid: optional-dep@1.0.0 {CWD}/ls-ls-json-unmet-optional-dep/node_modules/optional-dep', + ], }, 'peer-dep': { - version: '1.0.0' + version: '1.0.0', }, 'dev-dep': { version: '1.0.0', dependencies: { foo: { version: '1.0.0', - 
dependencies: { bar: { version: '1.0.0' } } - } - } + dependencies: { bar: { version: '1.0.0' } }, + }, + }, }, lorem: { version: '1.0.0' }, 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }, - 'missing-optional-dep': {} // missing optional dep has an empty entry in json output - } + 'missing-optional-dep': {}, // missing optional dep has an empty entry in json output + }, }, 'should output json with empty entry for missing optional deps' ) @@ -3538,29 +3546,29 @@ t.test('ls --json', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'a': '^1.0.0' - } + a: '^1.0.0', + }, }), node_modules: { - 'a': { + a: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.0', dependencies: { - b: '^1.0.0' - } - }) + b: '^1.0.0', + }, + }), }, - 'b': { + b: { 'package.json': JSON.stringify({ name: 'b', version: '1.0.0', dependencies: { - a: '^1.0.0' - } - }) - } - } + a: '^1.0.0', + }, + }), + }, + }, }) ls([], () => { t.deepEqual( @@ -3575,12 +3583,12 @@ t.test('ls --json', (t) => { b: { version: '1.0.0', dependencies: { - a: { version: '1.0.0' } - } - } - } - } - } + a: { version: '1.0.0' }, + }, + }, + }, + }, + }, }, 'should print json output containing deduped ref' ) @@ -3594,22 +3602,22 @@ t.test('ls --json', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - a: 'npm:b@1.0.0' - } + a: 'npm:b@1.0.0', + }, }), node_modules: { - 'a': { + a: { 'package.json': JSON.stringify({ name: 'b', version: '1.0.0', _from: 'a@npm:b', _resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', _requested: { - type: 'alias' - } - }) - } - } + type: 'alias', + }, + }), + }, + }, }) ls([], () => { t.deepEqual( @@ -3620,9 +3628,9 @@ t.test('ls --json', (t) => { dependencies: { a: { version: '1.0.0', - resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz' - } - } + resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', + }, + }, }, 'should output json containing aliases' ) @@ -3636,11 +3644,11 @@ t.test('ls --json', 
(t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'abbrev': 'git+https://github.com/isaacs/abbrev-js.git' - } + abbrev: 'git+https://github.com/isaacs/abbrev-js.git', + }, }), node_modules: { - 'abbrev': { + abbrev: { 'package.json': JSON.stringify({ name: 'abbrev', version: '1.1.1', @@ -3653,11 +3661,11 @@ t.test('ls --json', (t) => { rawSpec: 'git+https:github.com/isaacs/abbrev-js.git', saveSpec: 'git+https://github.com/isaacs/abbrev-js.git', fetchSpec: 'https://github.com/isaacs/abbrev-js.git', - gitCommittish: null - } - }) - } - } + gitCommittish: null, + }, + }), + }, + }, }) ls([], () => { t.deepEqual( @@ -3668,9 +3676,9 @@ t.test('ls --json', (t) => { dependencies: { abbrev: { version: '1.1.1', - resolved: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c' - } - } + resolved: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + }, + }, }, 'should output json containing git refs' ) @@ -3684,8 +3692,8 @@ t.test('ls --json', (t) => { name: 'test-npm-ls', version: '1.0.0', dependencies: { - 'simple-output': '^2.0.0' - } + 'simple-output': '^2.0.0', + }, }), node_modules: { 'simple-output': { @@ -3703,17 +3711,17 @@ t.test('ls --json', (t) => { escapedName: 'simple-output', rawSpec: '', saveSpec: null, - fetchSpec: 'latest' + fetchSpec: 'latest', }, _requiredBy: [ '#USER', - '/' + '/', ], _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', - _spec: 'simple-output' - }) - } - } + _spec: 'simple-output', + }), + }, + }, }) ls([], () => { t.deepEqual( @@ -3724,9 +3732,9 @@ t.test('ls --json', (t) => { dependencies: { 'simple-output': { version: '2.1.1', - resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz' - } - } + resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', + }, + }, }, 'should be printed in json output' ) @@ 
-3737,15 +3745,15 @@ t.test('ls --json', (t) => { t.test('node.name fallback if missing root package name', (t) => { prefix = t.testdir({ 'package.json': JSON.stringify({ - version: '1.0.0' - }) + version: '1.0.0', + }), }) ls([], () => { t.deepEqual( jsonParse(result), { - 'version': '1.0.0', - 'name': 'ls-ls-json-node-name-fallback-if-missing-root-package-name' + version: '1.0.0', + name: 'ls-ls-json-node-name-fallback-if-missing-root-package-name', }, 'should use node.name as key in json result obj' ) @@ -3760,24 +3768,24 @@ t.test('ls --json', (t) => { a: { 'package.json': JSON.stringify({ name: 'a', - version: '1.0.0' - }) + version: '1.0.0', + }), }, b: { 'package.json': JSON.stringify({ name: 'b', - version: '1.0.0' + version: '1.0.0', }), node_modules: { c: { 'package.json': JSON.stringify({ name: 'c', - version: '1.0.0' - }) - } - } - } - } + version: '1.0.0', + }), + }, + }, + }, + }, }) // mimics lib/npm.js globalDir getter but pointing to fixtures @@ -3787,19 +3795,19 @@ t.test('ls --json', (t) => { t.deepEqual( jsonParse(result), { - 'dependencies': { - 'a': { - 'version': '1.0.0' - }, - 'b': { - 'version': '1.0.0', - 'dependencies': { - 'c': { - 'version': '1.0.0' - } - } - } - } + dependencies: { + a: { + version: '1.0.0', + }, + b: { + version: '1.0.0', + dependencies: { + c: { + version: '1.0.0', + }, + }, + }, + }, }, 'should print json output for global deps' ) diff --git a/deps/npm/test/lib/npm.js b/deps/npm/test/lib/npm.js index f6a13b90fa5e3c..0e0adcf1db9376 100644 --- a/deps/npm/test/lib/npm.js +++ b/deps/npm/test/lib/npm.js @@ -24,14 +24,14 @@ const actualPlatform = process.platform const beWindows = () => { Object.defineProperty(process, 'platform', { value: 'win32', - configurable: true + configurable: true, }) } const bePosix = () => { Object.defineProperty(process, 'platform', { value: 'posix', - configurable: true + configurable: true, }) } @@ -41,9 +41,9 @@ const npmPath = resolve(__dirname, '..', '..') const Config = 
require('@npmcli/config') const { types, defaults, shorthands } = require('../../lib/utils/config.js') const freshConfig = (opts = {}) => { - for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) { + for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) delete process.env[env] - } + process.env.npm_config_cache = CACHE npm.config = new Config({ @@ -52,14 +52,13 @@ const freshConfig = (opts = {}) => { shorthands, npmPath, log: npmlog, - ...opts + ...opts, }) } const logs = [] -for (const level of ['silly', 'verbose', 'timing', 'notice', 'warn', 'error']) { +for (const level of ['silly', 'verbose', 'timing', 'notice', 'warn', 'error']) npmlog[level] = (...msg) => logs.push([level, ...msg]) -} const npm = require('../../lib/npm.js') @@ -73,7 +72,7 @@ t.test('not yet loaded', t => { config: { loaded: false, get: Function, - set: Function + set: Function, }, version: String, }) @@ -103,7 +102,9 @@ t.test('npm.load', t => { t.test('load error', t => { const { load } = npm.config const loadError = new Error('load error') - npm.config.load = async () => { throw loadError } + npm.config.load = async () => { + throw loadError + } npm.load(er => { t.equal(er, loadError) t.equal(npm.loadErr, loadError) @@ -120,13 +121,13 @@ t.test('npm.load', t => { t.test('basic loading', t => { const dir = t.testdir({ - node_modules: {} + node_modules: {}, }) let firstCalled = false const first = (er) => { - if (er) { + if (er) throw er - } + firstCalled = true t.equal(npm.loaded, true) t.equal(npm.config.loaded, true) @@ -134,7 +135,9 @@ t.test('npm.load', t => { } let secondCalled = false - const second = () => { secondCalled = true } + const second = () => { + secondCalled = true + } t.equal(npm.loading, false, 'not loading yet') const p = npm.load(first).then(() => { @@ -142,16 +145,18 @@ t.test('npm.load', t => { t.match(npm, { loaded: true, loading: false, - flatOptions: {} + flatOptions: {}, }) t.equal(firstCalled, true, 'first callback got 
called') t.equal(secondCalled, true, 'second callback got called') let thirdCalled = false - const third = () => { thirdCalled = true } + const third = () => { + thirdCalled = true + } npm.load(third) t.equal(thirdCalled, true, 'third callbback got called') t.match(logs, [ - ['timing', 'npm:load', /Completed in [0-9]+ms/] + ['timing', 'npm:load', /Completed in [0-9]+ms/], ]) logs.length = 0 @@ -216,22 +221,22 @@ t.test('npm.load', t => { t.test('forceful loading', t => { // also, don't get thrown off if argv[0] isn't found for some reason - const [ argv0 ] = process.argv + const [argv0] = process.argv t.teardown(() => { process.argv[0] = argv0 }) freshConfig({ argv: [...process.argv, '--force', '--color', 'always'] }) process.argv[0] = 'this exe does not exist or else this test will fail' return npm.load(er => { - if (er) { + if (er) throw er - } + t.match(logs.filter(l => l[0] !== 'timing'), [ [ 'warn', 'using --force', - 'Recommended protections disabled.' - ] + 'Recommended protections disabled.', + ], ]) logs.length = 0 }) @@ -240,7 +245,7 @@ t.test('npm.load', t => { t.test('node is a symlink', async t => { const node = actualPlatform === 'win32' ? 
'node.exe' : 'node' const dir = t.testdir({ - '.npmrc': 'foo = bar' + '.npmrc': 'foo = bar', }) // create manually to set the 'file' option in windows @@ -279,16 +284,16 @@ t.test('npm.load', t => { logs.length = 0 await npm.load(er => { - if (er) { + if (er) throw er - } + t.equal(npm.config.get('scope'), '@foo', 'added the @ sign to scope') t.equal(npm.config.get('metrics-registry'), 'http://example.com') t.match(logs.filter(l => l[0] !== 'timing' || !/^config:/.test(l[1])), [ [ 'verbose', 'node symlink', - resolve(dir, node) + resolve(dir, node), ], [ 'timing', @@ -301,9 +306,9 @@ t.test('npm.load', t => { }) await npm.commands.ll([], (er) => { - if (er) { + if (er) throw er - } + t.same(consoleLogs, [[require('../../lib/ls.js').usage]], 'print usage') consoleLogs.length = 0 npm.config.set('usage', false) @@ -312,9 +317,9 @@ t.test('npm.load', t => { }) await npm.commands.get(['scope', '\u2010not-a-dash'], (er) => { - if (er) { + if (er) throw er - } + t.match(logs, [ [ 'error', @@ -358,7 +363,7 @@ t.test('loading as main will load the cli', t => { }) t.test('set process.title', t => { - const { execPath, argv: processArgv } = process + const { argv: processArgv } = process const { log } = console const titleDesc = Object.getOwnPropertyDescriptor(process, 'title') Object.defineProperty(process, 'title', { diff --git a/deps/npm/test/lib/outdated.js b/deps/npm/test/lib/outdated.js index 0cba04d5471304..7a5bd8f0ef870c 100644 --- a/deps/npm/test/lib/outdated.js +++ b/deps/npm/test/lib/outdated.js @@ -6,58 +6,57 @@ const packument = spec => { alpha: { name: 'alpha', 'dist-tags': { - latest: '1.0.1' + latest: '1.0.1', }, versions: { '1.0.1': { version: '1.0.1', dependencies: { - gamma: '2.0.0' - } - } - } + gamma: '2.0.0', + }, + }, + }, }, beta: { name: 'beta', 'dist-tags': { - latest: '1.0.1' + latest: '1.0.1', }, versions: { '1.0.1': { - version: '1.0.1' - } - } + version: '1.0.1', + }, + }, }, gamma: { name: 'gamma', 'dist-tags': { - latest: '2.0.0' + latest: 
'2.0.0', }, versions: { '1.0.1': { - version: '1.0.1' + version: '1.0.1', }, '2.0.0': { - version: '2.0.0' - } - } + version: '2.0.0', + }, + }, }, theta: { name: 'theta', 'dist-tags': { - latest: '1.0.1' + latest: '1.0.1', }, versions: { '1.0.1': { - version: '1.0.1' - } - } - } + version: '1.0.1', + }, + }, + }, } - if (spec.name === 'eta') { + if (spec.name === 'eta') throw new Error('There is an error with this package.') - } if (!mocks[spec.name]) { const err = new Error() @@ -87,10 +86,10 @@ const globalDir = t.testdir({ alpha: { 'package.json': JSON.stringify({ name: 'alpha', - version: '1.0.0' - }, null, 2) - } - } + version: '1.0.0', + }, null, 2), + }, + }, }) const outdated = (dir, opts) => requireInject( @@ -99,11 +98,11 @@ const outdated = (dir, opts) => requireInject( '../../lib/npm.js': { prefix: dir, globalDir: `${globalDir}/node_modules`, - flatOptions: opts + flatOptions: opts, }, pacote: { - packument - } + packument, + }, } ) @@ -127,14 +126,14 @@ t.test('should display outdated deps', t => { dependencies: { alpha: '^1.0.0', gamma: '^1.0.0', - theta: '^1.0.0' + theta: '^1.0.0', }, devDependencies: { - zeta: '^1.0.0' + zeta: '^1.0.0', }, peerDependencies: { - beta: '^1.0.0' - } + beta: '^1.0.0', + }, }, null, 2), node_modules: { alpha: { @@ -142,42 +141,42 @@ t.test('should display outdated deps', t => { name: 'alpha', version: '1.0.0', dependencies: { - gamma: '2.0.0' - } + gamma: '2.0.0', + }, }, null, 2), node_modules: { gamma: { 'package.json': JSON.stringify({ name: 'gamma', - version: '2.0.0' - }, null, 2) - } - } + version: '2.0.0', + }, null, 2), + }, + }, }, beta: { 'package.json': JSON.stringify({ name: 'beta', - version: '1.0.0' - }, null, 2) + version: '1.0.0', + }, null, 2), }, gamma: { 'package.json': JSON.stringify({ name: 'gamma', - version: '1.0.1' - }, null, 2) + version: '1.0.1', + }, null, 2), }, zeta: { 'package.json': JSON.stringify({ name: 'zeta', - version: '1.0.0' - }, null, 2) - } - } + version: '1.0.0', + }, null, 2), + 
}, + }, }) t.test('outdated global', t => { outdated(null, { - global: true + global: true, })([], () => { t.matchSnapshot(logs) t.end() @@ -187,7 +186,7 @@ t.test('should display outdated deps', t => { t.test('outdated', t => { outdated(testDir, { global: false, - color: true + color: true, })([], () => { t.matchSnapshot(logs) t.end() @@ -198,7 +197,7 @@ t.test('should display outdated deps', t => { outdated(testDir, { global: false, color: true, - omit: ['dev'] + omit: ['dev'], })([], () => { t.matchSnapshot(logs) t.end() @@ -209,7 +208,7 @@ t.test('should display outdated deps', t => { outdated(testDir, { global: false, color: true, - omit: ['dev', 'peer'] + omit: ['dev', 'peer'], })([], () => { t.matchSnapshot(logs) t.end() @@ -220,7 +219,7 @@ t.test('should display outdated deps', t => { outdated(testDir, { global: false, color: true, - omit: ['prod'] + omit: ['prod'], })([], () => { t.matchSnapshot(logs) t.end() @@ -230,7 +229,7 @@ t.test('should display outdated deps', t => { t.test('outdated --long', t => { outdated(testDir, { global: false, - long: true + long: true, })([], () => { t.matchSnapshot(logs) t.end() @@ -240,7 +239,7 @@ t.test('should display outdated deps', t => { t.test('outdated --json', t => { outdated(testDir, { global: false, - json: true + json: true, })([], () => { t.matchSnapshot(logs) t.end() @@ -251,7 +250,7 @@ t.test('should display outdated deps', t => { outdated(testDir, { global: false, json: true, - long: true + long: true, })([], () => { t.matchSnapshot(logs) t.end() @@ -261,7 +260,7 @@ t.test('should display outdated deps', t => { t.test('outdated --parseable', t => { outdated(testDir, { global: false, - parseable: true + parseable: true, })([], () => { t.matchSnapshot(logs) t.end() @@ -272,7 +271,7 @@ t.test('should display outdated deps', t => { outdated(testDir, { global: false, parseable: true, - long: true + long: true, })([], () => { t.matchSnapshot(logs) t.end() @@ -281,7 +280,7 @@ t.test('should display outdated deps', 
t => { t.test('outdated --all', t => { outdated(testDir, { - all: true + all: true, })([], () => { t.matchSnapshot(logs) t.end() @@ -290,7 +289,7 @@ t.test('should display outdated deps', t => { t.test('outdated specific dep', t => { outdated(testDir, { - global: false + global: false, })(['alpha'], () => { t.matchSnapshot(logs) t.end() @@ -306,21 +305,21 @@ t.test('should return if no outdated deps', t => { name: 'delta', version: '1.0.0', dependencies: { - alpha: '^1.0.0' - } + alpha: '^1.0.0', + }, }, null, 2), node_modules: { alpha: { 'package.json': JSON.stringify({ name: 'alpha', - version: '1.0.1' - }, null, 2) - } - } + version: '1.0.1', + }, null, 2), + }, + }, }) outdated(testDir, { - global: false + global: false, })([], () => { t.equals(logs.length, 0, 'no logs') t.end() @@ -333,21 +332,21 @@ t.test('throws if error with a dep', t => { name: 'delta', version: '1.0.0', dependencies: { - eta: '^1.0.0' - } + eta: '^1.0.0', + }, }, null, 2), node_modules: { eta: { 'package.json': JSON.stringify({ name: 'eta', - version: '1.0.1' - }, null, 2) - } - } + version: '1.0.1', + }, null, 2), + }, + }, }) outdated(testDir, { - global: false + global: false, })([], (err) => { t.equals(err.message, 'There is an error with this package.') t.end() @@ -360,14 +359,14 @@ t.test('should skip missing non-prod deps', t => { name: 'delta', version: '1.0.0', devDependencies: { - beta: '^1.0.0' - } + beta: '^1.0.0', + }, }, null, 2), - node_modules: {} + node_modules: {}, }) outdated(testDir, { - global: false + global: false, })([], () => { t.equals(logs.length, 0, 'no logs') t.end() @@ -380,17 +379,17 @@ t.test('should skip invalid pkg ranges', t => { name: 'delta', version: '1.0.0', dependencies: { - alpha: '>=^2' - } + alpha: '>=^2', + }, }, null, 2), node_modules: { alpha: { 'package.json': JSON.stringify({ name: 'alpha', - version: '1.0.0' - }, null, 2) - } - } + version: '1.0.0', + }, null, 2), + }, + }, }) outdated(testDir, {})([], () => { @@ -405,17 +404,17 @@ 
t.test('should skip git specs', t => { name: 'delta', version: '1.0.0', dependencies: { - alpha: 'github:username/foo' - } + alpha: 'github:username/foo', + }, }, null, 2), node_modules: { alpha: { 'package.json': JSON.stringify({ name: 'alpha', - version: '1.0.0' - }, null, 2) - } - } + version: '1.0.0', + }, null, 2), + }, + }, }) outdated(testDir, {})([], () => { diff --git a/deps/npm/test/lib/owner.js b/deps/npm/test/lib/owner.js index dc179e4662028c..e217533f0de241 100644 --- a/deps/npm/test/lib/owner.js +++ b/deps/npm/test/lib/owner.js @@ -16,17 +16,19 @@ const mocks = { 'npm-registry-fetch': npmFetch, pacote, '../../lib/npm.js': npm, - '../../lib/utils/output.js': (...msg) => { result += msg.join('\n') }, + '../../lib/utils/output.js': (...msg) => { + result += msg.join('\n') + }, '../../lib/utils/otplease.js': async (opts, fn) => fn({ otp: '123456', opts }), '../../lib/utils/read-local-package.js': async () => readLocalPkgResponse, - '../../lib/utils/usage.js': () => 'usage instructions' + '../../lib/utils/usage.js': () => 'usage instructions', } const npmcliMaintainers = [ { email: 'quitlahok@gmail.com', name: 'nlf' }, { email: 'ruyadorno@hotmail.com', name: 'ruyadorno' }, { email: 'darcy@darcyclarke.me', name: 'darcyclarke' }, - { email: 'i@izs.me', name: 'isaacs' } + { email: 'i@izs.me', name: 'isaacs' }, ] const owner = requireInject('../../lib/owner.js', mocks) @@ -59,7 +61,7 @@ t.test('owner ls no args', t => { opts, { ...npm.flatOptions, - fullMetadata: true + fullMetadata: true, }, 'should forward expected options to pacote.packument' ) @@ -132,7 +134,7 @@ t.test('owner ls ', t => { opts, { ...npm.flatOptions, - fullMetadata: true + fullMetadata: true, }, 'should forward expected options to pacote.packument' ) @@ -178,7 +180,7 @@ t.test('owner add ', t => { return { _id: 'org.couchdb.user:foo', email: 'foo@github.com', - name: 'foo' + name: 'foo', } } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { t.ok('should put 
changed owner') @@ -187,12 +189,12 @@ t.test('owner add ', t => { method: 'PUT', body: { _rev: '1-foobaaa1', - maintainers: npmcliMaintainers + maintainers: npmcliMaintainers, }, otp: '123456', spec: { - name: '@npmcli/map-workspaces' - } + name: '@npmcli/map-workspaces', + }, }, 'should use expected opts') t.deepEqual( opts.body.maintainers, @@ -200,15 +202,14 @@ t.test('owner add ', t => { ...npmcliMaintainers, { name: 'foo', - email: 'foo@github.com' - } + email: 'foo@github.com', + }, ], 'should contain expected new owners, adding requested user' ) return {} - } else { + } else t.fail(`unexpected fetch json call to uri: ${uri}`) - } } pacote.packument = async (spec, opts) => { t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name') @@ -216,13 +217,13 @@ t.test('owner add ', t => { opts, { ...npm.flatOptions, - fullMetadata: true + fullMetadata: true, }, 'should forward expected options to pacote.packument' ) return { _rev: '1-foobaaa1', - maintainers: npmcliMaintainers + maintainers: npmcliMaintainers, } } t.teardown(() => { @@ -246,17 +247,16 @@ t.test('owner add cwd package', t => { return { _id: 'org.couchdb.user:foo', email: 'foo@github.com', - name: 'foo' + name: 'foo', } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { + } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') return {} - } else { + else t.fail(`unexpected fetch json call to uri: ${uri}`) - } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', - maintainers: npmcliMaintainers + maintainers: npmcliMaintainers, }) t.teardown(() => { result = '' @@ -290,16 +290,15 @@ t.test('owner add already an owner', t => { return { _id: 'org.couchdb.user:ruyadorno', email: 'ruyadorno@hotmail.com', - name: 'ruyadorno' + name: 'ruyadorno', } - } else { + } else t.fail(`unexpected fetch json call to uri: ${uri}`) - } } pacote.packument = async (spec, opts) => { return { _rev: '1-foobaaa1', - maintainers: 
npmcliMaintainers + maintainers: npmcliMaintainers, } } t.teardown(() => { @@ -319,17 +318,16 @@ t.test('owner add fails to retrieve user', t => { readLocalPkgResponse = npmFetch.json = async (uri, opts) => { // retrieve borked user info from couchdb request - if (uri === '/-/user/org.couchdb.user:foo') { + if (uri === '/-/user/org.couchdb.user:foo') return { ok: false } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { + else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') return {} - } else { + else t.fail(`unexpected fetch json call to uri: ${uri}`) - } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', - maintainers: npmcliMaintainers + maintainers: npmcliMaintainers, }) t.teardown(() => { result = '' @@ -357,22 +355,21 @@ t.test('owner add fails to PUT updates', t => { return { _id: 'org.couchdb.user:foo', email: 'foo@github.com', - name: 'foo' + name: 'foo', } } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { return { error: { status: '418', - message: "I'm a teapot" - } + message: "I'm a teapot", + }, } - } else { + } else t.fail(`unexpected fetch json call to uri: ${uri}`) - } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', - maintainers: npmcliMaintainers + maintainers: npmcliMaintainers, }) t.teardown(() => { result = '' @@ -406,13 +403,12 @@ t.test('owner add fails to retrieve user info', t => { new Error("I'm a teapot"), { status: 418 } ) - } else { + } else t.fail(`unexpected fetch json call to uri: ${uri}`) - } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', - maintainers: npmcliMaintainers + maintainers: npmcliMaintainers, }) t.teardown(() => { result = '' @@ -438,18 +434,17 @@ t.test('owner add no previous maintainers property from server', t return { _id: 'org.couchdb.user:foo', email: 'foo@github.com', - name: 'foo' + name: 'foo', } - } else if (uri === '/@npmcli%2fno-owners-pkg/-rev/1-foobaaa1') { + } else if (uri === 
'/@npmcli%2fno-owners-pkg/-rev/1-foobaaa1') return {} - } else { + else t.fail(`unexpected fetch json call to uri: ${uri}`) - } } pacote.packument = async (spec, opts) => { return { _rev: '1-foobaaa1', - maintainers: null + maintainers: null, } } t.teardown(() => { @@ -509,7 +504,7 @@ t.test('owner rm ', t => { return { _id: 'org.couchdb.user:ruyadorno', email: 'ruyadorno@hotmail.com', - name: 'ruyadorno' + name: 'ruyadorno', } } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { t.ok('should put changed owner') @@ -517,12 +512,12 @@ t.test('owner rm ', t => { ...npm.flatOptions, method: 'PUT', body: { - _rev: '1-foobaaa1' + _rev: '1-foobaaa1', }, otp: '123456', spec: { - name: '@npmcli/map-workspaces' - } + name: '@npmcli/map-workspaces', + }, }, 'should use expected opts') t.deepEqual( opts.body.maintainers, @@ -530,9 +525,8 @@ t.test('owner rm ', t => { 'should contain expected new owners, removing requested user' ) return {} - } else { + } else t.fail(`unexpected fetch json call to: ${uri}`) - } } pacote.packument = async (spec, opts) => { t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name') @@ -540,13 +534,13 @@ t.test('owner rm ', t => { opts, { ...npm.flatOptions, - fullMetadata: true + fullMetadata: true, }, 'should forward expected options to pacote.packument' ) return { _rev: '1-foobaaa1', - maintainers: npmcliMaintainers + maintainers: npmcliMaintainers, } } t.teardown(() => { @@ -575,18 +569,17 @@ t.test('owner rm not a current owner', t => { return { _id: 'org.couchdb.user:foo', email: 'foo@github.com', - name: 'foo' + name: 'foo', } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { + } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') return {} - } else { + else t.fail(`unexpected fetch json call to: ${uri}`) - } } pacote.packument = async (spec, opts) => { return { _rev: '1-foobaaa1', - maintainers: npmcliMaintainers + maintainers: npmcliMaintainers, } } t.teardown(() => { 
@@ -610,17 +603,16 @@ t.test('owner rm cwd package', t => { return { _id: 'org.couchdb.user:ruyadorno', email: 'ruyadorno@hotmail.com', - name: 'ruyadorno' + name: 'ruyadorno', } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { + } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') return {} - } else { + else t.fail(`unexpected fetch json call to uri: ${uri}`) - } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', - maintainers: npmcliMaintainers + maintainers: npmcliMaintainers, }) t.teardown(() => { result = '' @@ -645,18 +637,17 @@ t.test('owner rm only user', t => { return { _id: 'org.couchdb.user:ruyadorno', email: 'ruyadorno@hotmail.com', - name: 'ruyadorno' + name: 'ruyadorno', } - } else { + } else t.fail(`unexpected fetch json call to uri: ${uri}`) - } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', maintainers: [{ name: 'ruyadorno', - email: 'ruyadorno@hotmail.com' - }] + email: 'ruyadorno@hotmail.com', + }], }) t.teardown(() => { result = '' @@ -722,7 +713,7 @@ t.test('completion', t => { testComp(['npm', 'owner'], [ 'add', 'rm', - 'ls' + 'ls', ]) testComp(['npm', 'owner', 'add'], []) testComp(['npm', 'owner', 'ls'], []) @@ -735,7 +726,7 @@ t.test('completion', t => { pacote.packument = async spec => { t.equal(spec.name, readLocalPkgResponse, 'should use package spec') return { - maintainers: npmcliMaintainers + maintainers: npmcliMaintainers, } } t.teardown(() => { @@ -751,7 +742,7 @@ t.test('completion', t => { 'nlf', 'ruyadorno', 'darcyclarke', - 'isaacs' + 'isaacs', ], 'should return list of current owners' ) @@ -772,7 +763,7 @@ t.test('completion', t => { pacote.packument = async spec => { t.equal(spec.name, readLocalPkgResponse, 'should use package spec') return { - maintainers: [] + maintainers: [], } } t.teardown(() => { diff --git a/deps/npm/test/lib/pack.js b/deps/npm/test/lib/pack.js index 097204ea92bea3..851174d259cb35 100644 --- a/deps/npm/test/lib/pack.js +++ 
b/deps/npm/test/lib/pack.js @@ -4,11 +4,10 @@ const requireInject = require('require-inject') const OUTPUT = [] const output = (...msg) => OUTPUT.push(msg) -const libnpmpackActual = require('libnpmpack') const libnpmpack = async (spec, opts) => { - if (!opts) { + if (!opts) throw new Error('expected options object') - } + return '' } @@ -24,21 +23,21 @@ t.test('should pack current directory with no arguments', (t) => { flatOptions: { unicode: false, json: false, - dryRun: false - } + dryRun: false, + }, }, libnpmpack, npmlog: { notice: () => {}, showProgress: () => {}, - clearProgress: () => {} - } + clearProgress: () => {}, + }, }) return pack([], er => { - if (er) { + if (er) throw er - } + const filename = `npm-${require('../../package.json').version}.tgz` t.strictSame(OUTPUT, [[filename]]) }) @@ -48,8 +47,8 @@ t.test('should pack given directory', (t) => { const testDir = t.testdir({ 'package.json': JSON.stringify({ name: 'my-cool-pkg', - version: '1.0.0' - }, null, 2) + version: '1.0.0', + }, null, 2), }) const pack = requireInject('../../lib/pack.js', { @@ -58,21 +57,21 @@ t.test('should pack given directory', (t) => { flatOptions: { unicode: true, json: true, - dryRun: true - } + dryRun: true, + }, }, libnpmpack, npmlog: { notice: () => {}, - 'showProgress': () => {}, - 'clearProgress': () => {} - } + showProgress: () => {}, + clearProgress: () => {}, + }, }) return pack([testDir], er => { - if (er) { + if (er) throw er - } + const filename = 'my-cool-pkg-1.0.0.tgz' t.strictSame(OUTPUT, [[filename]]) }) @@ -82,8 +81,8 @@ t.test('should pack given directory for scoped package', (t) => { const testDir = t.testdir({ 'package.json': JSON.stringify({ name: '@cool/my-pkg', - version: '1.0.0' - }, null, 2) + version: '1.0.0', + }, null, 2), }) const pack = requireInject('../../lib/pack.js', { @@ -92,21 +91,21 @@ t.test('should pack given directory for scoped package', (t) => { flatOptions: { unicode: true, json: true, - dryRun: true - } + dryRun: true, + }, }, 
libnpmpack, npmlog: { notice: () => {}, - 'showProgress': () => {}, - 'clearProgress': () => {} - } + showProgress: () => {}, + clearProgress: () => {}, + }, }) return pack([testDir], er => { - if (er) { + if (er) throw er - } + const filename = 'cool-my-pkg-1.0.0.tgz' t.strictSame(OUTPUT, [[filename]]) }) @@ -119,27 +118,27 @@ t.test('should log pack contents', (t) => { ...require('../../lib/utils/tar.js'), logTar: () => { t.ok(true, 'logTar is called') - } + }, }, '../../lib/npm.js': { flatOptions: { unicode: false, json: false, - dryRun: false - } + dryRun: false, + }, }, libnpmpack, npmlog: { notice: () => {}, - 'showProgress': () => {}, - 'clearProgress': () => {} - } + showProgress: () => {}, + clearProgress: () => {}, + }, }) return pack([], er => { - if (er) { + if (er) throw er - } + const filename = `npm-${require('../../package.json').version}.tgz` t.strictSame(OUTPUT, [[filename]]) }) diff --git a/deps/npm/test/lib/ping.js b/deps/npm/test/lib/ping.js index d27bf4d603cf11..a185919dddc332 100644 --- a/deps/npm/test/lib/ping.js +++ b/deps/npm/test/lib/ping.js @@ -22,8 +22,8 @@ test('pings', (t) => { t.equal(type, 'PONG', 'should log a PONG') t.match(spec, /\d+ms/, 'should log the elapsed milliseconds') } - } - } + }, + }, }) ping([], (err) => { @@ -51,7 +51,7 @@ test('pings and logs details', (t) => { if (noticeCalls === 1) { t.equal(type, 'PING', 'should log a PING') t.equal(spec, flatOptions.registry, 'should log the registry url') - } else if (noticeCalls == 2) { + } else if (noticeCalls === 2) { t.equal(type, 'PONG', 'should log a PONG') t.match(spec, /\d+ms/, 'should log the elapsed milliseconds') } else { @@ -59,8 +59,8 @@ test('pings and logs details', (t) => { const parsed = JSON.parse(spec) t.match(parsed, details, 'should log JSON stringified details') } - } - } + }, + }, }) ping([], (err) => { @@ -98,8 +98,8 @@ test('pings and returns json', (t) => { t.equal(type, 'PONG', 'should log a PONG') t.match(spec, /\d+ms/, 'should log the elapsed 
milliseconds') } - } - } + }, + }, }) ping([], (err) => { diff --git a/deps/npm/test/lib/prefix.js b/deps/npm/test/lib/prefix.js index a6e4d731ab1871..83e2d636808596 100644 --- a/deps/npm/test/lib/prefix.js +++ b/deps/npm/test/lib/prefix.js @@ -9,7 +9,7 @@ test('prefix', (t) => { '../../lib/npm.js': { prefix: dir }, '../../lib/utils/output.js': (output) => { t.equal(output, dir, 'prints the correct directory') - } + }, }) prefix([], (err) => { diff --git a/deps/npm/test/lib/prune.js b/deps/npm/test/lib/prune.js index 27c16355fa6412..074f4eac6eeee2 100644 --- a/deps/npm/test/lib/prune.js +++ b/deps/npm/test/lib/prune.js @@ -1,5 +1,4 @@ const { test } = require('tap') -const prune = require('../../lib/prune.js') const requireInject = require('require-inject') test('should prune using Arborist', (t) => { @@ -7,8 +6,8 @@ test('should prune using Arborist', (t) => { '../../lib/npm.js': { prefix: 'foo', flatOptions: { - 'foo': 'bar' - } + foo: 'bar', + }, }, '@npmcli/arborist': function (args) { t.ok(args, 'gets options object') @@ -17,13 +16,14 @@ test('should prune using Arborist', (t) => { t.ok(true, 'prune is called') } }, - '../../lib/utils/reify-output.js': (arb) => { + '../../lib/utils/reify-finish.js': (arb) => { t.ok(arb, 'gets arborist tree') - } + }, }) - prune(null, () => { + prune(null, er => { + if (er) + throw er t.ok(true, 'callback is called') t.end() }) }) - diff --git a/deps/npm/test/lib/repo.js b/deps/npm/test/lib/repo.js index fcc95f343890eb..c4b1b46e7342b7 100644 --- a/deps/npm/test/lib/repo.js +++ b/deps/npm/test/lib/repo.js @@ -5,105 +5,105 @@ const pacote = { manifest: async (spec, options) => { return spec === 'norepo' ? { name: 'norepo', - version: '1.2.3' + version: '1.2.3', } - : spec === 'repoobbj-nourl' ? { - name: 'repoobj-nourl', - repository: { no: 'url' } - } + : spec === 'repoobbj-nourl' ? { + name: 'repoobj-nourl', + repository: { no: 'url' }, + } - : spec === 'hostedgit' ? 
{ - repository: 'git://github.com/foo/hostedgit' - } - : spec === 'hostedgitat' ? { - repository: 'git@github.com:foo/hostedgitat' - } - : spec === 'hostedssh' ? { - repository: 'ssh://git@github.com/foo/hostedssh' - } - : spec === 'hostedgitssh' ? { - repository: 'git+ssh://git@github.com/foo/hostedgitssh' - } - : spec === 'hostedgithttp' ? { - repository: 'git+http://github.com/foo/hostedgithttp' - } - : spec === 'hostedgithttps' ? { - repository: 'git+https://github.com/foo/hostedgithttps' - } + : spec === 'hostedgit' ? { + repository: 'git://github.com/foo/hostedgit', + } + : spec === 'hostedgitat' ? { + repository: 'git@github.com:foo/hostedgitat', + } + : spec === 'hostedssh' ? { + repository: 'ssh://git@github.com/foo/hostedssh', + } + : spec === 'hostedgitssh' ? { + repository: 'git+ssh://git@github.com/foo/hostedgitssh', + } + : spec === 'hostedgithttp' ? { + repository: 'git+http://github.com/foo/hostedgithttp', + } + : spec === 'hostedgithttps' ? { + repository: 'git+https://github.com/foo/hostedgithttps', + } - : spec === 'hostedgitobj' ? { - repository: { url: 'git://github.com/foo/hostedgitobj' } - } - : spec === 'hostedgitatobj' ? { - repository: { url: 'git@github.com:foo/hostedgitatobj' } - } - : spec === 'hostedsshobj' ? { - repository: { url: 'ssh://git@github.com/foo/hostedsshobj' } - } - : spec === 'hostedgitsshobj' ? { - repository: { url: 'git+ssh://git@github.com/foo/hostedgitsshobj' } - } - : spec === 'hostedgithttpobj' ? { - repository: { url: 'git+http://github.com/foo/hostedgithttpobj' } - } - : spec === 'hostedgithttpsobj' ? { - repository: { url: 'git+https://github.com/foo/hostedgithttpsobj' } - } + : spec === 'hostedgitobj' ? 
{ + repository: { url: 'git://github.com/foo/hostedgitobj' }, + } + : spec === 'hostedgitatobj' ? { + repository: { url: 'git@github.com:foo/hostedgitatobj' }, + } + : spec === 'hostedsshobj' ? { + repository: { url: 'ssh://git@github.com/foo/hostedsshobj' }, + } + : spec === 'hostedgitsshobj' ? { + repository: { url: 'git+ssh://git@github.com/foo/hostedgitsshobj' }, + } + : spec === 'hostedgithttpobj' ? { + repository: { url: 'git+http://github.com/foo/hostedgithttpobj' }, + } + : spec === 'hostedgithttpsobj' ? { + repository: { url: 'git+https://github.com/foo/hostedgithttpsobj' }, + } - : spec === 'unhostedgit' ? { - repository: 'git://gothib.com/foo/unhostedgit' - } - : spec === 'unhostedgitat' ? { - repository: 'git@gothib.com:foo/unhostedgitat' - } - : spec === 'unhostedssh' ? { - repository: 'ssh://git@gothib.com/foo/unhostedssh' - } - : spec === 'unhostedgitssh' ? { - repository: 'git+ssh://git@gothib.com/foo/unhostedgitssh' - } - : spec === 'unhostedgithttp' ? { - repository: 'git+http://gothib.com/foo/unhostedgithttp' - } - : spec === 'unhostedgithttps' ? { - repository: 'git+https://gothib.com/foo/unhostedgithttps' - } + : spec === 'unhostedgit' ? { + repository: 'git://gothib.com/foo/unhostedgit', + } + : spec === 'unhostedgitat' ? { + repository: 'git@gothib.com:foo/unhostedgitat', + } + : spec === 'unhostedssh' ? { + repository: 'ssh://git@gothib.com/foo/unhostedssh', + } + : spec === 'unhostedgitssh' ? { + repository: 'git+ssh://git@gothib.com/foo/unhostedgitssh', + } + : spec === 'unhostedgithttp' ? { + repository: 'git+http://gothib.com/foo/unhostedgithttp', + } + : spec === 'unhostedgithttps' ? { + repository: 'git+https://gothib.com/foo/unhostedgithttps', + } - : spec === 'unhostedgitobj' ? { - repository: { url: 'git://gothib.com/foo/unhostedgitobj' } - } - : spec === 'unhostedgitatobj' ? 
{ - repository: { url: 'git@gothib.com:foo/unhostedgitatobj' } - } - : spec === 'unhostedsshobj' ? { - repository: { url: 'ssh://git@gothib.com/foo/unhostedsshobj' } - } - : spec === 'unhostedgitsshobj' ? { - repository: { url: 'git+ssh://git@gothib.com/foo/unhostedgitsshobj' } - } - : spec === 'unhostedgithttpobj' ? { - repository: { url: 'git+http://gothib.com/foo/unhostedgithttpobj' } - } - : spec === 'unhostedgithttpsobj' ? { - repository: { url: 'git+https://gothib.com/foo/unhostedgithttpsobj' } - } + : spec === 'unhostedgitobj' ? { + repository: { url: 'git://gothib.com/foo/unhostedgitobj' }, + } + : spec === 'unhostedgitatobj' ? { + repository: { url: 'git@gothib.com:foo/unhostedgitatobj' }, + } + : spec === 'unhostedsshobj' ? { + repository: { url: 'ssh://git@gothib.com/foo/unhostedsshobj' }, + } + : spec === 'unhostedgitsshobj' ? { + repository: { url: 'git+ssh://git@gothib.com/foo/unhostedgitsshobj' }, + } + : spec === 'unhostedgithttpobj' ? { + repository: { url: 'git+http://gothib.com/foo/unhostedgithttpobj' }, + } + : spec === 'unhostedgithttpsobj' ? { + repository: { url: 'git+https://gothib.com/foo/unhostedgithttpsobj' }, + } - : spec === 'directory' ? { - repository: { - type: 'git', - url: 'git+https://github.com/foo/test-repo-with-directory.git', - directory: 'some/directory' + : spec === 'directory' ? { + repository: { + type: 'git', + url: 'git+https://github.com/foo/test-repo-with-directory.git', + directory: 'some/directory', + }, } - } - : spec === '.' ? { - name: 'thispkg', - version: '1.2.3', - repository: 'https://example.com/thispkg.git' - } - : null - } + : spec === '.' ? 
{ + name: 'thispkg', + version: '1.2.3', + repository: 'https://example.com/thispkg.git', + } + : null + }, } // keep a tally of which urls got opened @@ -116,7 +116,7 @@ const openUrl = (url, errMsg, cb) => { const repo = requireInject('../../lib/repo.js', { pacote, - '../../lib/utils/open-url.js': openUrl + '../../lib/utils/open-url.js': openUrl, }) t.test('completion', t => { @@ -152,7 +152,7 @@ t.test('open repo urls', t => { unhostedgithttpobj: 'http://gothib.com/foo/unhostedgithttpobj', unhostedgithttpsobj: 'https://gothib.com/foo/unhostedgithttpsobj', directory: 'https://github.com/foo/test-repo-with-directory/tree/master/some/directory', - '.': 'https://example.com/thispkg' + '.': 'https://example.com/thispkg', } const keys = Object.keys(expect) t.plan(keys.length) @@ -174,7 +174,7 @@ t.test('fail if cannot figure out repo url', t => { 'norepo', 'repoobbj-nourl', 'unhostedgitat', - 'unhostedgitatobj' + 'unhostedgitatobj', ] t.plan(cases.length) diff --git a/deps/npm/test/lib/root.js b/deps/npm/test/lib/root.js index 210e9b029121c8..8c23152b3efca9 100644 --- a/deps/npm/test/lib/root.js +++ b/deps/npm/test/lib/root.js @@ -9,7 +9,7 @@ test('root', (t) => { '../../lib/npm.js': { dir }, '../../lib/utils/output.js': (output) => { t.equal(output, dir, 'prints the correct directory') - } + }, }) root([], (err) => { diff --git a/deps/npm/test/lib/run-script.js b/deps/npm/test/lib/run-script.js index 7ddb6ff6f63a56..bad8a63c0d7783 100644 --- a/deps/npm/test/lib/run-script.js +++ b/deps/npm/test/lib/run-script.js @@ -1,24 +1,23 @@ const t = require('tap') const requireInject = require('require-inject') -let RUN_FAIL = null const RUN_SCRIPTS = [] const npm = { localPrefix: __dirname, flatOptions: { scriptShell: undefined, json: false, - parseable: false + parseable: false, }, config: { settings: { - 'if-present': false + 'if-present': false, }, get: k => npm.config.settings[k], set: (k, v) => { npm.config.settings[k] = v - } - } + }, + }, } const output = 
[] @@ -33,7 +32,7 @@ const getRS = windows => requireInject('../../lib/run-script.js', { npmlog, '../../lib/npm.js': npm, '../../lib/utils/is-windows-shell.js': windows, - '../../lib/utils/output.js': (...msg) => output.push(msg) + '../../lib/utils/output.js': (...msg) => output.push(msg), }) const runScript = getRS(false) @@ -44,41 +43,41 @@ t.test('completion', t => { const dir = t.testdir() npm.localPrefix = dir t.test('already have a script name', t => { - runScript.completion({conf:{argv:{remain: ['npm','run','x']}}}, (er, results) => { - if (er) { + runScript.completion({conf: {argv: {remain: ['npm', 'run', 'x']}}}, (er, results) => { + if (er) throw er - } + t.equal(results, undefined) t.end() }) }) t.test('no package.json', t => { - runScript.completion({conf:{argv:{remain: ['npm','run']}}}, (er, results) => { - if (er) { + runScript.completion({conf: {argv: {remain: ['npm', 'run']}}}, (er, results) => { + if (er) throw er - } + t.strictSame(results, []) t.end() }) }) t.test('has package.json, no scripts', t => { writeFileSync(`${dir}/package.json`, JSON.stringify({})) - runScript.completion({conf:{argv:{remain: ['npm', 'run']}}}, (er, results) => { - if (er) { + runScript.completion({conf: {argv: {remain: ['npm', 'run']}}}, (er, results) => { + if (er) throw er - } + t.strictSame(results, []) t.end() }) }) t.test('has package.json, with scripts', t => { writeFileSync(`${dir}/package.json`, JSON.stringify({ - scripts: { hello: 'echo hello', world: 'echo world' } + scripts: { hello: 'echo hello', world: 'echo world' }, })) - runScript.completion({conf:{argv:{remain: ['npm', 'run']}}}, (er, results) => { - if (er) { + runScript.completion({conf: {argv: {remain: ['npm', 'run']}}}, (er, results) => { + if (er) throw er - } + t.strictSame(results, ['hello', 'world']) t.end() }) @@ -99,9 +98,9 @@ t.test('default env, start, and restart scripts', async t => { }) await runScript(['start'], er => { - if (er) { + if (er) throw er - } + t.match(RUN_SCRIPTS, [ { path: 
npm.localPrefix, @@ -110,16 +109,16 @@ t.test('default env, start, and restart scripts', async t => { stdio: 'inherit', stdioString: true, pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: {}}, - event: 'start' - } + event: 'start', + }, ]) }) RUN_SCRIPTS.length = 0 await runScript(['env'], er => { - if (er) { + if (er) throw er - } + t.match(RUN_SCRIPTS, [ { path: npm.localPrefix, @@ -127,19 +126,22 @@ t.test('default env, start, and restart scripts', async t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { - env: 'env' - } }, - event: 'env' - } + pkg: { name: 'x', + version: '1.2.3', + _id: 'x@1.2.3', + scripts: { + env: 'env', + } }, + event: 'env', + }, ]) }) RUN_SCRIPTS.length = 0 await runScriptWin(['env'], er => { - if (er) { + if (er) throw er - } + t.match(RUN_SCRIPTS, [ { path: npm.localPrefix, @@ -147,19 +149,22 @@ t.test('default env, start, and restart scripts', async t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { - env: 'SET' - } }, - event: 'env' - } + pkg: { name: 'x', + version: '1.2.3', + _id: 'x@1.2.3', + scripts: { + env: 'SET', + } }, + event: 'env', + }, ]) }) RUN_SCRIPTS.length = 0 await runScript(['restart'], er => { - if (er) { + if (er) throw er - } + t.match(RUN_SCRIPTS, [ { path: npm.localPrefix, @@ -167,11 +172,14 @@ t.test('default env, start, and restart scripts', async t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { - restart: 'npm stop --if-present && npm start' - } }, - event: 'restart' - } + pkg: { name: 'x', + version: '1.2.3', + _id: 'x@1.2.3', + scripts: { + restart: 'npm stop --if-present && npm start', + } }, + event: 'restart', + }, ]) }) RUN_SCRIPTS.length = 0 @@ -180,29 +188,29 @@ t.test('default env, start, and restart scripts', async t => { t.test('try to run 
missing script', t => { npm.localPrefix = t.testdir({ 'package.json': JSON.stringify({ - scripts: { hello: 'world' } - }) + scripts: { hello: 'world' }, + }), }) t.test('no suggestions', async t => { await runScript(['notevenclose'], er => { t.match(er, { - message: 'missing script: notevenclose' + message: 'missing script: notevenclose', }) }) }) t.test('suggestions', async t => { await runScript(['helo'], er => { t.match(er, { - message: 'missing script: helo\n\nDid you mean this?\n hello' + message: 'missing script: helo\n\nDid you mean this?\n hello', }) }) }) t.test('with --if-present', async t => { npm.config.set('if-present', true) await runScript(['goodbye'], er => { - if (er) { + if (er) throw er - } + t.strictSame(RUN_SCRIPTS, [], 'did not try to run anything') }) }) @@ -216,15 +224,15 @@ t.test('run pre/post hooks', async t => { version: '1.2.3', scripts: { preenv: 'echo before the env', - postenv: 'echo after the env' - } - }) + postenv: 'echo after the env', + }, + }), }) await runScript(['env'], er => { - if (er) { + if (er) throw er - } + t.match(RUN_SCRIPTS, [ { event: 'preenv' }, { @@ -233,12 +241,15 @@ t.test('run pre/post hooks', async t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { - env: 'env' - } }, - event: 'env' + pkg: { name: 'x', + version: '1.2.3', + _id: 'x@1.2.3', + scripts: { + env: 'env', + } }, + event: 'env', }, - { event: 'postenv' } + { event: 'postenv' }, ]) }) RUN_SCRIPTS.length = 0 @@ -253,15 +264,15 @@ t.test('skip pre/post hooks when using ignoreScripts', async t => { version: '1.2.3', scripts: { preenv: 'echo before the env', - postenv: 'echo after the env' - } - }) + postenv: 'echo after the env', + }, + }), }) await runScript(['env'], er => { - if (er) { + if (er) throw er - } + t.deepEqual(RUN_SCRIPTS, [ { path: npm.localPrefix, @@ -269,14 +280,17 @@ t.test('skip pre/post hooks when using ignoreScripts', async t => { scriptShell: 
undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { - preenv: 'echo before the env', - postenv: 'echo after the env', - env: 'env' - } }, + pkg: { name: 'x', + version: '1.2.3', + _id: 'x@1.2.3', + scripts: { + preenv: 'echo before the env', + postenv: 'echo after the env', + env: 'env', + } }, banner: true, - event: 'env' - } + event: 'env', + }, ]) delete npm.flatOptions.ignoreScripts @@ -286,7 +300,9 @@ t.test('skip pre/post hooks when using ignoreScripts', async t => { t.test('run silent', async t => { npmlog.level = 'silent' - t.teardown(() => { npmlog.level = 'warn' }) + t.teardown(() => { + npmlog.level = 'warn' + }) npm.localPrefix = t.testdir({ 'package.json': JSON.stringify({ @@ -294,19 +310,19 @@ t.test('run silent', async t => { version: '1.2.3', scripts: { preenv: 'echo before the env', - postenv: 'echo after the env' - } - }) + postenv: 'echo after the env', + }, + }), }) await runScript(['env'], er => { - if (er) { + if (er) throw er - } + t.match(RUN_SCRIPTS, [ { event: 'preenv', - stdio: 'inherit' + stdio: 'inherit', }, { path: npm.localPrefix, @@ -314,16 +330,19 @@ t.test('run silent', async t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { - env: 'env' - } }, + pkg: { name: 'x', + version: '1.2.3', + _id: 'x@1.2.3', + scripts: { + env: 'env', + } }, event: 'env', - banner: false + banner: false, }, { event: 'postenv', - stdio: 'inherit' - } + stdio: 'inherit', + }, ]) }) RUN_SCRIPTS.length = 0 @@ -335,46 +354,43 @@ t.test('list scripts', async t => { start: 'node server.js', stop: 'node kill-server.js', preenv: 'echo before the env', - postenv: 'echo after the env' + postenv: 'echo after the env', } npm.localPrefix = t.testdir({ 'package.json': JSON.stringify({ name: 'x', version: '1.2.3', - scripts - }) + scripts, + }), }) await runScript([], er => { - if (er) { + if (er) throw er - } }) 
t.strictSame(output, [ - [ 'Lifecycle scripts included in x:' ], - [ ' test\n exit 2' ], - [ ' start\n node server.js' ], - [ ' stop\n node kill-server.js' ], - [ '\navailable via `npm run-script`:' ], - [ ' preenv\n echo before the env' ], - [ ' postenv\n echo after the env' ] + ['Lifecycle scripts included in x:'], + [' test\n exit 2'], + [' start\n node server.js'], + [' stop\n node kill-server.js'], + ['\navailable via `npm run-script`:'], + [' preenv\n echo before the env'], + [' postenv\n echo after the env'], ], 'basic report') output.length = 0 npmlog.level = 'silent' await runScript([], er => { - if (er) { + if (er) throw er - } }) t.strictSame(output, []) npmlog.level = 'warn' npm.flatOptions.json = true await runScript([], er => { - if (er) { + if (er) throw er - } }) t.strictSame(output, [[JSON.stringify(scripts, 0, 2)]], 'json report') output.length = 0 @@ -382,16 +398,15 @@ t.test('list scripts', async t => { npm.flatOptions.parseable = true await runScript([], er => { - if (er) { + if (er) throw er - } }) t.strictSame(output, [ - [ 'test:exit 2' ], - [ 'start:node server.js' ], - [ 'stop:node kill-server.js' ], - [ 'preenv:echo before the env' ], - [ 'postenv:echo after the env' ] + ['test:exit 2'], + ['start:node server.js'], + ['stop:node kill-server.js'], + ['preenv:echo before the env'], + ['postenv:echo after the env'], ]) output.length = 0 npm.flatOptions.parseable = false @@ -401,14 +416,13 @@ t.test('list scripts when no scripts', async t => { npm.localPrefix = t.testdir({ 'package.json': JSON.stringify({ name: 'x', - version: '1.2.3' - }) + version: '1.2.3', + }), }) await runScript([], er => { - if (er) { + if (er) throw er - } }) t.strictSame(output, [], 'nothing to report') output.length = 0 @@ -419,18 +433,17 @@ t.test('list scripts, only commands', async t => { 'package.json': JSON.stringify({ name: 'x', version: '1.2.3', - scripts: { preversion: 'echo doing the version dance' } - }) + scripts: { preversion: 'echo doing the version 
dance' }, + }), }) await runScript([], er => { - if (er) { + if (er) throw er - } }) t.strictSame(output, [ - ["Lifecycle scripts included in x:"], - [" preversion\n echo doing the version dance"], + ['Lifecycle scripts included in x:'], + [' preversion\n echo doing the version dance'], ]) output.length = 0 }) @@ -440,18 +453,17 @@ t.test('list scripts, only non-commands', async t => { 'package.json': JSON.stringify({ name: 'x', version: '1.2.3', - scripts: { glorp: 'echo doing the glerp glop' } - }) + scripts: { glorp: 'echo doing the glerp glop' }, + }), }) await runScript([], er => { - if (er) { + if (er) throw er - } }) t.strictSame(output, [ - ["Scripts available in x via `npm run-script`:"], - [" glorp\n echo doing the glerp glop"] + ['Scripts available in x via `npm run-script`:'], + [' glorp\n echo doing the glerp glop'], ]) output.length = 0 }) diff --git a/deps/npm/test/lib/test.js b/deps/npm/test/lib/test.js index 8b6d0662659cfe..9a44e4760a2a51 100644 --- a/deps/npm/test/lib/test.js +++ b/deps/npm/test/lib/test.js @@ -6,11 +6,11 @@ const npmock = { run: (args, cb) => { RUN_ARGS = args cb() - } - } + }, + }, } const test = requireInject('../../lib/test.js', { - '../../lib/npm.js': npmock + '../../lib/npm.js': npmock, }) t.test('run a test', t => { @@ -22,7 +22,7 @@ t.test('run a test', t => { }) const lcErr = Object.assign(new Error('should not see this'), { - code: 'ELIFECYCLE' + code: 'ELIFECYCLE', }) const otherErr = new Error('should see this') diff --git a/deps/npm/test/lib/token.js b/deps/npm/test/lib/token.js index dc5a8ad05e4448..f9888107223d9b 100644 --- a/deps/npm/test/lib/token.js +++ b/deps/npm/test/lib/token.js @@ -6,7 +6,7 @@ const mocks = { profile: {}, output: () => {}, log: {}, - readUserInfo: {} + readUserInfo: {}, } const tokenMock = requireInject('../../lib/token.js', { @@ -17,28 +17,26 @@ const tokenMock = requireInject('../../lib/token.js', { }, '../../lib/utils/read-user-info.js': mocks.readUserInfo, 'npm-profile': mocks.profile, - 
'npmlog': mocks.log + npmlog: mocks.log, }) const tokenWithMocks = (mockRequests) => { for (const mod in mockRequests) { - if (typeof mockRequests[mod] === 'function') { + if (typeof mockRequests[mod] === 'function') mocks[mod] = mockRequests[mod] - } else { - for (const key in mockRequests[mod]) { + else { + for (const key in mockRequests[mod]) mocks[mod][key] = mockRequests[mod][key] - } } } const reset = () => { for (const mod in mockRequests) { - if (typeof mockRequests[mod] === 'function') { + if (typeof mockRequests[mod] === 'function') mocks[mod] = () => {} - } else { - for (const key in mockRequests[mod]) { + else { + for (const key in mockRequests[mod]) delete mocks[mod][key] - } } } } @@ -51,9 +49,8 @@ test('completion', (t) => { const testComp = (argv, expect) => { tokenMock.completion({ conf: { argv: { remain: argv } } }, (err, res) => { - if (err) { + if (err) throw err - } t.strictSame(res, expect, argv.join(' ')) }) @@ -62,14 +59,14 @@ test('completion', (t) => { testComp(['npm', 'token'], [ 'list', 'revoke', - 'create' + 'create', ]) testComp(['npm', 'token', 'list'], []) testComp(['npm', 'token', 'revoke'], []) testComp(['npm', 'token', 'create'], []) - tokenMock.completion({ conf: { argv: { remain: ['npm', 'token', 'foobar' ] } } }, (err) => { + tokenMock.completion({ conf: { argv: { remain: ['npm', 'token', 'foobar'] } } }, (err) => { t.match(err, { message: 'foobar not recognized' }) }) }) @@ -77,14 +74,14 @@ test('completion', (t) => { test('token foobar', (t) => { t.plan(2) - const [token, reset] = tokenWithMocks({ + const [, reset] = tokenWithMocks({ log: { gauge: { show: (name) => { t.equal(name, 'token', 'shows a gauge') - } - } - } + }, + }, + }, }) t.tearDown(reset) @@ -104,14 +101,14 @@ test('token list', (t) => { cidr_whitelist: null, readonly: false, created: now, - updated: now + updated: now, }, { key: 'abcd1256', token: 'hgfe8765', cidr_whitelist: ['192.168.1.1/32'], readonly: true, created: now, - updated: now + updated: now, }] 
const [token, reset] = tokenWithMocks({ @@ -121,25 +118,25 @@ test('token list', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, profile: { listTokens: (conf) => { t.same(conf.auth, { token: 'thisisnotarealtoken', otp: '123456' }) return tokens - } + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token') - } + }, }, info: (type, msg) => { t.equal(type, 'token') t.equal(msg, 'getting list') - } + }, }, output: (spec) => { const lines = spec.split(/\r?\n/) @@ -152,7 +149,7 @@ test('token list', (t) => { t.match(lines[5], ` ${now.slice(0, 10)} `, 'includes the trimmed creation timestamp') t.match(lines[5], ' yes ', 'includes the "no" string for readonly state') t.match(lines[5], ` ${tokens[1].cidr_whitelist.join(',')} `, 'includes the cidr whitelist') - } + }, }) t.tearDown(reset) @@ -172,7 +169,7 @@ test('token list json output', (t) => { cidr_whitelist: null, readonly: false, created: now, - updated: now + updated: now, }] const [token, reset] = tokenWithMocks({ @@ -182,31 +179,31 @@ test('token list json output', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { username: 'foo', password: 'bar' } - } - } + }, + }, }, profile: { listTokens: (conf) => { t.same(conf.auth, { basic: { username: 'foo', password: 'bar' } }, 'passes the correct auth') return tokens - } + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token') - } + }, }, info: (type, msg) => { t.equal(type, 'token') t.equal(msg, 'getting list') - } + }, }, output: (spec) => { t.type(spec, 'string', 'is called with a string') const parsed = JSON.parse(spec) t.match(parsed, tokens, 'prints the json parsed tokens') - } + }, }) t.tearDown(reset) @@ -226,14 +223,14 @@ test('token list parseable output', (t) => { cidr_whitelist: null, readonly: false, created: now, - updated: now + updated: now, }, { 
key: 'efgh5678ijkl9101', token: 'hgfe8765', cidr_whitelist: ['192.168.1.1/32'], readonly: true, created: now, - updated: now + updated: now, }] let callCount = 0 @@ -245,37 +242,36 @@ test('token list parseable output', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { auth: Buffer.from('foo:bar').toString('base64') } - } - } + }, + }, }, profile: { listTokens: (conf) => { t.same(conf.auth, { basic: { username: 'foo', password: 'bar' } }, 'passes the correct auth') return tokens - } + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token') - } + }, }, info: (type, msg) => { t.equal(type, 'token') t.equal(msg, 'getting list') - } + }, }, output: (spec) => { ++callCount t.type(spec, 'string', 'is called with a string') - if (callCount === 1) { + if (callCount === 1) t.equal(spec, ['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'), 'prints header') - } else if (callCount === 2) { + else if (callCount === 2) t.equal(spec, [tokens[0].key, tokens[0].token, tokens[0].created, tokens[0].readonly, ''].join('\t'), 'prints token info') - } else { + else t.equal(spec, [tokens[1].key, tokens[1].token, tokens[1].created, tokens[1].readonly, tokens[1].cidr_whitelist.join(',')].join('\t'), 'prints token info') - } - } + }, }) t.tearDown(reset) @@ -295,14 +291,14 @@ test('token revoke', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return {} - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } + }, }, newItem: (action, len) => { t.equal(action, 'removing tokens') @@ -311,24 +307,24 @@ test('token revoke', (t) => { info: (name, progress) => { t.equal(name, 'token') t.equal(progress, 'getting existing list') - } + }, } - } + }, }, profile: { listTokens: (conf) => { t.same(conf.auth, {}, 'passes the correct empty auth') return Promise.resolve([ - { key: 'abcd1234' } + { key: 
'abcd1234' }, ]) }, removeToken: (key) => { t.equal(key, 'abcd1234', 'deletes the correct token') - } + }, }, output: (spec) => { t.equal(spec, 'Removed 1 token') - } + }, }) t.tearDown(reset) @@ -348,14 +344,14 @@ test('token revoke multiple tokens', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } + }, }, newItem: (action, len) => { t.equal(action, 'removing tokens') @@ -364,23 +360,23 @@ test('token revoke multiple tokens', (t) => { info: (name, progress) => { t.equal(name, 'token') t.equal(progress, 'getting existing list') - } + }, } - } + }, }, profile: { listTokens: () => Promise.resolve([ { key: 'abcd1234' }, - { key: 'efgh5678' } + { key: 'efgh5678' }, ]), removeToken: (key) => { // this will run twice t.ok(['abcd1234', 'efgh5678'].includes(key), 'deletes the correct token') - } + }, }, output: (spec) => { t.equal(spec, 'Removed 2 tokens') - } + }, }) t.tearDown(reset) @@ -400,14 +396,14 @@ test('token revoke json output', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } + }, }, newItem: (action, len) => { t.equal(action, 'removing tokens') @@ -416,23 +412,23 @@ test('token revoke json output', (t) => { info: (name, progress) => { t.equal(name, 'token') t.equal(progress, 'getting existing list') - } + }, } - } + }, }, profile: { listTokens: () => Promise.resolve([ - { key: 'abcd1234' } + { key: 'abcd1234' }, ]), removeToken: (key) => { t.equal(key, 'abcd1234', 'deletes the correct token') - } + }, }, output: (spec) => { t.type(spec, 'string', 'is given a string') const parsed = JSON.parse(spec) t.same(parsed, ['abcd1234'], 'logs the token as json') - } + 
}, }) t.tearDown(reset) @@ -452,14 +448,14 @@ test('token revoke parseable output', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } + }, }, newItem: (action, len) => { t.equal(action, 'removing tokens') @@ -468,21 +464,21 @@ test('token revoke parseable output', (t) => { info: (name, progress) => { t.equal(name, 'token') t.equal(progress, 'getting existing list') - } + }, } - } + }, }, profile: { listTokens: () => Promise.resolve([ - { key: 'abcd1234' } + { key: 'abcd1234' }, ]), removeToken: (key) => { t.equal(key, 'abcd1234', 'deletes the correct token') - } + }, }, output: (spec) => { t.equal(spec, 'abcd1234', 'logs the token as a string') - } + }, }) t.tearDown(reset) @@ -502,14 +498,14 @@ test('token revoke by token', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } + }, }, newItem: (action, len) => { t.equal(action, 'removing tokens') @@ -518,21 +514,21 @@ test('token revoke by token', (t) => { info: (name, progress) => { t.equal(name, 'token') t.equal(progress, 'getting existing list') - } + }, } - } + }, }, profile: { listTokens: () => Promise.resolve([ - { key: 'abcd1234', token: 'efgh5678' } + { key: 'abcd1234', token: 'efgh5678' }, ]), removeToken: (key) => { t.equal(key, 'efgh5678', 'passes through user input') - } + }, }, output: (spec) => { t.equal(spec, 'Removed 1 token') - } + }, }) t.tearDown(reset) @@ -550,9 +546,9 @@ test('token revoke requires an id', (t) => { gauge: { show: (name) => { t.equal(name, 'token') - } - } - } + }, + }, + }, }) t.tearDown(reset) @@ -572,14 +568,14 @@ test('token revoke ambiguous id errors', (t) => { 
getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } + }, }, newItem: (action, len) => { t.equal(action, 'removing tokens') @@ -588,16 +584,16 @@ test('token revoke ambiguous id errors', (t) => { info: (name, progress) => { t.equal(name, 'token') t.equal(progress, 'getting existing list') - } + }, } - } + }, }, profile: { listTokens: () => Promise.resolve([ { key: 'abcd1234' }, - { key: 'abcd5678' } - ]) - } + { key: 'abcd5678' }, + ]), + }, }) t.tearDown(reset) @@ -617,14 +613,14 @@ test('token revoke unknown id errors', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } + }, }, newItem: (action, len) => { t.equal(action, 'removing tokens') @@ -633,15 +629,15 @@ test('token revoke unknown id errors', (t) => { info: (name, progress) => { t.equal(name, 'token') t.equal(progress, 'getting existing list') - } + }, } - } + }, }, profile: { listTokens: () => Promise.resolve([ - { key: 'abcd1234' } - ]) - } + { key: 'abcd1234' }, + ]), + }, }) t.tearDown(reset) @@ -664,22 +660,22 @@ test('token create', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } + }, }, info: (name, message) => { t.equal(name, 'token') t.equal(message, 'creating') - } + }, }, readUserInfo: { - password: () => Promise.resolve(password) + password: () => Promise.resolve(password), }, profile: { createToken: (pw, readonly, cidr) => { @@ -692,9 +688,9 @@ test('token create', (t) => { created: now, updated: now, 
readonly: false, - cidr_whitelist: [] + cidr_whitelist: [], } - } + }, }, output: (spec) => { const lines = spec.split(/\r?\n/) @@ -705,7 +701,7 @@ test('token create', (t) => { t.match(lines[5], 'readonly') t.match(lines[5], 'false', 'prints the readonly flag') t.match(lines[7], 'cidr_whitelist') - } + }, }) t.tearDown(reset) @@ -728,22 +724,22 @@ test('token create json output', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } + }, }, info: (name, message) => { t.equal(name, 'token') t.equal(message, 'creating') - } + }, }, readUserInfo: { - password: () => Promise.resolve(password) + password: () => Promise.resolve(password), }, profile: { createToken: (pw, readonly, cidr) => { @@ -756,15 +752,15 @@ test('token create json output', (t) => { created: now, updated: now, readonly: false, - cidr_whitelist: [] + cidr_whitelist: [], } - } + }, }, output: (spec) => { t.type(spec, 'string', 'outputs a string') const parsed = JSON.parse(spec) t.same(parsed, { token: 'efgh5678', created: now, readonly: false, cidr_whitelist: [] }, 'outputs the correct object') - } + }, }) t.tearDown(reset) @@ -788,22 +784,22 @@ test('token create parseable output', (t) => { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } + }, }, info: (name, message) => { t.equal(name, 'token') t.equal(message, 'creating') - } + }, }, readUserInfo: { - password: () => Promise.resolve(password) + password: () => Promise.resolve(password), }, profile: { createToken: (pw, readonly, cidr) => { @@ -816,22 +812,21 @@ test('token create parseable output', (t) => { created: now, updated: now, readonly: false, - 
cidr_whitelist: [] + cidr_whitelist: [], } - } + }, }, output: (spec) => { ++callCount - if (callCount === 1) { + if (callCount === 1) t.match(spec, 'token\tefgh5678', 'prints the token') - } else if (callCount === 2) { + else if (callCount === 2) t.match(spec, `created\t${now}`, 'prints the created timestamp') - } else if (callCount === 3) { + else if (callCount === 3) t.match(spec, 'readonly\tfalse', 'prints the readonly flag') - } else { + else t.match(spec, 'cidr_whitelist\t', 'prints the cidr whitelist') - } - } + }, }) t.tearDown(reset) @@ -844,28 +839,28 @@ test('token create parseable output', (t) => { test('token create ipv6 cidr', (t) => { t.plan(4) - const now = new Date().toISOString() const password = 'thisisnotreallyapassword' const [token, reset] = tokenWithMocks({ npm: { - flatOptions: { registry: 'https://registry.npmjs.org', cidr: '::1/128' }, config: { + flatOptions: { registry: 'https://registry.npmjs.org', cidr: '::1/128' }, + config: { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { t.equal(name, 'token', 'starts a gauge') - } - } + }, + }, }, readUserInfo: { - password: () => Promise.resolve(password) - } + password: () => Promise.resolve(password), + }, }) t.tearDown(reset) @@ -879,28 +874,28 @@ test('token create ipv6 cidr', (t) => { test('token create invalid cidr', (t) => { t.plan(4) - const now = new Date().toISOString() const password = 'thisisnotreallyapassword' const [token, reset] = tokenWithMocks({ npm: { - flatOptions: { registry: 'https://registry.npmjs.org', cidr: 'apple/cider' }, config: { + flatOptions: { registry: 'https://registry.npmjs.org', cidr: 'apple/cider' }, + config: { getCredentialsByURI: (uri) => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } - } - } + }, + }, }, log: { gauge: { show: (name) => { 
t.equal(name, 'token', 'starts a gauge') - } - } + }, + }, }, readUserInfo: { - password: () => Promise.resolve(password) - } + password: () => Promise.resolve(password), + }, }) t.tearDown(reset) diff --git a/deps/npm/test/lib/utils/audit-error.js b/deps/npm/test/lib/utils/audit-error.js index f183a16e8d005f..cc5f4c006e14fc 100644 --- a/deps/npm/test/lib/utils/audit-error.js +++ b/deps/npm/test/lib/utils/audit-error.js @@ -6,14 +6,14 @@ const npm = { command: null, flatOptions: {}, log: { - warn: (...msg) => LOGS.push(msg) - } + warn: (...msg) => LOGS.push(msg), + }, } const OUTPUT = [] const output = (...msg) => OUTPUT.push(msg) const auditError = requireInject('../../../lib/utils/audit-error.js', { '../../../lib/npm.js': npm, - '../../../lib/utils/output.js': output + '../../../lib/utils/output.js': output, }) t.afterEach(cb => { @@ -40,10 +40,10 @@ t.test('error, not audit command', t => { method: 'POST', uri: 'https://example.com/not/a/registry', headers: { - head: ['ers'] + head: ['ers'], }, - statusCode: '420' - } + statusCode: '420', + }, }), true, 'had error') t.strictSame(OUTPUT, [], 'no output') t.strictSame(LOGS, [], 'no warnings') @@ -60,14 +60,14 @@ t.test('error, audit command, not json', t => { method: 'POST', uri: 'https://example.com/not/a/registry', headers: { - head: ['ers'] + head: ['ers'], }, - statusCode: '420' - } + statusCode: '420', + }, })) - t.strictSame(OUTPUT, [ [ 'body' ] ], 'some output') - t.strictSame(LOGS, [ [ 'audit', 'message' ] ], 'some warnings') + t.strictSame(OUTPUT, [['body']], 'some output') + t.strictSame(LOGS, [['audit', 'message']], 'some warnings') t.end() }) @@ -81,10 +81,10 @@ t.test('error, audit command, json', t => { method: 'POST', uri: 'https://example.com/not/a/registry', headers: { - head: ['ers'] + head: ['ers'], }, - statusCode: '420' - } + statusCode: '420', + }, })) t.strictSame(OUTPUT, [ @@ -102,9 +102,9 @@ t.test('error, audit command, json', t => { ' "body": {\n' + ' "response": "body"\n' + ' }\n' + - 
'}' - ] + '}', + ], ], 'some output') - t.strictSame(LOGS, [ [ 'audit', 'message' ] ], 'some warnings') + t.strictSame(LOGS, [['audit', 'message']], 'some warnings') t.end() }) diff --git a/deps/npm/test/lib/utils/cleanup-log-files.js b/deps/npm/test/lib/utils/cleanup-log-files.js index ee2c11e62ab7ae..7af0633fe715dd 100644 --- a/deps/npm/test/lib/utils/cleanup-log-files.js +++ b/deps/npm/test/lib/utils/cleanup-log-files.js @@ -6,7 +6,7 @@ const rimraf = require('rimraf') const mocks = { glob, rimraf } const cleanup = requireInject('../../../lib/utils/cleanup-log-files.js', { glob: (...args) => mocks.glob(...args), - rimraf: (...args) => mocks.rimraf(...args) + rimraf: (...args) => mocks.rimraf(...args), }) const { basename } = require('path') @@ -19,15 +19,15 @@ t.test('clean up those files', t => { '2-debug.log': 'hello', '3-debug.log': 'hello', '4-debug.log': 'hello', - '5-debug.log': 'hello' - } + '5-debug.log': 'hello', + }, }) const warn = (...warning) => t.fail('failed cleanup', { warning }) return cleanup(cache, 3, warn).then(() => { t.strictSame(fs.readdirSync(cache + '/_logs').sort(), [ '3-debug.log', '4-debug.log', - '5-debug.log' + '5-debug.log', ]) }) }) @@ -36,14 +36,14 @@ t.test('nothing to clean up', t => { const cache = t.testdir({ _logs: { '4-debug.log': 'hello', - '5-debug.log': 'hello' - } + '5-debug.log': 'hello', + }, }) const warn = (...warning) => t.fail('failed cleanup', { warning }) return cleanup(cache, 3, warn).then(() => { t.strictSame(fs.readdirSync(cache + '/_logs').sort(), [ '4-debug.log', - '5-debug.log' + '5-debug.log', ]) }) }) @@ -66,15 +66,15 @@ t.test('rimraf fail', t => { '2-debug.log': 'hello', '3-debug.log': 'hello', '4-debug.log': 'hello', - '5-debug.log': 'hello' - } + '5-debug.log': 'hello', + }, }) const warnings = [] const warn = (...warning) => warnings.push(basename(warning[2])) return cleanup(cache, 3, warn).then(() => { t.strictSame(warnings.sort((a, b) => a.localeCompare(b)), [ '1-debug.log', - '2-debug.log' + 
'2-debug.log', ]) }) }) diff --git a/deps/npm/test/lib/utils/completion/installed-deep.js b/deps/npm/test/lib/utils/completion/installed-deep.js index 74c15e8f77c9f8..a2a3756104f4b7 100644 --- a/deps/npm/test/lib/utils/completion/installed-deep.js +++ b/deps/npm/test/lib/utils/completion/installed-deep.js @@ -7,19 +7,25 @@ let globalDir = 'MISSING_GLOBAL_DIR' const _flatOptions = { depth: Infinity, global: false, - get prefix () { return prefix } + get prefix () { + return prefix + }, } const installedDeep = requireInject('../../../../lib/utils/completion/installed-deep.js', { '../../../../lib/npm.js': { flatOptions: _flatOptions, - get prefix () { return _flatOptions.prefix }, - get globalDir () { return globalDir }, + get prefix () { + return _flatOptions.prefix + }, + get globalDir () { + return globalDir + }, config: { get (key) { return _flatOptions[key] - } - } - } + }, + }, + }, }) const fixture = { @@ -29,14 +35,14 @@ const fixture = { dependencies: { a: '^1.0.0', b: '^1.0.0', - c: '^1.0.0' + c: '^1.0.0', }, devDependencies: { - d: '^1.0.0' + d: '^1.0.0', }, peerDependencies: { - e: '^1.0.0' - } + e: '^1.0.0', + }, }), node_modules: { a: { @@ -44,33 +50,33 @@ const fixture = { name: 'a', version: '1.0.0', dependencies: { - f: '^1.0.0' - } - }) + f: '^1.0.0', + }, + }), }, b: { 'package.json': JSON.stringify({ name: 'b', - version: '1.0.0' - }) + version: '1.0.0', + }), }, c: { 'package.json': JSON.stringify({ name: 'c', - version: '1.0.0' - }) + version: '1.0.0', + }), }, d: { 'package.json': JSON.stringify({ name: 'd', - version: '1.0.0' - }) + version: '1.0.0', + }), }, e: { 'package.json': JSON.stringify({ name: 'e', - version: '1.0.0' - }) + version: '1.0.0', + }), }, f: { 'package.json': JSON.stringify({ @@ -78,8 +84,8 @@ const fixture = { version: '1.0.0', dependencies: { g: '^1.0.0', - e: '^2.0.0' - } + e: '^2.0.0', + }, }), node_modules: { e: { @@ -87,27 +93,27 @@ const fixture = { name: 'e', version: '2.0.0', dependencies: { - bb: '^1.0.0' - } + 
bb: '^1.0.0', + }, }), node_modules: { bb: { 'package.json': JSON.stringify({ name: 'bb', - version: '1.0.0' - }) - } - } - } - } + version: '1.0.0', + }), + }, + }, + }, + }, }, g: { 'package.json': JSON.stringify({ name: 'g', - version: '1.0.0' - }) - } - } + version: '1.0.0', + }), + }, + }, } const globalFixture = { @@ -115,33 +121,33 @@ const globalFixture = { foo: { 'package.json': JSON.stringify({ name: 'foo', - version: '1.0.0' - }) + version: '1.0.0', + }), }, bar: { 'package.json': JSON.stringify({ name: 'bar', version: '1.0.0', dependencies: { - 'a-bar': '^1.0.0' - } + 'a-bar': '^1.0.0', + }, }), node_modules: { 'a-bar': { 'package.json': JSON.stringify({ name: 'a-bar', - version: '1.0.0' - }) - } - } - } - } + version: '1.0.0', + }), + }, + }, + }, + }, } test('get list of package names', (t) => { const fix = t.testdir({ local: fixture, - global: globalFixture + global: globalFixture, }) prefix = resolve(fix, 'local') @@ -152,12 +158,12 @@ test('get list of package names', (t) => { t.deepEqual( res, [ - [ 'bar', '-g' ], - [ 'foo', '-g' ], - [ 'a-bar', '-g' ], + ['bar', '-g'], + ['foo', '-g'], + ['a-bar', '-g'], 'a', 'b', 'c', 'd', 'e', 'f', - 'g', 'bb' + 'g', 'bb', ], 'should return list of package names and global flag' ) @@ -168,7 +174,7 @@ test('get list of package names', (t) => { test('get list of package names as global', (t) => { const fix = t.testdir({ local: fixture, - global: globalFixture + global: globalFixture, }) prefix = resolve(fix, 'local') @@ -183,7 +189,7 @@ test('get list of package names as global', (t) => { [ 'bar', 'foo', - 'a-bar' + 'a-bar', ], 'should return list of global packages with no extra flags' ) @@ -195,7 +201,7 @@ test('get list of package names as global', (t) => { test('limit depth', (t) => { const fix = t.testdir({ local: fixture, - global: globalFixture + global: globalFixture, }) prefix = resolve(fix, 'local') @@ -208,12 +214,12 @@ test('limit depth', (t) => { t.deepEqual( res, [ - [ 'bar', '-g' ], - [ 'foo', '-g' 
], + ['bar', '-g'], + ['foo', '-g'], 'a', 'b', 'c', 'd', 'e', 'f', - 'g' + 'g', ], 'should print only packages up to the specified depth' ) @@ -225,7 +231,7 @@ test('limit depth', (t) => { test('limit depth as global', (t) => { const fix = t.testdir({ local: fixture, - global: globalFixture + global: globalFixture, }) prefix = resolve(fix, 'local') @@ -240,7 +246,7 @@ test('limit depth as global', (t) => { res, [ 'bar', - 'foo' + 'foo', ], 'should reorder so that packages above that level depth goes last' ) diff --git a/deps/npm/test/lib/utils/completion/installed-shallow.js b/deps/npm/test/lib/utils/completion/installed-shallow.js index eb628a8ce81e31..1d6369bc782545 100644 --- a/deps/npm/test/lib/utils/completion/installed-shallow.js +++ b/deps/npm/test/lib/utils/completion/installed-shallow.js @@ -6,7 +6,7 @@ const { resolve } = require('path') const p = '../../../../lib/utils/completion/installed-shallow.js' const installed = requireInject(p, { - '../../../../lib/npm.js': npm + '../../../../lib/npm.js': npm, }) t.test('global not set, include globals with -g', t => { @@ -15,32 +15,32 @@ t.test('global not set, include globals with -g', t => { node_modules: { x: {}, '@scope': { - y: {} - } - } + y: {}, + }, + }, }, local: { node_modules: { a: {}, '@scope': { - b: {} - } - } - } + b: {}, + }, + }, + }, }) npm.globalDir = resolve(dir, 'global/node_modules') npm.localDir = resolve(dir, 'local/node_modules') flatOptions.global = false const opt = { conf: { argv: { remain: [] } } } installed(opt, (er, res) => { - if (er) { + if (er) throw er - } + t.strictSame(res.sort(), [ '@scope/y -g', 'x -g', 'a', - '@scope/b' + '@scope/b', ].sort()) t.end() }) @@ -52,18 +52,18 @@ t.test('global set, include globals and not locals', t => { node_modules: { x: {}, '@scope': { - y: {} - } - } + y: {}, + }, + }, }, local: { node_modules: { a: {}, '@scope': { - b: {} - } - } - } + b: {}, + }, + }, + }, }) npm.globalDir = resolve(dir, 'global/node_modules') npm.localDir = resolve(dir, 
'local/node_modules') @@ -72,7 +72,7 @@ t.test('global set, include globals and not locals', t => { installed(opt, (er, res) => { t.strictSame(res.sort(), [ '@scope/y', - 'x' + 'x', ].sort()) t.end() }) @@ -84,27 +84,27 @@ t.test('more than 3 items in argv, skip it', t => { node_modules: { x: {}, '@scope': { - y: {} - } - } + y: {}, + }, + }, }, local: { node_modules: { a: {}, '@scope': { - b: {} - } - } - } + b: {}, + }, + }, + }, }) npm.globalDir = resolve(dir, 'global/node_modules') npm.localDir = resolve(dir, 'local/node_modules') flatOptions.global = false const opt = { conf: { argv: { remain: [1, 2, 3, 4, 5, 6] } } } installed(opt, (er, res) => { - if (er) { + if (er) throw er - } + t.strictSame(res, null) t.end() }) diff --git a/deps/npm/test/lib/utils/completion/none.js b/deps/npm/test/lib/utils/completion/none.js index 27f713b81e16b8..70488be07ec159 100644 --- a/deps/npm/test/lib/utils/completion/none.js +++ b/deps/npm/test/lib/utils/completion/none.js @@ -1,6 +1,6 @@ const t = require('tap') const none = require('../../../../lib/utils/completion/none.js') -none({any:'thing'}, (er, res) => { +none({any: 'thing'}, (er, res) => { t.equal(er, null) t.strictSame(res, []) }) diff --git a/deps/npm/test/lib/utils/config.js b/deps/npm/test/lib/utils/config.js index e8133eecb24f1b..38fbe6753e75b0 100644 --- a/deps/npm/test/lib/utils/config.js +++ b/deps/npm/test/lib/utils/config.js @@ -4,25 +4,25 @@ Object.defineProperty(process, 'umask', { value: () => 0o26, writable: true, configurable: true, - enumerable: true + enumerable: true, }) // have to fake the node version, or else it'll only pass on this one Object.defineProperty(process, 'version', { - value: 'v14.8.0' + value: 'v14.8.0', }) t.formatSnapshot = obj => { - if (typeof obj !== 'object' || !obj || !obj.types) { + if (typeof obj !== 'object' || !obj || !obj.types) return obj - } + return { ...obj, defaults: { ...obj.defaults, - cache: '{CACHE DIR} ' + path.basename(obj.defaults.cache) + cache: '{CACHE DIR} 
' + path.basename(obj.defaults.cache), }, - types: formatTypes(obj.types) + types: formatTypes(obj.types), } } @@ -38,19 +38,18 @@ const formatTypes = (types) => Object.entries(types).map(([key, value]) => { }, {}) const formatTypeValue = (value) => { - if (Array.isArray(value)) { + if (Array.isArray(value)) return value.map(formatTypeValue) - } else if (value === url) { + else if (value === url) return '{URL MODULE}' - } else if (value === path) { + else if (value === path) return '{PATH MODULE}' - } else if (value === semver) { + else if (value === semver) return '{SEMVER MODULE}' - } else if (typeof value === 'function') { + else if (typeof value === 'function') return `{${value.name} TYPE}` - } else { + else return value - } } process.env.ComSpec = 'cmd.exe' @@ -65,8 +64,8 @@ const networkInterfacesThrow = () => { throw new Error('no network interfaces for some reason') } const networkInterfaces = () => ({ - 'eth420': [{ address: '127.0.0.1' }], - 'eth69': [{ address: 'no place like home' }] + eth420: [{ address: '127.0.0.1' }], + eth69: [{ address: 'no place like home' }], }) const tmpdir = () => '/tmp' const os = { networkInterfaces, tmpdir } @@ -77,7 +76,7 @@ t.test('working network interfaces, not windows', t => { os, '@npmcli/ci-detect': () => false, '../../../lib/utils/is-windows.js': false, - '../../../package.json': pkg + '../../../package.json': pkg, }) t.matchSnapshot(config) t.end() @@ -88,7 +87,7 @@ t.test('no working network interfaces, on windows', t => { os: { tmpdir, networkInterfaces: networkInterfacesThrow }, '@npmcli/ci-detect': () => false, '../../../lib/utils/is-windows.js': true, - '../../../package.json': pkg + '../../../package.json': pkg, }) t.matchSnapshot(config) t.end() @@ -99,21 +98,21 @@ t.test('no process.umask() method', t => { value: null, writable: true, configurable: true, - enumerable: true + enumerable: true, }) t.teardown(() => { Object.defineProperty(process, 'umask', { value: () => 0o26, writable: true, configurable: 
true, - enumerable: true + enumerable: true, }) }) const config = requireInject('../../../lib/utils/config.js', { os: { tmpdir, networkInterfaces: networkInterfacesThrow }, '@npmcli/ci-detect': () => false, '../../../lib/utils/is-windows.js': true, - '../../../package.json': pkg + '../../../package.json': pkg, }) t.equal(config.defaults.umask, 0o22) t.matchSnapshot(config) @@ -125,7 +124,7 @@ t.test('no comspec on windows', t => { const config = requireInject('../../../lib/utils/config.js', { os: { tmpdir, networkInterfaces: networkInterfacesThrow }, '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': true + '../../../lib/utils/is-windows.js': true, }) t.equal(config.defaults.shell, 'cmd') t.end() @@ -136,7 +135,7 @@ t.test('no shell on posix', t => { const config = requireInject('../../../lib/utils/config.js', { os, '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': false + '../../../lib/utils/is-windows.js': false, }) t.equal(config.defaults.shell, 'sh') t.end() @@ -147,7 +146,7 @@ t.test('no EDITOR env, use VISUAL', t => { const config = requireInject('../../../lib/utils/config.js', { os, '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': false + '../../../lib/utils/is-windows.js': false, }) t.equal(config.defaults.editor, 'mate') t.end() @@ -158,7 +157,7 @@ t.test('no VISUAL, use system default, not windows', t => { const config = requireInject('../../../lib/utils/config.js', { os, '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': false + '../../../lib/utils/is-windows.js': false, }) t.equal(config.defaults.editor, 'vi') t.end() @@ -169,7 +168,7 @@ t.test('no VISUAL, use system default, not windows', t => { const config = requireInject('../../../lib/utils/config.js', { os, '@npmcli/ci-detect': () => false, - '../../../lib/utils/is-windows.js': true + '../../../lib/utils/is-windows.js': true, }) t.equal(config.defaults.editor, 'notepad.exe') t.end() diff --git 
a/deps/npm/test/lib/utils/error-handler.js b/deps/npm/test/lib/utils/error-handler.js index 9e32dfc5ff8dbf..2dc116a4d31711 100644 --- a/deps/npm/test/lib/utils/error-handler.js +++ b/deps/npm/test/lib/utils/error-handler.js @@ -26,23 +26,23 @@ t.cleanSnapshot = (str) => redactCwd(str) // internal modules mocks const cacheFile = { append: () => null, - write: () => null + write: () => null, } const config = { values: { cache: 'cachefolder', - timing: true + timing: true, }, loaded: true, updateNotification: null, get (key) { return this.values[key] - } + }, } const npm = { version: '1.0.0', - config + config, } const npmlog = { @@ -52,26 +52,34 @@ const npmlog = { id: this.record.length, level, message: args.reduce((res, i) => `${res} ${i.message ? i.message : i}`, ''), - prefix: level !== 'verbose' ? 'foo' : '' + prefix: level !== 'verbose' ? 'foo' : '', }) }, - error (...args) { this.log('error', ...args) }, - info (...args) { this.log('info', ...args) }, + error (...args) { + this.log('error', ...args) + }, + info (...args) { + this.log('info', ...args) + }, level: 'silly', levels: { silly: 0, verbose: 1, info: 2, error: 3, - silent: 4 + silent: 4, + }, + notice (...args) { + this.log('notice', ...args) }, - notice (...args) { this.log('notice', ...args) }, record: [], - verbose (...args) { this.log('verbose', ...args) } + verbose (...args) { + this.log('verbose', ...args) + }, } const metrics = { - stop: () => null + stop: () => null, } // overrides OS type/release for cross platform snapshots @@ -96,8 +104,10 @@ process = Object.assign( exit () {}, exitCode: 0, version: 'v1.0.0', - stdout: { write (_, cb) { cb() } }, - stderr: { write () {} } + stdout: { write (_, cb) { + cb() + } }, + stderr: { write () {} }, } ) // needs to put process back in its place @@ -112,10 +122,10 @@ const mocks = { '../../../lib/utils/error-message.js': (err) => ({ ...err, summary: [['ERR', err.message]], - detail: [['ERR', err.message]] + detail: [['ERR', err.message]], }), 
'../../../lib/utils/metrics.js': metrics, - '../../../lib/utils/cache-file.js': cacheFile + '../../../lib/utils/cache-file.js': cacheFile, } requireInject.installGlobally('../../../lib/utils/error-handler.js', mocks) @@ -226,8 +236,8 @@ t.test('console.log output using --json', (t) => { error: { code: 'EBADTHING', // should default error code to E[A-Z]+ summary: 'Error: EBADTHING Something happened', - detail: 'Error: EBADTHING Something happened' - } + detail: 'Error: EBADTHING Something happened', + }, }, 'should output expected json output' ) @@ -246,7 +256,7 @@ t.test('throw a non-error obj', (t) => { const weirdError = { code: 'ESOMETHING', - message: 'foo bar' + message: 'foo bar', } const _logError = npmlog.error @@ -379,7 +389,7 @@ t.test('uses code from errno', (t) => { errorHandler(Object.assign( new Error('Error with errno'), { - errno: 127 + errno: 127, } )) @@ -408,7 +418,7 @@ t.test('uses exitCode as code if using a number', (t) => { errorHandler(Object.assign( new Error('Error with code type number'), { - code: 404 + code: 404, } )) @@ -464,7 +474,7 @@ t.test('defaults to log error msg if stack is missing', (t) => { new Error('Error with no stack'), { code: 'ENOSTACK', - errno: 127 + errno: 127, } ) delete noStackErr.stack diff --git a/deps/npm/test/lib/utils/error-message.js b/deps/npm/test/lib/utils/error-message.js index 2647a8e1994ea1..86db7c94bad496 100644 --- a/deps/npm/test/lib/utils/error-message.js +++ b/deps/npm/test/lib/utils/error-message.js @@ -7,13 +7,13 @@ process.getgid = () => 420 Object.defineProperty(process, 'arch', { value: 'x64', - configurable: true + configurable: true, }) const beWindows = () => { Object.defineProperty(process, 'platform', { value: 'win32', - configurable: true + configurable: true, }) delete require.cache[require.resolve('../../../lib/utils/is-windows.js')] } @@ -21,7 +21,7 @@ const beWindows = () => { const bePosix = () => { Object.defineProperty(process, 'platform', { value: 'posix', - configurable: true + 
configurable: true, }) delete require.cache[require.resolve('../../../lib/utils/is-windows.js')] } @@ -33,22 +33,21 @@ npm.config = { loaded: false, localPrefix: '/some/prefix/dir', get: key => { - if (key === 'cache') { + if (key === 'cache') return CACHE - } else if (key === 'node-version') { + else if (key === 'node-version') return '99.99.99' - } else if (key === 'global') { + else if (key === 'global') return false - } else { + else throw new Error('unexpected config lookup: ' + key) - } - } + }, } npm.version = '123.69.420-npm' Object.defineProperty(process, 'version', { value: '123.69.420-node', - configurable: true + configurable: true, }) const npmlog = require('npmlog') @@ -64,8 +63,8 @@ const errorMessage = requireInject('../../../lib/utils/error-message.js', { report: (...args) => { EXPLAIN_CALLED.push(args) return 'explanation' - } - } + }, + }, }) t.test('just simple messages', t => { @@ -92,7 +91,7 @@ t.test('just simple messages', t => { 'EINVALIDTYPE', 'ETOOMANYARGS', 'ETARGET', - 'E403' + 'E403', ] t.plan(codes.length) codes.forEach(code => { @@ -100,13 +99,12 @@ t.test('just simple messages', t => { const pkgid = 'some@package' const file = '/some/file' const stack = 'dummy stack trace' - const required = { node: '1.2.3', npm: '4.2.0' } const er = Object.assign(new Error('foo'), { code, path, pkgid, file, - stack + stack, }) t.matchSnapshot(errorMessage(er)) }) @@ -132,18 +130,19 @@ t.test('replace message/stack sensistive info', t => { t.test('bad engine with config loaded', t => { npm.config.loaded = true - t.teardown(() => { npm.config.loaded = false }) + t.teardown(() => { + npm.config.loaded = false + }) const path = '/some/path' const pkgid = 'some@package' const file = '/some/file' const stack = 'dummy stack trace' - const required = { node: '1.2.3', npm: '4.2.0' } const er = Object.assign(new Error('foo'), { code: 'EBADENGINE', path, pkgid, file, - stack + stack, }) t.matchSnapshot(errorMessage(er)) t.end() @@ -152,14 +151,12 @@ 
t.test('bad engine with config loaded', t => { t.test('enoent without a file', t => { const path = '/some/path' const pkgid = 'some@package' - const file = '/some/file' const stack = 'dummy stack trace' - const required = { node: '1.2.3', npm: '4.2.0' } const er = Object.assign(new Error('foo'), { code: 'ENOENT', path, pkgid, - stack + stack, }) t.matchSnapshot(errorMessage(er)) t.end() @@ -171,13 +168,12 @@ t.test('enolock without a command', t => { const pkgid = 'some@package' const file = '/some/file' const stack = 'dummy stack trace' - const required = { node: '1.2.3', npm: '4.2.0' } const er = Object.assign(new Error('foo'), { code: 'ENOLOCK', path, pkgid, file, - stack + stack, }) t.matchSnapshot(errorMessage(er)) t.end() @@ -191,18 +187,18 @@ t.test('default message', t => { signal: 'SIGYOLO', args: ['a', 'r', 'g', 's'], stdout: 'stdout', - stderr: 'stderr' + stderr: 'stderr', }))) t.end() }) t.test('eacces/eperm', t => { const runTest = (windows, loaded, cachePath, cacheDest) => t => { - if (windows) { + if (windows) beWindows() - } else { + else bePosix() - } + npm.config.loaded = loaded const path = `${cachePath ? CACHE : '/not/cache/dir'}/path` const dest = `${cacheDest ? 
CACHE : '/not/cache/dir'}/dest` @@ -210,7 +206,7 @@ t.test('eacces/eperm', t => { code: 'EACCES', path, dest, - stack: 'dummy stack trace' + stack: 'dummy stack trace', }) verboseLogs.length = 0 t.matchSnapshot(errorMessage(er)) @@ -272,36 +268,36 @@ t.test('json parse', t => { } } } -` +`, }) const { prefix } = npm const { argv } = process t.teardown(() => { Object.defineProperty(npm, 'prefix', { value: prefix, - configurable: true + configurable: true, }) process.argv = argv }) Object.defineProperty(npm, 'prefix', { value: dir, configurable: true }) process.argv = ['arg', 'v'] - const ok = t.matchSnapshot(errorMessage(Object.assign(new Error('conflicted'), { + t.matchSnapshot(errorMessage(Object.assign(new Error('conflicted'), { code: 'EJSONPARSE', - file: resolve(dir, 'package.json') + file: resolve(dir, 'package.json'), }))) t.end() }) t.test('just regular bad json in package.json', t => { const dir = t.testdir({ - 'package.json': 'not even slightly json' + 'package.json': 'not even slightly json', }) const { prefix } = npm const { argv } = process t.teardown(() => { Object.defineProperty(npm, 'prefix', { value: prefix, - configurable: true + configurable: true, }) process.argv = argv }) @@ -309,14 +305,14 @@ t.test('json parse', t => { process.argv = ['arg', 'v'] t.matchSnapshot(errorMessage(Object.assign(new Error('not json'), { code: 'EJSONPARSE', - file: resolve(dir, 'package.json') + file: resolve(dir, 'package.json'), }))) t.end() }) t.test('json somewhere else', t => { const dir = t.testdir({ - 'blerg.json': 'not even slightly json' + 'blerg.json': 'not even slightly json', }) const { argv } = process t.teardown(() => { @@ -325,7 +321,7 @@ t.test('json parse', t => { process.argv = ['arg', 'v'] t.matchSnapshot(errorMessage(Object.assign(new Error('not json'), { code: 'EJSONPARSE', - file: `${dir}/blerg.json` + file: `${dir}/blerg.json`, }))) t.end() }) @@ -336,7 +332,7 @@ t.test('json parse', t => { t.test('eotp/e401', t => { t.test('401, no auth 
headers', t => { t.matchSnapshot(errorMessage(Object.assign(new Error('nope'), { - code: 'E401' + code: 'E401', }))) t.end() }) @@ -350,7 +346,7 @@ t.test('eotp/e401', t => { t.test('one-time pass challenge code', t => { t.matchSnapshot(errorMessage(Object.assign(new Error('nope'), { - code: 'EOTP' + code: 'EOTP', }))) t.end() }) @@ -358,7 +354,7 @@ t.test('eotp/e401', t => { t.test('one-time pass challenge message', t => { const message = 'one-time pass' t.matchSnapshot(errorMessage(Object.assign(new Error(message), { - code: 'E401' + code: 'E401', }))) t.end() }) @@ -368,16 +364,16 @@ t.test('eotp/e401', t => { 'Bearer realm=do, charset="UTF-8", challenge="yourself"', 'Basic realm=by, charset="UTF-8", challenge="your friends"', 'PickACardAnyCard realm=friday, charset="UTF-8"', - 'WashYourHands, charset="UTF-8"' + 'WashYourHands, charset="UTF-8"', ] t.plan(auths.length) for (const auth of auths) { t.test(auth, t => { const er = Object.assign(new Error('challenge!'), { headers: { - 'www-authenticate': [ auth ] + 'www-authenticate': [auth], }, - code: 'E401' + code: 'E401', }) t.matchSnapshot(errorMessage(er)) t.end() @@ -397,7 +393,7 @@ t.test('404', t => { t.test('you should publish it', t => { const er = Object.assign(new Error('404 not found'), { pkgid: 'yolo', - code: 'E404' + code: 'E404', }) t.matchSnapshot(errorMessage(er)) t.end() @@ -405,7 +401,7 @@ t.test('404', t => { t.test('name with warning', t => { const er = Object.assign(new Error('404 not found'), { pkgid: new Array(215).fill('x').join(''), - code: 'E404' + code: 'E404', }) t.matchSnapshot(errorMessage(er)) t.end() @@ -413,7 +409,7 @@ t.test('404', t => { t.test('name with error', t => { const er = Object.assign(new Error('404 not found'), { pkgid: 'node_modules', - code: 'E404' + code: 'E404', }) t.matchSnapshot(errorMessage(er)) t.end() @@ -427,13 +423,13 @@ t.test('bad platform', t => { pkgid: 'lodash@1.0.0', current: { os: 'posix', - cpu: 'x64' + cpu: 'x64', }, required: { os: '!yours', - cpu: 
'x420' + cpu: 'x420', }, - code: 'EBADPLATFORM' + code: 'EBADPLATFORM', }) t.matchSnapshot(errorMessage(er)) t.end() @@ -443,13 +439,13 @@ t.test('bad platform', t => { pkgid: 'lodash@1.0.0', current: { os: 'posix', - cpu: 'x64' + cpu: 'x64', }, required: { os: ['!yours', 'mine'], - cpu: ['x420', 'x69'] + cpu: ['x420', 'x69'], }, - code: 'EBADPLATFORM' + code: 'EBADPLATFORM', }) t.matchSnapshot(errorMessage(er)) t.end() @@ -460,7 +456,7 @@ t.test('bad platform', t => { t.test('explain ERESOLVE errors', t => { const er = Object.assign(new Error('could not resolve'), { - code: 'ERESOLVE' + code: 'ERESOLVE', }) t.matchSnapshot(errorMessage(er)) t.strictSame(EXPLAIN_CALLED, [[er]]) diff --git a/deps/npm/test/lib/utils/escape-arg.js b/deps/npm/test/lib/utils/escape-arg.js index 413fa47838bac6..b80a63f0b877bc 100644 --- a/deps/npm/test/lib/utils/escape-arg.js +++ b/deps/npm/test/lib/utils/escape-arg.js @@ -2,7 +2,7 @@ const requireInject = require('require-inject') const t = require('tap') const getEscape = win => requireInject('../../../lib/utils/escape-arg.js', { '../../../lib/utils/is-windows.js': win, - path: require('path')[win ? 'win32' : 'posix'] + path: require('path')[win ? 'win32' : 'posix'], }) const winEscape = getEscape(true) diff --git a/deps/npm/test/lib/utils/escape-exec-path.js b/deps/npm/test/lib/utils/escape-exec-path.js index 28fe75c2a98f4f..f16c576ec5550e 100644 --- a/deps/npm/test/lib/utils/escape-exec-path.js +++ b/deps/npm/test/lib/utils/escape-exec-path.js @@ -2,7 +2,7 @@ const requireInject = require('require-inject') const t = require('tap') const getEscape = win => requireInject('../../../lib/utils/escape-exec-path.js', { '../../../lib/utils/is-windows.js': win, - path: require('path')[win ? 'win32' : 'posix'] + path: require('path')[win ? 
'win32' : 'posix'], }) const winEscape = getEscape(true) diff --git a/deps/npm/test/lib/utils/explain-dep.js b/deps/npm/test/lib/utils/explain-dep.js index 9a205e3c39ce27..28f14477ab709b 100644 --- a/deps/npm/test/lib/utils/explain-dep.js +++ b/deps/npm/test/lib/utils/explain-dep.js @@ -143,7 +143,7 @@ cases.manyDeps = { { type: 'prod', name: 'manydep', - spec:'>1.0.0-beta <1.0.1', + spec: '>1.0.0-beta <1.0.1', from: { location: '/path/to/project', }, diff --git a/deps/npm/test/lib/utils/explain-eresolve.js b/deps/npm/test/lib/utils/explain-eresolve.js index def13153d242dd..8dae1b92cd514f 100644 --- a/deps/npm/test/lib/utils/explain-eresolve.js +++ b/deps/npm/test/lib/utils/explain-eresolve.js @@ -2,7 +2,7 @@ const t = require('tap') const requireInject = require('require-inject') const npm = {} const { explain, report } = requireInject('../../../lib/utils/explain-eresolve.js', { - '../../../lib/npm.js': npm + '../../../lib/npm.js': npm, }) const { statSync, readFileSync, unlinkSync } = require('fs') // strip out timestamps from reports diff --git a/deps/npm/test/lib/utils/file-exists.js b/deps/npm/test/lib/utils/file-exists.js index f247f564e0766a..473a4b050edef2 100644 --- a/deps/npm/test/lib/utils/file-exists.js +++ b/deps/npm/test/lib/utils/file-exists.js @@ -3,7 +3,7 @@ const fileExists = require('../../../lib/utils/file-exists.js') test('returns true when arg is a file', async (t) => { const path = t.testdir({ - foo: 'just some file' + foo: 'just some file', }) const result = await fileExists(`${path}/foo`) @@ -13,7 +13,7 @@ test('returns true when arg is a file', async (t) => { test('returns false when arg is not a file', async (t) => { const path = t.testdir({ - foo: {} + foo: {}, }) const result = await fileExists(`${path}/foo`) diff --git a/deps/npm/test/lib/utils/flat-options.js b/deps/npm/test/lib/utils/flat-options.js index 7601c78d27a285..82c00fc7e5de25 100644 --- a/deps/npm/test/lib/utils/flat-options.js +++ b/deps/npm/test/lib/utils/flat-options.js 
@@ -12,7 +12,7 @@ class Mocknpm { this.modes = { exec: 0o777, file: 0o666, - umask: 0o22 + umask: 0o22, } this.color = true this.projectScope = '@npmcli' @@ -110,14 +110,16 @@ class MockConfig { 'user-agent': 'user-agent', '@scope:registry': '@scope:registry', '//nerf.dart:_authToken': '//nerf.dart:_authToken', - 'proxy': 'proxy', - 'noproxy': 'noproxy', - ...opts + proxy: 'proxy', + noproxy: 'noproxy', + ...opts, }] } + get (key) { return this.list[0][key] } + set (key, val) { this.list[0][key] = val } @@ -127,7 +129,7 @@ const flatOptions = require('../../../lib/utils/flat-options.js') t.match(logs, [[ 'verbose', 'npm-session', - /^[0-9a-f]{16}$/ + /^[0-9a-f]{16}$/, ]], 'logged npm session verbosely') logs.length = 0 @@ -139,7 +141,7 @@ t.test('basic', t => { npmBin: '/path/to/npm/bin.js', log: {}, npmSession: '12345', - cache: generatedFlat.cache.replace(/\\/g, '/') + cache: generatedFlat.cache.replace(/\\/g, '/'), } t.matchSnapshot(clean, 'flat options') t.equal(generatedFlat.npmCommand, null, 'command not set yet') @@ -158,7 +160,7 @@ t.test('basic', t => { t.test('get preferOffline from cache-min', t => { const npm = new Mocknpm({ 'cache-min': 9999999, - 'prefer-offline': undefined + 'prefer-offline': undefined, }) const opts = flatOptions(npm) t.equal(opts.preferOffline, true, 'got preferOffline from cache min') @@ -172,7 +174,7 @@ t.test('get preferOffline from cache-min', t => { t.test('get preferOnline from cache-max', t => { const npm = new Mocknpm({ 'cache-max': -1, - 'prefer-online': undefined + 'prefer-online': undefined, }) const opts = flatOptions(npm) t.equal(opts.preferOnline, true, 'got preferOnline from cache min') @@ -194,7 +196,7 @@ t.test('tag emits warning', t => { t.test('omit/include options', t => { t.test('omit explicitly', t => { const npm = new Mocknpm({ - omit: ['dev', 'optional', 'peer'] + omit: ['dev', 'optional', 'peer'], }) t.strictSame(flatOptions(npm).omit, ['dev', 'optional', 'peer']) t.end() @@ -203,7 +205,7 @@ 
t.test('omit/include options', t => { t.test('omit and include some', t => { const npm = new Mocknpm({ omit: ['dev', 'optional', 'peer'], - include: ['peer'] + include: ['peer'], }) t.strictSame(flatOptions(npm).omit, ['dev', 'optional']) t.end() @@ -213,7 +215,7 @@ t.test('omit/include options', t => { const npm = new Mocknpm({ omit: ['dev', 'optional', 'peer'], include: [], - dev: true + dev: true, }) t.strictSame(flatOptions(npm).omit, ['optional', 'peer']) t.end() @@ -223,7 +225,7 @@ t.test('omit/include options', t => { const npm = new Mocknpm({ omit: [], include: [], - production: true + production: true, }) t.strictSame(flatOptions(npm).omit, ['dev']) t.end() @@ -236,7 +238,7 @@ t.test('omit/include options', t => { const npm = new Mocknpm({ omit: [], include: [], - only: c + only: c, }) t.strictSame(flatOptions(npm).omit, ['dev']) t.end() @@ -246,7 +248,7 @@ t.test('omit/include options', t => { t.test('also dev', t => { const npm = new Mocknpm({ omit: ['dev', 'optional', 'peer'], - also: 'dev' + also: 'dev', }) t.strictSame(flatOptions(npm).omit, ['optional', 'peer']) t.end() @@ -256,7 +258,7 @@ t.test('omit/include options', t => { const npm = new Mocknpm({ optional: false, omit: null, - include: null + include: null, }) t.strictSame(flatOptions(npm).omit, ['optional']) t.end() @@ -276,9 +278,9 @@ t.test('various default values and falsey fallbacks', t => { 'script-shell': false, registry: 'http://example.com', 'metrics-registry': null, - 'searchlimit': 0, + searchlimit: 0, 'save-exact': false, - 'save-prefix': '>=' + 'save-prefix': '>=', }) const opts = flatOptions(npm) t.equal(opts.scriptShell, undefined, 'scriptShell is undefined if falsey') @@ -298,7 +300,7 @@ t.test('legacy _auth token', t => { t.strictSame( flatOptions(npm)._auth, 'asdfasdf', - 'should set legacy _auth token', + 'should set legacy _auth token' ) t.end() }) @@ -308,7 +310,7 @@ t.test('save-type', t => { 'save-optional': false, 'save-peer': false, 'save-dev': false, - 'save-prod': 
false + 'save-prod': false, } const cases = [ ['peerOptional', { @@ -316,23 +318,23 @@ t.test('save-type', t => { 'save-peer': true, }], ['optional', { - 'save-optional': true + 'save-optional': true, }], ['dev', { - 'save-dev': true + 'save-dev': true, }], ['peer', { - 'save-peer': true + 'save-peer': true, }], ['prod', { - 'save-prod': true + 'save-prod': true, }], - [null, {}] + [null, {}], ] for (const [expect, options] of cases) { const opts = flatOptions(new Mocknpm({ ...base, - ...options + ...options, })) t.equal(opts.saveType, expect, JSON.stringify(options)) } diff --git a/deps/npm/test/lib/utils/get-identity.js b/deps/npm/test/lib/utils/get-identity.js index c72f48b2e8f62a..8a4de8835257a6 100644 --- a/deps/npm/test/lib/utils/get-identity.js +++ b/deps/npm/test/lib/utils/get-identity.js @@ -4,7 +4,7 @@ const requireInject = require('require-inject') test('throws ENOREGISTRY when no registry option is provided', async (t) => { t.plan(2) const getIdentity = requireInject('../../../lib/utils/get-identity.js', { - '../../../lib/npm.js': {} + '../../../lib/npm.js': {}, }) try { @@ -23,9 +23,9 @@ test('returns username from uri when provided', async (t) => { config: { getCredentialsByURI: () => { return { username: 'foo' } - } - } - } + }, + }, + }, }) const identity = await getIdentity({ registry: 'https://registry.npmjs.org' }) @@ -37,22 +37,22 @@ test('calls registry whoami when token is provided', async (t) => { const options = { registry: 'https://registry.npmjs.org', - token: 'thisisnotreallyatoken' + token: 'thisisnotreallyatoken', } const getIdentity = requireInject('../../../lib/utils/get-identity.js', { '../../../lib/npm.js': { config: { - getCredentialsByURI: () => options - } + getCredentialsByURI: () => options, + }, }, 'npm-registry-fetch': { json: (path, opts) => { t.equal(path, '/-/whoami', 'calls whoami') t.same(opts, options, 'passes through provided options') return { username: 'foo' } - } - } + }, + }, }) const identity = await 
getIdentity(options) @@ -64,22 +64,22 @@ test('throws ENEEDAUTH when response does not include a username', async (t) => const options = { registry: 'https://registry.npmjs.org', - token: 'thisisnotreallyatoken' + token: 'thisisnotreallyatoken', } const getIdentity = requireInject('../../../lib/utils/get-identity.js', { '../../../lib/npm.js': { config: { - getCredentialsByURI: () => options - } + getCredentialsByURI: () => options, + }, }, 'npm-registry-fetch': { json: (path, opts) => { t.equal(path, '/-/whoami', 'calls whoami') t.same(opts, options, 'passes through provided options') return {} - } - } + }, + }, }) try { @@ -94,9 +94,9 @@ test('throws ENEEDAUTH when neither username nor token is configured', async (t) const getIdentity = requireInject('../../../lib/utils/get-identity.js', { '../../../lib/npm.js': { config: { - getCredentialsByURI: () => ({}) - } - } + getCredentialsByURI: () => ({}), + }, + }, }) try { diff --git a/deps/npm/test/lib/utils/get-project-scope.js b/deps/npm/test/lib/utils/get-project-scope.js index 15ab2bdeff1054..9737b06433c227 100644 --- a/deps/npm/test/lib/utils/get-project-scope.js +++ b/deps/npm/test/lib/utils/get-project-scope.js @@ -3,7 +3,7 @@ const t = require('tap') t.test('package.json with scope', t => { const dir = t.testdir({ - 'package.json': JSON.stringify({ name: '@foo/bar' }) + 'package.json': JSON.stringify({ name: '@foo/bar' }), }) t.equal(getProjectScope(dir), '@foo') t.end() @@ -11,7 +11,7 @@ t.test('package.json with scope', t => { t.test('package.json with slash, but no @', t => { const dir = t.testdir({ - 'package.json': JSON.stringify({ name: 'foo/bar' }) + 'package.json': JSON.stringify({ name: 'foo/bar' }), }) t.equal(getProjectScope(dir), '') t.end() @@ -19,7 +19,7 @@ t.test('package.json with slash, but no @', t => { t.test('package.json without scope', t => { const dir = t.testdir({ - 'package.json': JSON.stringify({ name: 'foo' }) + 'package.json': JSON.stringify({ name: 'foo' }), }) 
t.equal(getProjectScope(dir), '') t.end() @@ -27,7 +27,7 @@ t.test('package.json without scope', t => { t.test('package.json without name', t => { const dir = t.testdir({ - 'package.json': JSON.stringify({}) + 'package.json': JSON.stringify({}), }) t.equal(getProjectScope(dir), '') t.end() @@ -35,7 +35,7 @@ t.test('package.json without name', t => { t.test('package.json not JSON', t => { const dir = t.testdir({ - 'package.json': 'hello' + 'package.json': 'hello', }) t.equal(getProjectScope(dir), '') t.end() diff --git a/deps/npm/test/lib/utils/hosted-git-info-from-manifest.js b/deps/npm/test/lib/utils/hosted-git-info-from-manifest.js index f87cb84eed8232..516d3d5867acbd 100644 --- a/deps/npm/test/lib/utils/hosted-git-info-from-manifest.js +++ b/deps/npm/test/lib/utils/hosted-git-info-from-manifest.js @@ -9,13 +9,13 @@ t.equal(hostedFromMani({ repository: 'not hosted anywhere' }), null) t.equal(hostedFromMani({ repository: { url: 'not hosted anywhere' } }), null) t.match(hostedFromMani({ - repository: 'git+https://github.com/isaacs/abbrev-js' + repository: 'git+https://github.com/isaacs/abbrev-js', }), hostedGitInfo.fromUrl('git+https://github.com/isaacs/abbrev-js')) t.match(hostedFromMani({ - repository: { url: 'git+https://github.com/isaacs/abbrev-js' } + repository: { url: 'git+https://github.com/isaacs/abbrev-js' }, }), hostedGitInfo.fromUrl('https://github.com/isaacs/abbrev-js')) t.match(hostedFromMani({ - repository: { url: 'git+ssh://git@github.com/isaacs/abbrev-js' } + repository: { url: 'git+ssh://git@github.com/isaacs/abbrev-js' }, }), hostedGitInfo.fromUrl('ssh://git@github.com/isaacs/abbrev-js')) diff --git a/deps/npm/test/lib/utils/is-windows-bash.js b/deps/npm/test/lib/utils/is-windows-bash.js index 730dfe301bc764..94fde0ace17ce4 100644 --- a/deps/npm/test/lib/utils/is-windows-bash.js +++ 
b/deps/npm/test/lib/utils/is-windows-bash.js @@ -8,13 +8,13 @@ const isWindowsBash = () => { Object.defineProperty(process, 'platform', { value: 'posix', - configurable: true + configurable: true, }) t.equal(isWindowsBash(), false, 'false when not windows') Object.defineProperty(process, 'platform', { value: 'win32', - configurable: true + configurable: true, }) process.env.MSYSTEM = 'not ming' process.env.TERM = 'dumb' diff --git a/deps/npm/test/lib/utils/is-windows-shell.js b/deps/npm/test/lib/utils/is-windows-shell.js index e2164c222be67d..95519925c97ce6 100644 --- a/deps/npm/test/lib/utils/is-windows-shell.js +++ b/deps/npm/test/lib/utils/is-windows-shell.js @@ -1,6 +1,6 @@ const t = require('tap') Object.defineProperty(process, 'platform', { - value: 'win32' + value: 'win32', }) const isWindows = require('../../../lib/utils/is-windows.js') const isWindowsBash = require('../../../lib/utils/is-windows-bash.js') diff --git a/deps/npm/test/lib/utils/is-windows.js b/deps/npm/test/lib/utils/is-windows.js index 9100071699e771..f8f2999c99433d 100644 --- a/deps/npm/test/lib/utils/is-windows.js +++ b/deps/npm/test/lib/utils/is-windows.js @@ -2,7 +2,7 @@ const t = require('tap') const actuallyWindows = process.platform === 'win32' t.equal(actuallyWindows, require('../../../lib/utils/is-windows.js')) Object.defineProperty(process, 'platform', { - value: actuallyWindows ? 'posix' : 'win32' + value: actuallyWindows ? 
'posix' : 'win32', }) delete require.cache[require.resolve('../../../lib/utils/is-windows.js')] t.equal(!actuallyWindows, require('../../../lib/utils/is-windows.js')) diff --git a/deps/npm/test/lib/utils/lifecycle-cmd.js b/deps/npm/test/lib/utils/lifecycle-cmd.js index 7338229546cf52..0eb342cee50124 100644 --- a/deps/npm/test/lib/utils/lifecycle-cmd.js +++ b/deps/npm/test/lib/utils/lifecycle-cmd.js @@ -3,9 +3,9 @@ const requireInject = require('require-inject') const lifecycleCmd = requireInject('../../../lib/utils/lifecycle-cmd.js', { '../../../lib/npm.js': { commands: { - run: (args, cb) => cb(null, 'called npm.commands.run') - } - } + run: (args, cb) => cb(null, 'called npm.commands.run'), + }, + }, }) t.test('create a lifecycle command', t => { diff --git a/deps/npm/test/lib/utils/path.js b/deps/npm/test/lib/utils/path.js index facee06459d4c0..74fb93462f7553 100644 --- a/deps/npm/test/lib/utils/path.js +++ b/deps/npm/test/lib/utils/path.js @@ -3,7 +3,7 @@ const requireInject = require('require-inject') const mod = '../../../lib/utils/path.js' const delim = require('../../../lib/utils/is-windows.js') ? 
';' : ':' Object.defineProperty(process, 'env', { - value: {} + value: {}, }) process.env.path = ['foo', 'bar', 'baz'].join(delim) t.strictSame(requireInject(mod), ['foo', 'bar', 'baz']) diff --git a/deps/npm/test/lib/utils/perf.js b/deps/npm/test/lib/utils/perf.js index 9b38a3da8198c7..840dcb6e32399b 100644 --- a/deps/npm/test/lib/utils/perf.js +++ b/deps/npm/test/lib/utils/perf.js @@ -20,15 +20,15 @@ t.test('time some stuff', t => { process.emit('timeEnd', 'foo') process.emit('timeEnd', 'baz') t.match(logs, [ - [ 'timing', 'foo', /Completed in [0-9]+ms/ ], - [ 'timing', 'bar', /Completed in [0-9]+ms/ ], - [ 'timing', 'foo', /Completed in [0-9]+ms/ ], + ['timing', 'foo', /Completed in [0-9]+ms/], + ['timing', 'bar', /Completed in [0-9]+ms/], + ['timing', 'foo', /Completed in [0-9]+ms/], [ 'silly', 'timing', "Tried to end timer that doesn't exist:", - 'baz' - ] + 'baz', + ], ]) t.match(timings, { foo: Number, bar: Number }) t.equal(timings.foo > timings.bar, true, 'foo should be > bar') diff --git a/deps/npm/test/lib/utils/ping.js b/deps/npm/test/lib/utils/ping.js index d2b269556e6058..6e0451538f9fac 100644 --- a/deps/npm/test/lib/utils/ping.js +++ b/deps/npm/test/lib/utils/ping.js @@ -11,7 +11,7 @@ test('pings', async (t) => { t.equal(url, '/-/ping?write=true', 'calls the correct url') t.equal(opts, options, 'passes through options') return { json: () => Promise.resolve(response) } - } + }, }) const res = await ping(options) @@ -28,7 +28,7 @@ test('catches errors and returns empty json', async (t) => { t.equal(url, '/-/ping?write=true', 'calls the correct url') t.equal(opts, options, 'passes through options') return { json: () => Promise.reject(response) } - } + }, }) const res = await ping(options) diff --git a/deps/npm/test/lib/utils/proc-log-listener.js b/deps/npm/test/lib/utils/proc-log-listener.js index 0a6119d1a1c0ed..2c1009503762d5 100644 --- a/deps/npm/test/lib/utils/proc-log-listener.js +++ b/deps/npm/test/lib/utils/proc-log-listener.js @@ -5,11 +5,11 @@ 
const { inspect } = require('util') const logs = [] const npmlog = { warn: (...args) => logs.push(['warn', ...args]), - verbose: (...args) => logs.push(['verbose', ...args]) + verbose: (...args) => logs.push(['verbose', ...args]), } requireInject('../../../lib/utils/proc-log-listener.js', { - npmlog + npmlog, })() process.emit('log', 'warn', 'hello', 'i am a warning') @@ -17,22 +17,26 @@ t.strictSame(logs, [['warn', 'hello', 'i am a warning']]) logs.length = 0 const nopeError = new Error('nope') -npmlog.warn = () => { throw nopeError } +npmlog.warn = () => { + throw nopeError +} process.emit('log', 'warn', 'fail') t.strictSame(logs, [[ 'verbose', `attempt to log ${inspect(['warn', 'fail'])} crashed`, - nopeError + nopeError, ]]) logs.length = 0 -npmlog.verbose = () => { throw nopeError } +npmlog.verbose = () => { + throw nopeError +} const consoleErrors = [] console.error = (...args) => consoleErrors.push(args) process.emit('log', 'warn', 'fail2') t.strictSame(logs, []) t.strictSame(consoleErrors, [[ `attempt to log ${inspect(['warn', 'fail2'])} crashed`, - nopeError + nopeError, ]]) diff --git a/deps/npm/test/lib/utils/read-local-package.js b/deps/npm/test/lib/utils/read-local-package.js index 8854cf4e5f2179..33a408eb532379 100644 --- a/deps/npm/test/lib/utils/read-local-package.js +++ b/deps/npm/test/lib/utils/read-local-package.js @@ -5,21 +5,23 @@ let prefix const _flatOptions = { json: false, global: false, - get prefix () { return prefix } + get prefix () { + return prefix + }, } const readLocalPackageName = requireInject('../../../lib/utils/read-local-package.js', { '../../../lib/npm.js': { - flatOptions: _flatOptions - } + flatOptions: _flatOptions, + }, }) test('read local package.json', async (t) => { prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'my-local-package', - version: '1.0.0' - }) + version: '1.0.0', + }), }) const packageName = await readLocalPackageName() t.equal( @@ -33,8 +35,8 @@ test('read local scoped-package.json', async (t) 
=> { prefix = t.testdir({ 'package.json': JSON.stringify({ name: '@my-scope/my-local-package', - version: '1.0.0' - }) + version: '1.0.0', + }), }) const packageName = await readLocalPackageName() t.equal( diff --git a/deps/npm/test/lib/utils/reify-finish.js b/deps/npm/test/lib/utils/reify-finish.js new file mode 100644 index 00000000000000..d6c7d2e7b2d6d8 --- /dev/null +++ b/deps/npm/test/lib/utils/reify-finish.js @@ -0,0 +1,80 @@ +const t = require('tap') +const requireInject = require('require-inject') + +const npm = { + config: { + data: { + get: () => builtinConfMock, + }, + }, +} + +const builtinConfMock = { + loadError: new Error('no builtin config'), + raw: { hasBuiltinConfig: true, x: 'y', nested: { foo: 'bar' }}, +} + +const reifyOutput = () => {} + +let expectWrite = false +const realFs = require('fs') +const fs = { + ...realFs, + promises: { + ...realFs.promises, + writeFile: async (path, data) => { + if (!expectWrite) + throw new Error('did not expect to write builtin config file') + return realFs.promises.writeFile(path, data) + }, + }, +} + +const reifyFinish = requireInject('../../../lib/utils/reify-finish.js', { + fs, + '../../../lib/npm.js': npm, + '../../../lib/utils/reify-output.js': reifyOutput, +}) + +t.test('should not write if not global', async t => { + expectWrite = false + await reifyFinish({ + options: { global: false }, + actualTree: {}, + }) +}) + +t.test('should not write if no global npm module', async t => { + expectWrite = false + await reifyFinish({ + options: { global: true }, + actualTree: { + inventory: new Map(), + }, + }) +}) + +t.test('should not write if builtin conf had load error', async t => { + expectWrite = false + await reifyFinish({ + options: { global: true }, + actualTree: { + inventory: new Map([['node_modules/npm', {}]]), + }, + }) +}) + +t.test('should write if everything above passes', async t => { + expectWrite = true + delete builtinConfMock.loadError + const path = t.testdir() + await reifyFinish({ + 
options: { global: true }, + actualTree: { + inventory: new Map([['node_modules/npm', {path}]]), + }, + }) + // windowwwwwwssss!!!!! + const data = fs.readFileSync(`${path}/npmrc`, 'utf8').replace(/\r\n/g, '\n') + t.matchSnapshot(data, 'written config') +}) diff --git a/deps/npm/test/lib/utils/reify-output.js b/deps/npm/test/lib/utils/reify-output.js index 55f77f1d9d3a74..b905c9ab0f30f1 100644 --- a/deps/npm/test/lib/utils/reify-output.js +++ b/deps/npm/test/lib/utils/reify-output.js @@ -9,18 +9,18 @@ log.level = 'warn' t.cleanSnapshot = str => str.replace(/in [0-9]+m?s/g, 'in {TIME}') const settings = { - fund: true + fund: true, } const npmock = { started: Date.now(), - flatOptions: settings + flatOptions: settings, } const getReifyOutput = tester => requireInject( '../../../lib/utils/reify-output.js', { '../../../lib/npm.js': npmock, - '../../../lib/utils/output.js': tester + '../../../lib/utils/output.js': tester, } ) @@ -36,11 +36,11 @@ t.test('missing info', (t) => { reifyOutput({ actualTree: { - children: [] + children: [], }, diff: { - children: [] - } + children: [], + }, }) }) @@ -56,12 +56,11 @@ t.test('even more missing info', t => { reifyOutput({ actualTree: { - children: [] - } + children: [], + }, }) }) - t.test('single package', (t) => { t.plan(1) const reifyOutput = getReifyOutput( @@ -81,14 +80,14 @@ t.test('single package', (t) => { // the command is not 'audit' auditReport: { error: { - message: 'no audit for youuuuu' - } + message: 'no audit for youuuuu', + }, }, actualTree: { name: 'foo', package: { name: 'foo', - version: '1.0.0' + version: '1.0.0', }, edgesOut: new Map([ ['bar', { @@ -97,26 +96,27 @@ t.test('single package', (t) => { package: { name: 'bar', version: '1.0.0', - funding: { type: 'foo', url: 'http://example.com' } - } - } - }] - ]) + funding: { type: 'foo', url: 'http://example.com' }, + }, + }, + }], + ]), }, diff: { - children: [] - } + children: [], + }, }) }) t.test('no message when funding config is false', (t) => { - 
t.teardown(() => { settings.fund = true }) + t.teardown(() => { + settings.fund = true + }) settings.fund = false const reifyOutput = getReifyOutput( out => { - if (out.endsWith('looking for funding')) { + if (out.endsWith('looking for funding')) t.fail('should not print funding info', { actual: out }) - } } ) @@ -125,7 +125,7 @@ t.test('no message when funding config is false', (t) => { name: 'foo', package: { name: 'foo', - version: '1.0.0' + version: '1.0.0', }, edgesOut: new Map([ ['bar', { @@ -134,15 +134,15 @@ t.test('no message when funding config is false', (t) => { package: { name: 'bar', version: '1.0.0', - funding: { type: 'foo', url: 'http://example.com' } - } - } - }] - ]) + funding: { type: 'foo', url: 'http://example.com' }, + }, + }, + }], + ]), }, diff: { - children: [] - } + children: [], + }, }) t.end() @@ -167,7 +167,7 @@ t.test('print appropriate message for many packages', (t) => { name: 'foo', package: { name: 'foo', - version: '1.0.0' + version: '1.0.0', }, edgesOut: new Map([ ['bar', { @@ -176,9 +176,9 @@ t.test('print appropriate message for many packages', (t) => { package: { name: 'bar', version: '1.0.0', - funding: { type: 'foo', url: 'http://example.com' } - } - } + funding: { type: 'foo', url: 'http://example.com' }, + }, + }, }], ['lorem', { to: { @@ -186,9 +186,9 @@ t.test('print appropriate message for many packages', (t) => { package: { name: 'lorem', version: '1.0.0', - funding: { type: 'foo', url: 'http://example.com' } - } - } + funding: { type: 'foo', url: 'http://example.com' }, + }, + }, }], ['ipsum', { to: { @@ -196,15 +196,15 @@ t.test('print appropriate message for many packages', (t) => { package: { name: 'ipsum', version: '1.0.0', - funding: { type: 'foo', url: 'http://example.com' } - } - } - }] - ]) + funding: { type: 'foo', url: 'http://example.com' }, + }, + }, + }], + ]), }, diff: { - children: [] - } + children: [], + }, }) }) @@ -217,19 +217,21 @@ t.test('no output when silent', t => { reifyOutput({ actualTree: { 
inventory: { size: 999 }, children: [] }, auditReport: { - toJSON: () => mock.auditReport, + toJSON: () => { + throw new Error('this should not get called') + }, vulnerabilities: {}, metadata: { vulnerabilities: { - total: 99 - } - } + total: 99, + }, + }, }, diff: { children: [ - { action: 'ADD', ideal: { location: 'loc' } } - ] - } + { action: 'ADD', ideal: { location: 'loc' } }, + ], + }, }) t.end() }) @@ -251,22 +253,22 @@ t.test('packages changed message', t => { vulnerabilities: {}, metadata: { vulnerabilities: { - total: 0 - } - } + total: 0, + }, + }, } : null, diff: { children: [ - { action: 'some random unexpected junk' } - ] - } + { action: 'some random unexpected junk' }, + ], + }, } - for (let i = 0; i < added; i++) { + for (let i = 0; i < added; i++) mock.diff.children.push({ action: 'ADD', ideal: { location: 'loc' } }) - } - for (let i = 0; i < removed; i++) { + + for (let i = 0; i < removed; i++) mock.diff.children.push({ action: 'REMOVE', actual: { location: 'loc' } }) - } + for (let i = 0; i < changed; i++) { const actual = { location: 'loc' } const ideal = { location: 'loc' } @@ -279,7 +281,7 @@ t.test('packages changed message', t => { removed, changed, audited, - json + json, })) } @@ -288,9 +290,8 @@ t.test('packages changed message', t => { for (const removed of [0, 1, 2]) { for (const changed of [0, 1, 2]) { for (const audited of [0, 1, 2]) { - for (const json of [true, false]) { + for (const json of [true, false]) cases.push([added, removed, changed, audited, json, 'install']) - } } } } @@ -301,9 +302,8 @@ t.test('packages changed message', t => { cases.push([0, 0, 0, 2, false, 'audit']) t.plan(cases.length) - for (const [added, removed, changed, audited, json, command] of cases) { + for (const [added, removed, changed, audited, json, command] of cases) testCase(t, added, removed, changed, audited, json, command) - } t.end() }) @@ -319,14 +319,14 @@ t.test('added packages should be looked up within returned tree', t => { actualTree: { name: 
'foo', inventory: { - has: () => true - } + has: () => true, + }, }, diff: { children: [ - { action: 'ADD', ideal: { name: 'baz' } } - ] - } + { action: 'ADD', ideal: { name: 'baz' } }, + ], + }, }) }) @@ -340,14 +340,14 @@ t.test('added packages should be looked up within returned tree', t => { actualTree: { name: 'foo', inventory: { - has: () => false - } + has: () => false, + }, }, diff: { children: [ - { action: 'ADD', ideal: { name: 'baz' } } - ] - } + { action: 'ADD', ideal: { name: 'baz' } }, + ], + }, }) }) t.end() diff --git a/deps/npm/test/lib/utils/setup-log.js b/deps/npm/test/lib/utils/setup-log.js index 2d5d794f1377a5..4398200abe22c0 100644 --- a/deps/npm/test/lib/utils/setup-log.js +++ b/deps/npm/test/lib/utils/setup-log.js @@ -1,15 +1,18 @@ const t = require('tap') const requireInject = require('require-inject') -const settings = {} +const settings = { + level: 'warn', +} t.afterEach(cb => { - Object.keys(settings).forEach(k => { delete settings[k] }) + Object.keys(settings).forEach(k => { + delete settings[k] + }) cb() }) const WARN_CALLED = [] const npmlog = { - level: 'warn', warn: (...args) => { WARN_CALLED.push(args) }, @@ -22,17 +25,39 @@ const npmlog = { notice: 3500, warn: 4000, error: 5000, - silent: Infinity + silent: Infinity, }, settings, - enableColor: () => { settings.color = true }, - disableColor: () => { settings.color = false }, - enableUnicode: () => { settings.unicode = true }, - disableUnicode: () => { settings.unicode = false }, - enableProgress: () => { settings.progress = true }, - disableProgress: () => { settings.progress = false }, - set heading (h) { settings.heading = h }, - set level (l) { settings.level = l } + enableColor: () => { + settings.color = true + }, + disableColor: () => { + settings.color = false + }, + enableUnicode: () => { + settings.unicode = true + }, + disableUnicode: () => { + settings.unicode = false + }, + enableProgress: () => { + settings.progress = true + }, + disableProgress: () => { + 
settings.progress = false + }, + get heading () { + return settings.heading + }, + set heading (h) { + settings.heading = h + }, + get level () { + return settings.level + }, + set level (l) { + settings.level = l + }, } const EXPLAIN_CALLED = [] @@ -41,9 +66,9 @@ const setupLog = requireInject('../../../lib/utils/setup-log.js', { explain: (...args) => { EXPLAIN_CALLED.push(args) return 'explanation' - } + }, }, - npmlog + npmlog, }) const config = obj => ({ @@ -52,7 +77,7 @@ const config = obj => ({ }, set (k, v) { obj[k] = v - } + }, }) t.test('setup with color=always and unicode', t => { @@ -65,7 +90,7 @@ t.test('setup with color=always and unicode', t => { loglevel: 'warn', color: 'always', unicode: true, - progress: false + progress: false, })), true) npmlog.warn('ERESOLVE', 'hello', { some: { other: 'object' } }) @@ -73,7 +98,7 @@ t.test('setup with color=always and unicode', t => { 'log.warn(ERESOLVE) patched to call explainEresolve()') t.strictSame(WARN_CALLED, [ ['ERESOLVE', 'hello'], - ['', 'explanation'] + ['', 'explanation'], ], 'warn the explanation') EXPLAIN_CALLED.length = 0 WARN_CALLED.length = 0 @@ -86,7 +111,7 @@ t.test('setup with color=always and unicode', t => { color: true, unicode: true, progress: false, - heading: 'npm' + heading: 'npm', }) t.end() @@ -106,7 +131,7 @@ t.test('setup with color=true, no unicode, and non-TTY terminal', t => { loglevel: 'warn', color: false, progress: false, - heading: 'asdf' + heading: 'asdf', })), false) t.strictSame(settings, { @@ -114,7 +139,7 @@ t.test('setup with color=true, no unicode, and non-TTY terminal', t => { color: false, unicode: false, progress: false, - heading: 'asdf' + heading: 'asdf', }) t.end() @@ -137,7 +162,7 @@ t.test('setup with color=true, no unicode, and dumb TTY terminal', t => { loglevel: 'warn', color: true, progress: false, - heading: 'asdf' + heading: 'asdf', })), true) t.strictSame(settings, { @@ -145,7 +170,7 @@ t.test('setup with color=true, no unicode, and dumb TTY terminal', 
t => { color: true, unicode: false, progress: false, - heading: 'asdf' + heading: 'asdf', }) t.end() @@ -168,7 +193,7 @@ t.test('setup with color=true, no unicode, and non-dumb TTY terminal', t => { loglevel: 'warn', color: true, progress: true, - heading: 'asdf' + heading: 'asdf', })), true) t.strictSame(settings, { @@ -176,7 +201,7 @@ t.test('setup with color=true, no unicode, and non-dumb TTY terminal', t => { color: true, unicode: false, progress: true, - heading: 'asdf' + heading: 'asdf', }) t.end() @@ -199,7 +224,7 @@ t.test('setup with non-TTY stdout, TTY stderr', t => { loglevel: 'warn', color: true, progress: true, - heading: 'asdf' + heading: 'asdf', })), false) t.strictSame(settings, { @@ -207,7 +232,7 @@ t.test('setup with non-TTY stdout, TTY stderr', t => { color: true, unicode: false, progress: true, - heading: 'asdf' + heading: 'asdf', }) t.end() @@ -229,7 +254,7 @@ t.test('setup with TTY stdout, non-TTY stderr', t => { loglevel: 'warn', color: true, progress: true, - heading: 'asdf' + heading: 'asdf', })), true) t.strictSame(settings, { @@ -237,7 +262,7 @@ t.test('setup with TTY stdout, non-TTY stderr', t => { color: false, unicode: false, progress: false, - heading: 'asdf' + heading: 'asdf', }) t.end() @@ -246,7 +271,7 @@ t.test('setup with TTY stdout, non-TTY stderr', t => { t.test('set loglevel to timing', t => { setupLog(config({ timing: true, - loglevel: 'notice' + loglevel: 'notice', })) t.equal(settings.level, 'timing') t.end() @@ -266,7 +291,7 @@ t.test('silent has no logging', t => { process.env.TERM = 'totes not dum' setupLog(config({ - loglevel: 'silent' + loglevel: 'silent', })) t.equal(settings.progress, false, 'progress disabled when silent') t.end() diff --git a/deps/npm/test/lib/utils/tar.js b/deps/npm/test/lib/utils/tar.js index 827bc9262db825..b780a73e5ec1c7 100644 --- a/deps/npm/test/lib/utils/tar.js +++ b/deps/npm/test/lib/utils/tar.js @@ -11,9 +11,9 @@ const printLogs = (tarball, unicode) => { log: { notice: (...args) => { 
args.map(el => logs.push(el)) - } + }, }, - unicode + unicode, }) return logs.join('\n') } @@ -24,19 +24,19 @@ test('should log tarball contents', async (t) => { name: 'my-cool-pkg', version: '1.0.0', bundleDependencies: [ - 'bundle-dep' - ] + 'bundle-dep', + ], }, null, 2), - 'node_modules': { - 'bundle-dep': 'toto' - } + node_modules: { + 'bundle-dep': 'toto', + }, }) const tarball = await pack(testDir) const tarballContents = await getContents({ _id: '1', name: 'my-cool-pkg', - version: '1.0.0' + version: '1.0.0', }, tarball) t.matchSnapshot(printLogs(tarballContents, false)) @@ -44,36 +44,36 @@ test('should log tarball contents', async (t) => { test('should log tarball contents with unicode', async (t) => { const { logTar } = requireInject('../../../lib/utils/tar.js', { - 'npmlog': { - 'notice': (str) => { + npmlog: { + notice: (str) => { t.ok(true, 'defaults to npmlog') return str - } - } + }, + }, }) - - logTar({ - files: [], + + logTar({ + files: [], bundled: [], - integrity: '' + integrity: '', }, { unicode: true }) t.end() }) test('should default to npmlog', async (t) => { const { logTar } = requireInject('../../../lib/utils/tar.js', { - 'npmlog': { - 'notice': (str) => { + npmlog: { + notice: (str) => { t.ok(true, 'defaults to npmlog') return str - } - } + }, + }, }) logTar({ files: [], bundled: [], - integrity: '' + integrity: '', }) t.end() }) @@ -82,19 +82,19 @@ test('should getContents of a tarball', async (t) => { const testDir = t.testdir({ 'package.json': JSON.stringify({ name: 'my-cool-pkg', - version: '1.0.0' - }, null, 2) + version: '1.0.0', + }, null, 2), }) const tarball = await pack(testDir) const tarballContents = await getContents({ name: 'my-cool-pkg', - version: '1.0.0' + version: '1.0.0', }, tarball) const integrity = await ssri.fromData(tarball, { - algorithms: ['sha1', 'sha512'] + algorithms: ['sha1', 'sha512'], }) t.strictSame(tarballContents, { @@ -106,10 +106,9 @@ test('should getContents of a tarball', async (t) => { shasum: 
'c0bfd67a5142104e429afda09119eedd6a30d2fc', integrity: ssri.parse(integrity.sha512[0]), filename: 'my-cool-pkg-1.0.0.tgz', - files: [ { path: 'package.json', size: 49, mode: 420 } ], + files: [{ path: 'package.json', size: 49, mode: 420 }], entryCount: 1, - bundled: [] + bundled: [], }, 'contents are correct') t.end() - }) diff --git a/deps/npm/test/lib/utils/unsupported.js b/deps/npm/test/lib/utils/unsupported.js index 89ee6af2321d7d..f14cba9b744adb 100644 --- a/deps/npm/test/lib/utils/unsupported.js +++ b/deps/npm/test/lib/utils/unsupported.js @@ -30,7 +30,7 @@ const versions = [ ['v10.0.0-0', false, false], ['v11.0.0-0', false, false], ['v12.0.0-0', false, false], - ['v13.0.0-0', false, false] + ['v13.0.0-0', false, false], ] test('versions', function (t) { @@ -71,7 +71,7 @@ test('checkForBrokenNode', t => { const expectLogs = [ 'ERROR: npm is known not to run on Node.js 1.2.3', "You'll need to upgrade to a newer Node.js version in order to use this", - 'version of npm. You can find the latest version at https://nodejs.org/' + 'version of npm. 
You can find the latest version at https://nodejs.org/', ] console.error = msg => logs.push(msg) unsupported.checkForBrokenNode() @@ -92,7 +92,7 @@ test('checkForUnsupportedNode', t => { 'npm does not support Node.js 8.0.0', 'You should probably upgrade to a newer version of node as we', "can't make any promises that npm will work with this version.", - 'You can find the latest version at https://nodejs.org/' + 'You can find the latest version at https://nodejs.org/', ] npmlog.warn = (section, msg) => logs.push(msg) diff --git a/deps/npm/test/lib/utils/update-notifier.js b/deps/npm/test/lib/utils/update-notifier.js index 903e888a5e0f7d..99c9dfc26626f1 100644 --- a/deps/npm/test/lib/utils/update-notifier.js +++ b/deps/npm/test/lib/utils/update-notifier.js @@ -22,15 +22,15 @@ const pacote = { process.exit(1) } MANIFEST_REQUEST.push(spec) - if (PACOTE_ERROR) { + if (PACOTE_ERROR) throw PACOTE_ERROR - } + return { version: spec === 'npm@latest' ? CURRENT_VERSION - : /-/.test(spec) ? CURRENT_BETA - : NEXT_VERSION + : /-/.test(spec) ? CURRENT_BETA + : NEXT_VERSION, } - } + }, } const npm = { @@ -38,13 +38,12 @@ const npm = { log: { useColor: () => true }, version: CURRENT_VERSION, config: { get: (k) => k !== 'global' }, - flatOptions, command: 'view', - argv: ['npm'] + argv: ['npm'], } const npmNoColor = { ...npm, - log: { useColor: () => false } + log: { useColor: () => false }, } const { basename } = require('path') @@ -70,17 +69,15 @@ const fs = { process.exit(1) } process.nextTick(() => cb(WRITE_ERROR)) - } + }, } const updateNotifier = requireInject('../../../lib/utils/update-notifier.js', { '@npmcli/ci-detect': () => ciMock, pacote, - fs + fs, }) -const semver = require('semver') - t.afterEach(cb => { MANIFEST_REQUEST.length = 0 STAT_ERROR = null @@ -94,7 +91,7 @@ t.test('situations in which we do not notify', t => { t.test('nothing to do if notifier disabled', async t => { t.equal(await updateNotifier({ ...npm, - config: { get: (k) => k === 'update-notifier' ? 
false : true } + config: { get: (k) => k !== 'update-notifier' }, }), null) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) @@ -104,7 +101,7 @@ t.test('situations in which we do not notify', t => { ...npm, flatOptions: { ...flatOptions, global: true }, command: 'install', - argv: ['npm'] + argv: ['npm'], }), null) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) @@ -140,7 +137,9 @@ t.test('situations in which we do not notify', t => { }) t.test('do not update in CI', async t => { - t.teardown(() => { ciMock = null }) + t.teardown(() => { + ciMock = null + }) ciMock = 'something' t.equal(await updateNotifier(npm), null) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') @@ -148,14 +147,14 @@ t.test('situations in which we do not notify', t => { t.test('only check weekly for GA releases', async t => { // the 10 is fuzz factor for test environment - STAT_MTIME = Date.now() - (1000*60*60*24*7) + 10 + STAT_MTIME = Date.now() - (1000 * 60 * 60 * 24 * 7) + 10 t.equal(await updateNotifier(npm), null) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) t.test('only check daily for betas', async t => { // the 10 is fuzz factor for test environment - STAT_MTIME = Date.now() - (1000*60*60*24) + 10 + STAT_MTIME = Date.now() - (1000 * 60 * 60 * 24) + 10 t.equal(await updateNotifier({ ...npm, version: HAVE_BETA }), null) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) diff --git a/deps/npm/test/lib/view.js b/deps/npm/test/lib/view.js index 88b2769a058990..f3e5d97f333605 100644 --- a/deps/npm/test/lib/view.js +++ b/deps/npm/test/lib/view.js @@ -13,227 +13,225 @@ const cleanLogs = (done) => { } const packument = (nv, opts) => { - if (!opts.fullMetadata) { + if (!opts.fullMetadata) throw new Error('must fetch fullMetadata') - } - if (!opts.preferOnline) { + if (!opts.preferOnline) throw new Error('must fetch with preferOnline') - } const mocks = { - 'red': { - 'name' : 'red', + red: { + name: 'red', 
'dist-tags': { - '1.0.1': {} + '1.0.1': {}, + }, + time: { + unpublished: new Date(), }, - 'time': { - 'unpublished': new Date() - } }, - 'blue': { - 'name': 'blue', + blue: { + name: 'blue', 'dist-tags': {}, - 'time': { - '1.0.0': '2019-08-06T16:21:09.842Z' + time: { + '1.0.0': '2019-08-06T16:21:09.842Z', }, - 'versions': { + versions: { '1.0.0': { - 'name': 'blue', - 'version': '1.0.0', - 'dist': { - 'shasum': '123', - 'tarball': 'http://hm.blue.com/1.0.0.tgz', - 'integrity': '---', - 'fileCount': 1, - 'unpackedSize': 1 - } + name: 'blue', + version: '1.0.0', + dist: { + shasum: '123', + tarball: 'http://hm.blue.com/1.0.0.tgz', + integrity: '---', + fileCount: 1, + unpackedSize: 1, + }, }, - '1.0.1': {} - } + '1.0.1': {}, + }, }, - 'cyan': { - '_npmUser': { - 'name': 'claudia', - 'email': 'claudia@cyan.com' - } , - 'name': 'cyan', + cyan: { + _npmUser: { + name: 'claudia', + email: 'claudia@cyan.com', + }, + name: 'cyan', 'dist-tags': {}, - 'versions': { + versions: { '1.0.0': { - 'version': '1.0.0', - 'name': 'cyan', - 'dist': { - 'shasum': '123', - 'tarball': 'http://hm.cyan.com/1.0.0.tgz', - 'integrity': '---', - 'fileCount': 1, - 'unpackedSize': 1 - } + version: '1.0.0', + name: 'cyan', + dist: { + shasum: '123', + tarball: 'http://hm.cyan.com/1.0.0.tgz', + integrity: '---', + fileCount: 1, + unpackedSize: 1, + }, }, - '1.0.1': {} - } + '1.0.1': {}, + }, }, - 'brown': { - 'name': 'brown' + brown: { + name: 'brown', }, - 'yellow': { - '_id': 'yellow', - 'name': 'yellow', - 'author': { - 'name': 'foo', - 'email': 'foo@yellow.com', - 'twitter': 'foo' + yellow: { + _id: 'yellow', + name: 'yellow', + author: { + name: 'foo', + email: 'foo@yellow.com', + twitter: 'foo', }, - 'readme': 'a very useful readme', - 'versions': { + readme: 'a very useful readme', + versions: { '1.0.0': { - 'version': '1.0.0', - 'author': 'claudia', - 'readme': 'a very useful readme', - 'maintainers': [ - { 'name': 'claudia', 'email': 'c@yellow.com', 'twitter': 'cyellow' }, - { 'name': 
'isaacs', 'email': 'i@yellow.com', 'twitter': 'iyellow' } - ] + version: '1.0.0', + author: 'claudia', + readme: 'a very useful readme', + maintainers: [ + { name: 'claudia', email: 'c@yellow.com', twitter: 'cyellow' }, + { name: 'isaacs', email: 'i@yellow.com', twitter: 'iyellow' }, + ], }, '1.0.1': { - 'version': '1.0.1', - 'author': 'claudia' + version: '1.0.1', + author: 'claudia', }, '1.0.2': { - 'version': '1.0.2', - 'author': 'claudia' - } - } + version: '1.0.2', + author: 'claudia', + }, + }, }, - 'purple': { - 'name': 'purple', - 'versions': { + purple: { + name: 'purple', + versions: { '1.0.0': { - 'foo': 1, - 'maintainers': [ - { 'name': 'claudia' } - ] + foo: 1, + maintainers: [ + { name: 'claudia' }, + ], }, - '1.0.1': {} - } + '1.0.1': {}, + }, }, - 'green': { - '_id': 'green', - 'name': 'green', + green: { + _id: 'green', + name: 'green', 'dist-tags': { - 'latest': '1.0.0' + latest: '1.0.0', }, - 'maintainers': [ - { 'name': 'claudia', 'email': 'c@yellow.com', 'twitter': 'cyellow' }, - { 'name': 'isaacs', 'email': 'i@yellow.com', 'twitter': 'iyellow' } + maintainers: [ + { name: 'claudia', email: 'c@yellow.com', twitter: 'cyellow' }, + { name: 'isaacs', email: 'i@yellow.com', twitter: 'iyellow' }, ], - 'keywords': ['colors', 'green', 'crayola'], - 'versions': { + keywords: ['colors', 'green', 'crayola'], + versions: { '1.0.0': { - '_id': 'green', - 'version': '1.0.0', - 'description': 'green is a very important color', - 'bugs': { - 'url': 'http://bugs.green.com' + _id: 'green', + version: '1.0.0', + description: 'green is a very important color', + bugs: { + url: 'http://bugs.green.com', + }, + deprecated: true, + repository: { + url: 'http://repository.green.com', }, - 'deprecated': true, - 'repository': { - 'url': 'http://repository.green.com' + license: { type: 'ACME' }, + bin: { + green: 'bin/green.js', }, - 'license': { type: 'ACME' }, - 'bin': { - 'green': 'bin/green.js' + dependencies: { + red: '1.0.0', + yellow: '1.0.0', }, - 'dependencies': 
{ - 'red': '1.0.0', - 'yellow': '1.0.0' + dist: { + shasum: '123', + tarball: 'http://hm.green.com/1.0.0.tgz', + integrity: '---', + fileCount: 1, + unpackedSize: 1, }, - 'dist': { - 'shasum': '123', - 'tarball': 'http://hm.green.com/1.0.0.tgz', - 'integrity': '---', - 'fileCount': 1, - 'unpackedSize': 1 - } }, - '1.0.1': {} - } + '1.0.1': {}, + }, }, - 'black': { - 'name': 'black', + black: { + name: 'black', 'dist-tags': { - 'latest': '1.0.0' + latest: '1.0.0', }, - 'versions': { + versions: { '1.0.0': { - 'version': '1.0.0', - 'bugs': 'http://bugs.black.com', - 'license': {}, - 'dependencies': (() => { + version: '1.0.0', + bugs: 'http://bugs.black.com', + license: {}, + dependencies: (() => { const deps = {} - for (i = 0; i < 25; i++) { + for (let i = 0; i < 25; i++) deps[i] = '1.0.0' - } + return deps })(), - 'dist': { - 'shasum': '123', - 'tarball': 'http://hm.black.com/1.0.0.tgz', - 'integrity': '---', - 'fileCount': 1, - 'unpackedSize': 1 - } + dist: { + shasum: '123', + tarball: 'http://hm.black.com/1.0.0.tgz', + integrity: '---', + fileCount: 1, + unpackedSize: 1, + }, }, - '1.0.1': {} - } + '1.0.1': {}, + }, }, - 'pink': { - 'name': 'pink', + pink: { + name: 'pink', 'dist-tags': { - 'latest': '1.0.0' + latest: '1.0.0', }, - 'versions': { + versions: { '1.0.0': { - 'version': '1.0.0', - 'maintainers': [ - { 'name': 'claudia', 'url': 'http://c.pink.com' }, - { 'name': 'isaacs', 'url': 'http://i.pink.com' } + version: '1.0.0', + maintainers: [ + { name: 'claudia', url: 'http://c.pink.com' }, + { name: 'isaacs', url: 'http://i.pink.com' }, ], - 'repository': 'http://repository.pink.com', - 'license': {}, - 'dist': { - 'shasum': '123', - 'tarball': 'http://hm.pink.com/1.0.0.tgz', - 'integrity': '---', - 'fileCount': 1, - 'unpackedSize': 1 - } + repository: 'http://repository.pink.com', + license: {}, + dist: { + shasum: '123', + tarball: 'http://hm.pink.com/1.0.0.tgz', + integrity: '---', + fileCount: 1, + unpackedSize: 1, + }, }, - '1.0.1': {} - } + '1.0.1': 
{}, + }, }, - 'orange': { - 'name': 'orange', + orange: { + name: 'orange', 'dist-tags': { - 'latest': '1.0.0' + latest: '1.0.0', }, - 'versions': { + versions: { '1.0.0': { - 'version': '1.0.0', - 'homepage': 'http://hm.orange.com', - 'license': {}, - 'dist': { - 'shasum': '123', - 'tarball': 'http://hm.orange.com/1.0.0.tgz', - 'integrity': '---', - 'fileCount': 1, - 'unpackedSize': 1 - } + version: '1.0.0', + homepage: 'http://hm.orange.com', + license: {}, + dist: { + shasum: '123', + tarball: 'http://hm.orange.com/1.0.0.tgz', + integrity: '---', + fileCount: 1, + unpackedSize: 1, + }, }, - '1.0.1': {} - } - } + '1.0.1': {}, + }, + }, } return mocks[nv.name] } @@ -244,34 +242,34 @@ t.test('should log package info', t => { '../../lib/npm.js': { flatOptions: { global: false, - } + }, + }, + pacote: { + packument, }, - 'pacote': { - packument - } }) const viewJson = requireInject('../../lib/view.js', { '../../lib/npm.js': { flatOptions: { - json: true - } + json: true, + }, + }, + pacote: { + packument, }, - 'pacote': { - packument - } }) const viewUnicode = requireInject('../../lib/view.js', { '../../lib/npm.js': { flatOptions: { global: false, - unicode: true - } + unicode: true, + }, + }, + pacote: { + packument, }, - 'pacote': { - packument - } }) t.test('package with license, bugs, repository and other fields', t => { @@ -344,8 +342,8 @@ t.test('should log info of package in current working dir', t => { const testDir = t.testdir({ 'package.json': JSON.stringify({ name: 'blue', - version: '1.0.0' - }, null, 2) + version: '1.0.0', + }, null, 2), }) const view = requireInject('../../lib/view.js', { @@ -353,12 +351,12 @@ t.test('should log info of package in current working dir', t => { prefix: testDir, flatOptions: { defaultTag: '1.0.0', - global: false - } + global: false, + }, + }, + pacote: { + packument, }, - 'pacote': { - packument - } }) t.test('specific version', t => { @@ -383,23 +381,23 @@ t.test('should log info by field name', t => { 
'../../lib/npm.js': { flatOptions: { json: true, - global: false - } + global: false, + }, + }, + pacote: { + packument, }, - 'pacote': { - packument - } }) const view = requireInject('../../lib/view.js', { '../../lib/npm.js': { flatOptions: { - global: false - } + global: false, + }, + }, + pacote: { + packument, }, - 'pacote': { - packument - } }) t.test('readme', t => { @@ -452,14 +450,14 @@ t.test('should log info by field name', t => { }) t.test('array field - 1 element', t => { - view(['purple@1.0.0', 'maintainers.name'], () => { + view(['purple@1.0.0', 'maintainers.name'], () => { t.matchSnapshot(logs) t.end() }) }) t.test('array field - 2 elements', t => { - view(['yellow@1.x.x', 'maintainers.name'], () => { + view(['yellow@1.x.x', 'maintainers.name'], () => { t.matchSnapshot(logs) t.end() }) @@ -472,9 +470,9 @@ t.test('throw error if global mode', (t) => { const view = requireInject('../../lib/view.js', { '../../lib/npm.js': { flatOptions: { - global: true - } - } + global: true, + }, + }, }) view([], (err) => { t.equals(err.message, 'Cannot use view command in global mode.') @@ -489,9 +487,9 @@ t.test('throw ENOENT error if package.json misisng', (t) => { '../../lib/npm.js': { prefix: testDir, flatOptions: { - global: false - } - } + global: false, + }, + }, }) view([], (err) => { t.match(err, { code: 'ENOENT' }) @@ -501,16 +499,16 @@ t.test('throw ENOENT error if package.json misisng', (t) => { t.test('throw EJSONPARSE error if package.json not json', (t) => { const testDir = t.testdir({ - 'package.json': 'not json, nope, not even a little bit!' 
+ 'package.json': 'not json, nope, not even a little bit!', }) const view = requireInject('../../lib/view.js', { '../../lib/npm.js': { prefix: testDir, flatOptions: { - global: false - } - } + global: false, + }, + }, }) view([], (err) => { t.match(err, { code: 'EJSONPARSE' }) @@ -520,16 +518,16 @@ t.test('throw EJSONPARSE error if package.json not json', (t) => { t.test('throw error if package.json has no name', (t) => { const testDir = t.testdir({ - 'package.json': '{}' + 'package.json': '{}', }) const view = requireInject('../../lib/view.js', { '../../lib/npm.js': { prefix: testDir, flatOptions: { - global: false - } - } + global: false, + }, + }, }) view([], (err) => { t.equals(err.message, 'Invalid package.json, no "name" field') @@ -542,12 +540,12 @@ t.test('throws when unpublished', (t) => { '../../lib/npm.js': { flatOptions: { defaultTag: '1.0.1', - global: false - } + global: false, + }, + }, + pacote: { + packument, }, - 'pacote': { - packument - } }) view(['red'], (err) => { t.equals(err.code, 'E404') @@ -560,16 +558,18 @@ t.test('completion', (t) => { '../../lib/npm.js': { flatOptions: { defaultTag: '1.0.1', - global: false - } + global: false, + }, + }, + pacote: { + packument, }, - 'pacote': { - packument - } }) view.completion({ - conf: { argv: { remain: ['npm', 'view', 'green@1.0.0'] } } + conf: { argv: { remain: ['npm', 'view', 'green@1.0.0'] } }, }, (err, res) => { + if (err) + throw err t.ok(res, 'returns back fields') t.end() }) @@ -580,14 +580,13 @@ t.test('no registry completion', (t) => { '../../lib/npm.js': { flatOptions: { defaultTag: '1.0.1', - } - } + }, + }, }) view.completion({ - conf: { argv: { remain: ['npm', 'view'] } } + conf: { argv: { remain: ['npm', 'view'] } }, }, (err) => { t.notOk(err, 'there is no package completion') t.end() }) }) - diff --git a/deps/npm/test/lib/whoami.js b/deps/npm/test/lib/whoami.js index dc9dbdfd570ff2..d54814db365e7f 100644 --- a/deps/npm/test/lib/whoami.js +++ b/deps/npm/test/lib/whoami.js @@ -8,7 +8,7 
@@ test('whoami', (t) => { '../../lib/npm.js': { flatOptions: {} }, '../../lib/utils/output.js': (output) => { t.equal(output, 'foo', 'should output the username') - } + }, }) whoami([], (err) => { @@ -24,7 +24,7 @@ test('whoami json', (t) => { '../../lib/npm.js': { flatOptions: { json: true } }, '../../lib/utils/output.js': (output) => { t.equal(output, '"foo"', 'should output the username as json') - } + }, }) whoami([], (err) => { From a6a90af8c058e2ab3d26c67e8e1fd44bf2b59fee Mon Sep 17 00:00:00 2001 From: Geoffrey Booth Date: Tue, 22 Sep 2020 14:33:44 -0700 Subject: [PATCH 82/98] doc: add compatibility/interop technical value PR-URL: https://github.com/nodejs/node/pull/35323 Reviewed-By: Stephen Belanger Reviewed-By: Michael Dawson Reviewed-By: James M Snell --- doc/guides/technical-values.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/guides/technical-values.md b/doc/guides/technical-values.md index d79fde6a461021..fd4e18a485af10 100644 --- a/doc/guides/technical-values.md +++ b/doc/guides/technical-values.md @@ -9,7 +9,7 @@ collaboration. * Priority 2 - Stability * Priority 3 - Operational qualities * Priority 4 - Node.js maintainer experience -* Priority 5 - Up to date Technology and APIs +* Priority 5 - Up to date technology and APIs ## Value descriptions @@ -20,6 +20,9 @@ with Node.js. 
Some key elements of this include: * Great documentation * Bundling friction-reducing APIs and components, even though they could be provided externally +* Compatibility and interoperability with browsers and other JavaScript + environments so that as much code as possible runs as is both in Node.js and + in the other environments * Enabling/supporting external packages to ensure overall developer experience ### 2 - Stability From 8a0c3b9c76b9419a5945b3be42bb8846847ffe2d Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Tue, 17 Nov 2020 00:49:27 +0100 Subject: [PATCH 83/98] http2: refactor to use more primordials PR-URL: https://github.com/nodejs/node/pull/36142 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Stephen Belanger Reviewed-By: Ricky Zhou <0x19951125@gmail.com> Reviewed-By: Rich Trott --- lib/internal/http2/compat.js | 31 ++++++---- lib/internal/http2/core.js | 111 +++++++++++++++++++++-------------- lib/internal/http2/util.js | 27 +++++---- 3 files changed, 103 insertions(+), 66 deletions(-) diff --git a/lib/internal/http2/compat.js b/lib/internal/http2/compat.js index e3cc240da1e1c6..a0251ffbafecd2 100644 --- a/lib/internal/http2/compat.js +++ b/lib/internal/http2/compat.js @@ -2,12 +2,17 @@ const { ArrayIsArray, + ArrayPrototypePush, Boolean, + FunctionPrototypeBind, ObjectAssign, ObjectCreate, ObjectKeys, ObjectPrototypeHasOwnProperty, + ReflectApply, ReflectGetPrototypeOf, + StringPrototypeToLowerCase, + StringPrototypeTrim, Symbol, } = primordials; @@ -138,7 +143,7 @@ function onStreamTrailers(trailers, flags, rawTrailers) { const request = this[kRequest]; if (request !== undefined) { ObjectAssign(request[kTrailers], trailers); - request[kRawTrailers].push(...rawTrailers); + ArrayPrototypePush(request[kRawTrailers], ...rawTrailers); } } @@ -200,7 +205,7 @@ const proxySocketHandler = { case 'end': case 'emit': case 'destroy': - return stream[prop].bind(stream); + return FunctionPrototypeBind(stream[prop], 
stream); case 'writable': case 'destroyed': return stream[prop]; @@ -212,8 +217,8 @@ const proxySocketHandler = { case 'setTimeout': const session = stream.session; if (session !== undefined) - return session.setTimeout.bind(session); - return stream.setTimeout.bind(stream); + return FunctionPrototypeBind(session.setTimeout, session); + return FunctionPrototypeBind(stream.setTimeout, stream); case 'write': case 'read': case 'pause': @@ -223,7 +228,9 @@ const proxySocketHandler = { const ref = stream.session !== undefined ? stream.session[kSocket] : stream; const value = ref[prop]; - return typeof value === 'function' ? value.bind(ref) : value; + return typeof value === 'function' ? + FunctionPrototypeBind(value, ref) : + value; } }, getPrototypeOf(stream) { @@ -394,7 +401,7 @@ class Http2ServerRequest extends Readable { set method(method) { validateString(method, 'method'); - if (method.trim() === '') + if (StringPrototypeTrim(method) === '') throw new ERR_INVALID_ARG_VALUE('method', method); this[kHeaders][HTTP2_HEADER_METHOD] = method; @@ -554,7 +561,7 @@ class Http2ServerResponse extends Stream { setTrailer(name, value) { validateString(name, 'name'); - name = name.trim().toLowerCase(); + name = StringPrototypeToLowerCase(StringPrototypeTrim(name)); assertValidHeader(name, value); this[kTrailers][name] = value; } @@ -570,7 +577,7 @@ class Http2ServerResponse extends Stream { getHeader(name) { validateString(name, 'name'); - name = name.trim().toLowerCase(); + name = StringPrototypeToLowerCase(StringPrototypeTrim(name)); return this[kHeaders][name]; } @@ -585,7 +592,7 @@ class Http2ServerResponse extends Stream { hasHeader(name) { validateString(name, 'name'); - name = name.trim().toLowerCase(); + name = StringPrototypeToLowerCase(StringPrototypeTrim(name)); return ObjectPrototypeHasOwnProperty(this[kHeaders], name); } @@ -594,7 +601,7 @@ class Http2ServerResponse extends Stream { if (this[kStream].headersSent) throw new ERR_HTTP2_HEADERS_SENT(); - name = 
name.trim().toLowerCase(); + name = StringPrototypeToLowerCase(StringPrototypeTrim(name)); if (name === 'date') { this[kState].sendDate = false; @@ -614,7 +621,7 @@ class Http2ServerResponse extends Stream { } [kSetHeader](name, value) { - name = name.trim().toLowerCase(); + name = StringPrototypeToLowerCase(StringPrototypeTrim(name)); assertValidHeader(name, value); if (!isConnectionHeaderAllowed(name, value)) { @@ -755,7 +762,7 @@ class Http2ServerResponse extends Stream { this.writeHead(this[kState].statusCode); if (this[kState].closed || stream.destroyed) - onStreamCloseResponse.call(stream); + ReflectApply(onStreamCloseResponse, stream, []); else stream.end(); diff --git a/lib/internal/http2/core.js b/lib/internal/http2/core.js index 84d0cd5d948b6b..2ea8eacd0c10a2 100644 --- a/lib/internal/http2/core.js +++ b/lib/internal/http2/core.js @@ -5,7 +5,9 @@ const { ArrayFrom, ArrayIsArray, - Map, + ArrayPrototypeForEach, + ArrayPrototypePush, + FunctionPrototypeBind, MathMin, ObjectAssign, ObjectCreate, @@ -13,10 +15,15 @@ const { ObjectDefineProperty, ObjectPrototypeHasOwnProperty, Promise, + PromisePrototypeCatch, ReflectApply, ReflectGetPrototypeOf, - Set, + RegExpPrototypeTest, + SafeMap, + SafeSet, + StringPrototypeSlice, Symbol, + TypedArrayPrototypeSet, Uint32Array, Uint8Array, } = primordials; @@ -34,10 +41,10 @@ const EventEmitter = require('events'); const fs = require('fs'); const http = require('http'); const { readUInt16BE, readUInt32BE } = require('internal/buffer'); +const { URL } = require('internal/url'); const net = require('net'); const { Duplex } = require('stream'); const tls = require('tls'); -const { URL } = require('url'); const { setImmediate } = require('timers'); const { @@ -393,7 +400,7 @@ function onSessionHeaders(handle, id, cat, flags, headers, sensitiveHeaders) { function tryClose(fd) { // Try to close the file descriptor. If closing fails, assert because // an error really should not happen at this point. 
- fs.close(fd, (err) => assert.ifError(err)); + fs.close(fd, assert.ifError); } // Called when the Http2Stream has finished sending data and is ready for @@ -601,7 +608,7 @@ function initOriginSet(session) { let originSet = session[kState].originSet; if (originSet === undefined) { const socket = session[kSocket]; - session[kState].originSet = originSet = new Set(); + session[kState].originSet = originSet = new SafeSet(); if (socket.servername != null) { let originString = `https://${socket.servername}`; if (socket.remotePort != null) @@ -789,7 +796,8 @@ function submitSettings(settings, callback) { debugSessionObj(this, 'submitting settings'); this[kUpdateTimer](); updateSettingsBuffer(settings); - if (!this[kHandle].settings(settingsCallback.bind(this, callback))) { + if (!this[kHandle].settings(FunctionPrototypeBind(settingsCallback, + this, callback))) { this.destroy(new ERR_HTTP2_MAX_PENDING_SETTINGS_ACK()); } } @@ -831,7 +839,7 @@ const proxySocketHandler = { case 'setTimeout': case 'ref': case 'unref': - return session[prop].bind(session); + return FunctionPrototypeBind(session[prop], session); case 'destroy': case 'emit': case 'end': @@ -848,7 +856,9 @@ const proxySocketHandler = { if (socket === undefined) throw new ERR_HTTP2_SOCKET_UNBOUND(); const value = socket[prop]; - return typeof value === 'function' ? value.bind(socket) : value; + return typeof value === 'function' ? + FunctionPrototypeBind(value, socket) : + value; } }, getPrototypeOf(session) { @@ -973,7 +983,7 @@ function setupHandle(socket, type, options) { this[kHandle] = handle; if (this[kNativeFields]) - handle.fields.set(this[kNativeFields]); + TypedArrayPrototypeSet(handle.fields, this[kNativeFields]); else this[kNativeFields] = handle.fields; @@ -1086,7 +1096,8 @@ function closeSession(session, code, error) { // Destroy the handle if it exists at this point. 
if (handle !== undefined) { - handle.ondone = finishSessionClose.bind(null, session, error); + handle.ondone = FunctionPrototypeBind(finishSessionClose, + null, session, error); handle.destroy(code, socket.destroyed); } else { finishSessionClose(session, error); @@ -1154,8 +1165,8 @@ class Http2Session extends EventEmitter { flags: SESSION_FLAGS_PENDING, goawayCode: null, goawayLastStreamID: null, - streams: new Map(), - pendingStreams: new Set(), + streams: new SafeMap(), + pendingStreams: new SafeSet(), pendingAck: 0, shutdownWritableCalled: false, writeQueueSize: 0, @@ -1178,7 +1189,8 @@ class Http2Session extends EventEmitter { if (typeof socket.disableRenegotiation === 'function') socket.disableRenegotiation(); - const setupFn = setupHandle.bind(this, socket, type, options); + const setupFn = FunctionPrototypeBind(setupHandle, this, + socket, type, options); if (socket.connecting || socket.secureConnecting) { const connectEvent = socket instanceof tls.TLSSocket ? 'secureConnect' : 'connect'; @@ -1398,7 +1410,8 @@ class Http2Session extends EventEmitter { this[kState].pendingAck++; - const settingsFn = submitSettings.bind(this, { ...settings }, callback); + const settingsFn = FunctionPrototypeBind(submitSettings, this, + { ...settings }, callback); if (this.connecting) { this.once('connect', settingsFn); return; @@ -1422,7 +1435,9 @@ class Http2Session extends EventEmitter { validateNumber(code, 'code'); validateNumber(lastStreamID, 'lastStreamID'); - const goawayFn = submitGoaway.bind(this, code, lastStreamID, opaqueData); + const goawayFn = FunctionPrototypeBind(submitGoaway, + this, + code, lastStreamID, opaqueData); if (this.connecting) { this.once('connect', goawayFn); return; @@ -1577,7 +1592,7 @@ class ServerHttp2Session extends Http2Session { } validateString(alt, 'alt'); - if (!kQuotedString.test(alt)) + if (!RegExpPrototypeTest(kQuotedString, alt)) throw new ERR_INVALID_CHAR('alt'); // Max length permitted for ALTSVC @@ -1668,7 +1683,8 @@ class 
ClientHttp2Session extends Http2Session { if (getAuthority(headers) === undefined) headers[HTTP2_HEADER_AUTHORITY] = this[kAuthority]; if (headers[HTTP2_HEADER_SCHEME] === undefined) - headers[HTTP2_HEADER_SCHEME] = this[kProtocol].slice(0, -1); + headers[HTTP2_HEADER_SCHEME] = StringPrototypeSlice(this[kProtocol], + 0, -1); if (headers[HTTP2_HEADER_PATH] === undefined) headers[HTTP2_HEADER_PATH] = '/'; } else { @@ -1705,14 +1721,15 @@ class ClientHttp2Session extends Http2Session { if (options.waitForTrailers) stream[kState].flags |= STREAM_FLAGS_HAS_TRAILERS; - const onConnect = requestOnConnect.bind(stream, headersList, options); + const onConnect = FunctionPrototypeBind(requestOnConnect, + stream, headersList, options); if (this.connecting) { if (this[kPendingRequestCalls] !== null) { - this[kPendingRequestCalls].push(onConnect); + ArrayPrototypePush(this[kPendingRequestCalls], onConnect); } else { this[kPendingRequestCalls] = [onConnect]; this.once('connect', () => { - this[kPendingRequestCalls].forEach((f) => f()); + ArrayPrototypeForEach(this[kPendingRequestCalls], (f) => f()); this[kPendingRequestCalls] = null; }); } @@ -1767,7 +1784,7 @@ function shutdownWritable(callback) { req.handle = handle; const err = handle.shutdown(req); if (err === 1) // synchronous finish - return afterShutdown.call(req, 0); + return ReflectApply(afterShutdown, req, [0]); } function finishSendTrailers(stream, headersList) { @@ -1816,7 +1833,7 @@ function closeStream(stream, code, rstStreamStatus = kSubmitRstStream) { } if (rstStreamStatus !== kNoRstStream) { - const finishFn = finishCloseStream.bind(stream, code); + const finishFn = FunctionPrototypeBind(finishCloseStream, stream, code); if (!ending || stream.writableFinished || code !== NGHTTP2_NO_ERROR || rstStreamStatus === kForceRstStream) finishFn(); @@ -1826,7 +1843,7 @@ function closeStream(stream, code, rstStreamStatus = kSubmitRstStream) { } function finishCloseStream(code) { - const rstStreamFn = 
submitRstStream.bind(this, code); + const rstStreamFn = FunctionPrototypeBind(submitRstStream, this, code); // If the handle has not yet been assigned, queue up the request to // ensure that the RST_STREAM frame is sent after the stream ID has // been determined. @@ -2010,7 +2027,8 @@ class Http2Stream extends Duplex { if (this.pending) { this.once( 'ready', - this[kWriteGeneric].bind(this, writev, data, encoding, cb) + FunctionPrototypeBind(this[kWriteGeneric], + this, writev, data, encoding, cb) ); return; } @@ -2089,7 +2107,7 @@ class Http2Stream extends Duplex { return; } debugStreamObj(this, 'shutting down writable on _final'); - shutdownWritable.call(this, cb); + ReflectApply(shutdownWritable, this, [cb]); } _read(nread) { @@ -2102,7 +2120,7 @@ class Http2Stream extends Duplex { this[kState].didRead = true; } if (!this.pending) { - streamOnResume.call(this); + ReflectApply(streamOnResume, this, []); } else { this.once('ready', streamOnResume); } @@ -2116,7 +2134,7 @@ class Http2Stream extends Duplex { options = { ...options }; setAndValidatePriorityOptions(options); - const priorityFn = submitPriority.bind(this, options); + const priorityFn = FunctionPrototypeBind(submitPriority, this, options); // If the handle has not yet been assigned, queue up the priority // frame to be sent as soon as the ready event is emitted. @@ -2346,7 +2364,8 @@ function processHeaders(oldHeaders, options) { function onFileUnpipe() { const stream = this.sink[kOwner]; if (stream.ownsFd) - this.source.close().catch(stream.destroy.bind(stream)); + PromisePrototypeCatch(this.source.close(), + FunctionPrototypeBind(stream.destroy, stream)); else this.source.releaseFD(); } @@ -2431,7 +2450,8 @@ function doSendFD(session, options, fd, headers, streamOptions, err, stat) { // response is canceled. 
The user code may also send a separate type // of response so check again for the HEADERS_SENT flag if ((typeof options.statCheck === 'function' && - options.statCheck.call(this, stat, headers, statOptions) === false) || + ReflectApply(options.statCheck, this, + [stat, headers, statOptions]) === false) || (this[kState].flags & STREAM_FLAGS_HEADERS_SENT)) { return; } @@ -2490,7 +2510,7 @@ function doSendFileFD(session, options, fd, headers, streamOptions, err, stat) { // response is canceled. The user code may also send a separate type // of response so check again for the HEADERS_SENT flag if ((typeof options.statCheck === 'function' && - options.statCheck.call(this, stat, headers) === false) || + ReflectApply(options.statCheck, this, [stat, headers]) === false) || (this[kState].flags & STREAM_FLAGS_HEADERS_SENT)) { tryClose(fd); return; @@ -2528,8 +2548,9 @@ function afterOpen(session, options, headers, streamOptions, err, fd) { state.fd = fd; fs.fstat(fd, - doSendFileFD.bind(this, session, options, fd, - headers, streamOptions)); + FunctionPrototypeBind(doSendFileFD, this, + session, options, fd, + headers, streamOptions)); } class ServerHttp2Stream extends Http2Stream { @@ -2732,8 +2753,9 @@ class ServerHttp2Stream extends Http2Stream { if (options.statCheck !== undefined) { fs.fstat(fd, - doSendFD.bind(this, session, options, fd, - headers, streamOptions)); + FunctionPrototypeBind(doSendFD, this, + session, options, fd, + headers, streamOptions)); return; } @@ -2792,7 +2814,8 @@ class ServerHttp2Stream extends Http2Stream { } fs.open(path, 'r', - afterOpen.bind(this, session, options, headers, streamOptions)); + FunctionPrototypeBind(afterOpen, this, + session, options, headers, streamOptions)); } // Sends a block of informational headers. 
In theory, the HTTP/2 spec @@ -2828,7 +2851,7 @@ class ServerHttp2Stream extends Http2Stream { if (!this[kInfoHeaders]) this[kInfoHeaders] = [headers]; else - this[kInfoHeaders].push(headers); + ArrayPrototypePush(this[kInfoHeaders], headers); const ret = this[kHandle].info(headersList); if (ret < 0) @@ -2982,7 +3005,7 @@ function initializeTLSOptions(options, servername) { options = initializeOptions(options); options.ALPNProtocols = ['h2']; if (options.allowHTTP1 === true) - options.ALPNProtocols.push('http/1.1'); + ArrayPrototypePush(options.ALPNProtocols, 'http/1.1'); if (servername !== undefined && options.servername === undefined) options.servername = servername; return options; @@ -3079,18 +3102,18 @@ Http2Server.prototype[EventEmitter.captureRejectionSymbol] = function( } break; default: - net.Server.prototype[EventEmitter.captureRejectionSymbol] - .call(this, err, event, ...args); + ReflectApply(net.Server.prototype[EventEmitter.captureRejectionSymbol], + this, [err, event, ...args]); } }; function setupCompat(ev) { if (ev === 'request') { this.removeListener('newListener', setupCompat); - this.on('stream', onServerStream.bind( - this, - this[kOptions].Http2ServerRequest, - this[kOptions].Http2ServerResponse) + this.on('stream', FunctionPrototypeBind(onServerStream, + this, + this[kOptions].Http2ServerRequest, + this[kOptions].Http2ServerResponse) ); } } @@ -3131,7 +3154,7 @@ function connect(authority, options, listener) { host = authority.hostname; if (host[0] === '[') - host = host.slice(1, -1); + host = StringPrototypeSlice(host, 1, -1); } else if (authority.host) { host = authority.host; } diff --git a/lib/internal/http2/util.js b/lib/internal/http2/util.js index b3fdf420c8c2b5..dadfb68e8b5289 100644 --- a/lib/internal/http2/util.js +++ b/lib/internal/http2/util.js @@ -2,14 +2,18 @@ const { ArrayIsArray, + ArrayPrototypeIncludes, + ArrayPrototypeMap, + ArrayPrototypePush, Error, MathMax, Number, ObjectCreate, ObjectKeys, - Set, + SafeSet, String, 
StringFromCharCode, + StringPrototypeIncludes, StringPrototypeToLowerCase, Symbol, } = primordials; @@ -98,7 +102,7 @@ const { // This set is defined strictly by the HTTP/2 specification. Only // :-prefixed headers defined by that specification may be added to // this set. -const kValidPseudoHeaders = new Set([ +const kValidPseudoHeaders = new SafeSet([ HTTP2_HEADER_STATUS, HTTP2_HEADER_METHOD, HTTP2_HEADER_AUTHORITY, @@ -109,7 +113,7 @@ const kValidPseudoHeaders = new Set([ // This set contains headers that are permitted to have only a single // value. Multiple instances must not be specified. -const kSingleValueHeaders = new Set([ +const kSingleValueHeaders = new SafeSet([ HTTP2_HEADER_STATUS, HTTP2_HEADER_METHOD, HTTP2_HEADER_AUTHORITY, @@ -156,7 +160,7 @@ const kSingleValueHeaders = new Set([ // meaning to the request payload. By default, unless the user explicitly // overrides the endStream option on the request method, the endStream // option will be defaulted to true when these methods are used. 
-const kNoPayloadMethods = new Set([ +const kNoPayloadMethods = new SafeSet([ HTTP2_METHOD_DELETE, HTTP2_METHOD_GET, HTTP2_METHOD_HEAD @@ -468,7 +472,7 @@ function mapToHeaders(map, let ret = ''; let count = 0; const keys = ObjectKeys(map); - const singles = new Set(); + const singles = new SafeSet(); let i, j; let isArray; let key; @@ -476,13 +480,14 @@ function mapToHeaders(map, let isSingleValueHeader; let err; const neverIndex = - (map[kSensitiveHeaders] || emptyArray).map(StringPrototypeToLowerCase); + ArrayPrototypeMap(map[kSensitiveHeaders] || emptyArray, + StringPrototypeToLowerCase); for (i = 0; i < keys.length; ++i) { key = keys[i]; value = map[key]; if (value === undefined || key === '') continue; - key = key.toLowerCase(); + key = StringPrototypeToLowerCase(key); isSingleValueHeader = kSingleValueHeaders.has(key); isArray = ArrayIsArray(value); if (isArray) { @@ -505,7 +510,9 @@ function mapToHeaders(map, throw new ERR_HTTP2_HEADER_SINGLE_VALUE(key); singles.add(key); } - const flags = neverIndex.includes(key) ? kNeverIndexFlag : kNoHeaderFlags; + const flags = ArrayPrototypeIncludes(neverIndex, key) ? + kNeverIndexFlag : + kNoHeaderFlags; if (key[0] === ':') { err = assertValuePseudoHeader(key); if (err !== undefined) @@ -514,7 +521,7 @@ function mapToHeaders(map, count++; continue; } - if (key.indexOf(' ') >= 0) { + if (StringPrototypeIncludes(key, ' ')) { throw new ERR_INVALID_HTTP_TOKEN('Header name', key); } if (isIllegalConnectionSpecificHeader(key, value)) { @@ -595,7 +602,7 @@ function toHeaderObject(headers, sensitiveHeaders) { // fields with the same name. Since it cannot be combined into a // single field-value, recipients ought to handle "Set-Cookie" as a // special case while processing header fields." 
- existing.push(value); + ArrayPrototypePush(existing, value); break; default: // https://tools.ietf.org/html/rfc7230#section-3.2.2 From 28d710164a3abec9da65f20da4c8b50cda6d97e7 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Wed, 18 Nov 2020 10:53:25 +0100 Subject: [PATCH 84/98] async_hooks: refactor to use more primordials PR-URL: https://github.com/nodejs/node/pull/36168 Reviewed-By: Rich Trott Reviewed-By: James M Snell Reviewed-By: Benjamin Gruenbaum Reviewed-By: Trivikram Kamat --- lib/async_hooks.js | 20 +++++++++++++------- lib/internal/async_hooks.js | 8 +++++--- lib/internal/inspector_async_hook.js | 4 ++-- 3 files changed, 20 insertions(+), 12 deletions(-) diff --git a/lib/async_hooks.js b/lib/async_hooks.js index b6865b6f1cd03e..90b48ebe4b2754 100644 --- a/lib/async_hooks.js +++ b/lib/async_hooks.js @@ -1,6 +1,11 @@ 'use strict'; const { + ArrayPrototypeIncludes, + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypeSplice, + FunctionPrototypeBind, NumberIsSafeInteger, ObjectDefineProperties, ObjectIs, @@ -85,7 +90,7 @@ class AsyncHook { const [hooks_array, hook_fields] = getHookArrays(); // Each hook is only allowed to be added once. 
- if (hooks_array.includes(this)) + if (ArrayPrototypeIncludes(hooks_array, this)) return this; const prev_kTotals = hook_fields[kTotals]; @@ -99,7 +104,7 @@ class AsyncHook { hook_fields[kTotals] += hook_fields[kDestroy] += +!!this[destroy_symbol]; hook_fields[kTotals] += hook_fields[kPromiseResolve] += +!!this[promise_resolve_symbol]; - hooks_array.push(this); + ArrayPrototypePush(hooks_array, this); if (prev_kTotals === 0 && hook_fields[kTotals] > 0) { enableHooks(); @@ -113,7 +118,7 @@ class AsyncHook { disable() { const [hooks_array, hook_fields] = getHookArrays(); - const index = hooks_array.indexOf(this); + const index = ArrayPrototypeIndexOf(hooks_array, this); if (index === -1) return this; @@ -125,7 +130,7 @@ class AsyncHook { hook_fields[kTotals] += hook_fields[kDestroy] -= +!!this[destroy_symbol]; hook_fields[kTotals] += hook_fields[kPromiseResolve] -= +!!this[promise_resolve_symbol]; - hooks_array.splice(index, 1); + ArrayPrototypeSplice(hooks_array, index, 1); if (prev_kTotals > 0 && hook_fields[kTotals] === 0) { disableHooks(); @@ -218,7 +223,7 @@ class AsyncResource { bind(fn) { if (typeof fn !== 'function') throw new ERR_INVALID_ARG_TYPE('fn', 'Function', fn); - const ret = this.runInAsyncScope.bind(this, fn); + const ret = FunctionPrototypeBind(this.runInAsyncScope, this, fn); ObjectDefineProperties(ret, { 'length': { configurable: true, @@ -264,7 +269,8 @@ class AsyncLocalStorage { if (this.enabled) { this.enabled = false; // If this.enabled, the instance must be in storageList - storageList.splice(storageList.indexOf(this), 1); + ArrayPrototypeSplice(storageList, + ArrayPrototypeIndexOf(storageList, this), 1); if (storageList.length === 0) { storageHook.disable(); } @@ -274,7 +280,7 @@ class AsyncLocalStorage { _enable() { if (!this.enabled) { this.enabled = true; - storageList.push(this); + ArrayPrototypePush(storageList, this); storageHook.enable(); } } diff --git a/lib/internal/async_hooks.js b/lib/internal/async_hooks.js index 
adc87f9ed9662d..84e73280ccec48 100644 --- a/lib/internal/async_hooks.js +++ b/lib/internal/async_hooks.js @@ -1,6 +1,8 @@ 'use strict'; const { + ArrayPrototypePop, + ArrayPrototypeSlice, ArrayPrototypeUnshift, ErrorCaptureStackTrace, FunctionPrototypeBind, @@ -132,7 +134,7 @@ function callbackTrampoline(asyncId, resource, cb, ...args) { if (asyncId !== 0 && hasHooks(kAfter)) emitAfterNative(asyncId); - execution_async_resources.pop(); + ArrayPrototypePop(execution_async_resources); return result; } @@ -270,7 +272,7 @@ function getHookArrays() { function storeActiveHooks() { - active_hooks.tmp_array = active_hooks.array.slice(); + active_hooks.tmp_array = ArrayPrototypeSlice(active_hooks.array); // Don't want to make the assumption that kInit to kDestroy are indexes 0 to // 4. So do this the long way. active_hooks.tmp_fields = []; @@ -522,7 +524,7 @@ function popAsyncContext(asyncId) { const offset = stackLength - 1; async_id_fields[kExecutionAsyncId] = async_wrap.async_ids_stack[2 * offset]; async_id_fields[kTriggerAsyncId] = async_wrap.async_ids_stack[2 * offset + 1]; - execution_async_resources.pop(); + ArrayPrototypePop(execution_async_resources); async_hook_fields[kStackLength] = offset; return offset > 0; } diff --git a/lib/internal/inspector_async_hook.js b/lib/internal/inspector_async_hook.js index a6112697cfdaa2..bd3aa635051c5b 100644 --- a/lib/internal/inspector_async_hook.js +++ b/lib/internal/inspector_async_hook.js @@ -4,7 +4,7 @@ let hook; let config; const { - Set, + SafeSet, } = primordials; function lazyHookCreation() { @@ -44,7 +44,7 @@ function lazyHookCreation() { }, }); - hook.promiseIds = new Set(); + hook.promiseIds = new SafeSet(); } function enable() { From 545ac1fec521fedabfbe34048eb5dc7672e97603 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 20 Nov 2020 06:16:50 -0800 Subject: [PATCH 85/98] doc: fix punctuation in v8.md Two minor punctuation fixes for v8.md. 
PR-URL: https://github.com/nodejs/node/pull/36192 Reviewed-By: Antoine du Hamel Reviewed-By: James M Snell Reviewed-By: Luigi Pinca Reviewed-By: Richard Lau --- doc/api/v8.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/api/v8.md b/doc/api/v8.md index 8f13042f80b9c6..ac2498a16f6fe9 100644 --- a/doc/api/v8.md +++ b/doc/api/v8.md @@ -231,12 +231,12 @@ added: v15.1.0 The `v8.takeCoverage()` method allows the user to write the coverage started by [`NODE_V8_COVERAGE`][] to disk on demand. This method can be invoked multiple -times during the lifetime of the process, each time the execution counter will +times during the lifetime of the process. Each time the execution counter will be reset and a new coverage report will be written to the directory specified by [`NODE_V8_COVERAGE`][]. When the process is about to exit, one last coverage will still be written to -disk, unless [`v8.stopCoverage()`][] is invoked before the process exits. +disk unless [`v8.stopCoverage()`][] is invoked before the process exits. 
## `v8.stopCoverage()` From bcbf176c2249433d0877b00556f3595e67c00a41 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Wed, 18 Nov 2020 10:57:14 +0100 Subject: [PATCH 86/98] errors: refactor to use more primordials PR-URL: https://github.com/nodejs/node/pull/36167 Reviewed-By: James M Snell Reviewed-By: Trivikram Kamat --- lib/internal/errors.js | 8 +++++--- test/parallel/test-errors-systemerror.js | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 33ea2824784ef0..cff0e9d563c5f7 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -40,6 +40,7 @@ const { String, StringPrototypeEndsWith, StringPrototypeIncludes, + StringPrototypeMatch, StringPrototypeSlice, StringPrototypeSplit, StringPrototypeStartsWith, @@ -96,7 +97,7 @@ const prepareStackTrace = (globalThis, error, trace) => { if (trace.length === 0) { return errorString; } - return `${errorString}\n at ${trace.join('\n at ')}`; + return `${errorString}\n at ${ArrayPrototypeJoin(trace, '\n at ')}`; }; const maybeOverridePrepareStackTrace = (globalThis, error, trace) => { @@ -376,10 +377,11 @@ function getMessage(key, args, self) { `Code: ${key}; The provided arguments length (${args.length}) does not ` + `match the required ones (${msg.length}).` ); - return msg.apply(self, args); + return ReflectApply(msg, self, args); } - const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length; + const expectedLength = + (StringPrototypeMatch(msg, /%[dfijoOs]/g) || []).length; assert( expectedLength === args.length, `Code: ${key}; The provided arguments length (${args.length}) does not ` + diff --git a/test/parallel/test-errors-systemerror.js b/test/parallel/test-errors-systemerror.js index e801871f40af2c..2a20588e75b386 100644 --- a/test/parallel/test-errors-systemerror.js +++ b/test/parallel/test-errors-systemerror.js @@ -9,7 +9,7 @@ assert.throws( () => { new SystemError(); }, { name: 'TypeError', - message: 'Cannot 
read property \'match\' of undefined' + message: 'String.prototype.match called on null or undefined' } ); From 5698cc08f0101d48dbce2b86ada9f5f8acaa5c26 Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Wed, 18 Nov 2020 16:33:06 -0800 Subject: [PATCH 87/98] n-api: fix test_async_context warnings MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Gabriel Schulhof PR-URL: https://github.com/nodejs/node/pull/36171 Reviewed-By: James M Snell Reviewed-By: Rich Trott Reviewed-By: Chengzhong Wu Reviewed-By: Michael Dawson Reviewed-By: Gerhard Stöbich --- test/node-api/test_async_context/binding.c | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/node-api/test_async_context/binding.c b/test/node-api/test_async_context/binding.c index 749bb05d2503e7..3dab0fd0e818dd 100644 --- a/test/node-api/test_async_context/binding.c +++ b/test/node-api/test_async_context/binding.c @@ -27,7 +27,7 @@ static napi_value MakeCallback(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_typeof(env, func, &func_type)); napi_async_context context; - NAPI_CALL(env, napi_unwrap(env, async_context_wrap, &context)); + NAPI_CALL(env, napi_unwrap(env, async_context_wrap, (void**)&context)); napi_value result; if (func_type == napi_function) { @@ -97,7 +97,8 @@ static napi_value DestroyAsyncResource(napi_env env, napi_callback_info info) { napi_value async_context_wrap = args[0]; napi_async_context async_context; - NAPI_CALL(env, napi_remove_wrap(env, async_context_wrap, &async_context)); + NAPI_CALL(env, + napi_remove_wrap(env, async_context_wrap, (void**)&async_context)); NAPI_CALL(env, napi_async_destroy(env, async_context)); return async_context_wrap; From 79b2ba6744af6f63335b7f63b9a52789fd7aa32e Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Wed, 18 Nov 2020 14:32:23 -0800 Subject: [PATCH 88/98] n-api: clean up binding creation * Remove dead code for `GetterCallbackWrapper` and 
`SetterCallbackWrapper`. * Factor out creation of new `v8::Function`s. * Factor out creation of new `v8::FunctionTemplate`s. * Turn `CallbackBundle` into a class, internalizing creation of new instances and garbage collection. Signed-off-by: Gabriel Schulhof PR-URL: https://github.com/nodejs/node/pull/36170 Reviewed-By: James M Snell Reviewed-By: Rich Trott Reviewed-By: Anna Henningsen Reviewed-By: David Carlier Reviewed-By: Michael Dawson --- src/js_native_api_v8.cc | 263 ++++++++++++---------------------------- 1 file changed, 80 insertions(+), 183 deletions(-) diff --git a/src/js_native_api_v8.cc b/src/js_native_api_v8.cc index 64e4298e122f43..e037c4297de0c5 100644 --- a/src/js_native_api_v8.cc +++ b/src/js_native_api_v8.cc @@ -418,18 +418,36 @@ inline static napi_status Unwrap(napi_env env, // calling through N-API. // Ref: benchmark/misc/function_call // Discussion (incl. perf. data): https://github.com/nodejs/node/pull/21072 -struct CallbackBundle { +class CallbackBundle { + public: + // Creates an object to be made available to the static function callback + // wrapper, used to retrieve the native callback function and data pointer. + static inline v8::Local + New(napi_env env, napi_callback cb, void* data) { + CallbackBundle* bundle = new CallbackBundle(); + bundle->cb = cb; + bundle->cb_data = data; + bundle->env = env; + + v8::Local cbdata = v8::External::New(env->isolate, bundle); + Reference::New(env, cbdata, 0, true, Delete, bundle, nullptr); + return cbdata; + } napi_env env; // Necessary to invoke C++ NAPI callback void* cb_data; // The user provided callback data - napi_callback function_or_getter; - napi_callback setter; + napi_callback cb; + private: + static void Delete(napi_env env, void* data, void* hint) { + CallbackBundle* bundle = static_cast(data); + delete bundle; + } }; // Base class extended by classes that wrap V8 function and property callback // info. 
class CallbackWrapper { public: - CallbackWrapper(napi_value this_arg, size_t args_length, void* data) + inline CallbackWrapper(napi_value this_arg, size_t args_length, void* data) : _this(this_arg), _args_length(args_length), _data(data) {} virtual napi_value GetNewTarget() = 0; @@ -448,10 +466,10 @@ class CallbackWrapper { void* _data; }; -template class CallbackWrapperBase : public CallbackWrapper { public: - CallbackWrapperBase(const Info& cbinfo, const size_t args_length) + inline CallbackWrapperBase(const v8::FunctionCallbackInfo& cbinfo, + const size_t args_length) : CallbackWrapper(JsValueFromV8LocalValue(cbinfo.This()), args_length, nullptr), @@ -461,16 +479,14 @@ class CallbackWrapperBase : public CallbackWrapper { _data = _bundle->cb_data; } - napi_value GetNewTarget() override { return nullptr; } - protected: - void InvokeCallback() { + inline void InvokeCallback() { napi_callback_info cbinfo_wrapper = reinterpret_cast( static_cast(this)); // All other pointers we need are stored in `_bundle` napi_env env = _bundle->env; - napi_callback cb = _bundle->*FunctionField; + napi_callback cb = _bundle->cb; napi_value result; env->CallIntoModule([&](napi_env env) { @@ -482,19 +498,45 @@ class CallbackWrapperBase : public CallbackWrapper { } } - const Info& _cbinfo; + const v8::FunctionCallbackInfo& _cbinfo; CallbackBundle* _bundle; }; class FunctionCallbackWrapper - : public CallbackWrapperBase, - &CallbackBundle::function_or_getter> { + : public CallbackWrapperBase { public: static void Invoke(const v8::FunctionCallbackInfo& info) { FunctionCallbackWrapper cbwrapper(info); cbwrapper.InvokeCallback(); } + static inline napi_status NewFunction(napi_env env, + napi_callback cb, + void* cb_data, + v8::Local* result) { + v8::Local cbdata = v8impl::CallbackBundle::New(env, cb, cb_data); + RETURN_STATUS_IF_FALSE(env, !cbdata.IsEmpty(), napi_generic_failure); + + v8::MaybeLocal maybe_function = + v8::Function::New(env->context(), Invoke, cbdata); + 
CHECK_MAYBE_EMPTY(env, maybe_function, napi_generic_failure); + + *result = maybe_function.ToLocalChecked(); + return napi_clear_last_error(env); + } + + static inline napi_status NewTemplate(napi_env env, + napi_callback cb, + void* cb_data, + v8::Local* result, + v8::Local sig = v8::Local()) { + v8::Local cbdata = v8impl::CallbackBundle::New(env, cb, cb_data); + RETURN_STATUS_IF_FALSE(env, !cbdata.IsEmpty(), napi_generic_failure); + + *result = v8::FunctionTemplate::New(env->isolate, Invoke, cbdata, sig); + return napi_clear_last_error(env); + } + explicit FunctionCallbackWrapper( const v8::FunctionCallbackInfo& cbinfo) : CallbackWrapperBase(cbinfo, cbinfo.Length()) {} @@ -532,98 +574,6 @@ class FunctionCallbackWrapper } }; -class GetterCallbackWrapper - : public CallbackWrapperBase, - &CallbackBundle::function_or_getter> { - public: - static void Invoke(v8::Local property, - const v8::PropertyCallbackInfo& info) { - GetterCallbackWrapper cbwrapper(info); - cbwrapper.InvokeCallback(); - } - - explicit GetterCallbackWrapper( - const v8::PropertyCallbackInfo& cbinfo) - : CallbackWrapperBase(cbinfo, 0) {} - - /*virtual*/ - void Args(napi_value* buffer, size_t buffer_length) override { - if (buffer_length > 0) { - napi_value undefined = - v8impl::JsValueFromV8LocalValue(v8::Undefined(_cbinfo.GetIsolate())); - for (size_t i = 0; i < buffer_length; i += 1) { - buffer[i] = undefined; - } - } - } - - /*virtual*/ - void SetReturnValue(napi_value value) override { - v8::Local val = v8impl::V8LocalValueFromJsValue(value); - _cbinfo.GetReturnValue().Set(val); - } -}; - -class SetterCallbackWrapper - : public CallbackWrapperBase, - &CallbackBundle::setter> { - public: - static void Invoke(v8::Local property, - v8::Local value, - const v8::PropertyCallbackInfo& info) { - SetterCallbackWrapper cbwrapper(info, value); - cbwrapper.InvokeCallback(); - } - - SetterCallbackWrapper(const v8::PropertyCallbackInfo& cbinfo, - const v8::Local& value) - : CallbackWrapperBase(cbinfo, 1), 
_value(value) {} - - /*virtual*/ - void Args(napi_value* buffer, size_t buffer_length) override { - if (buffer_length > 0) { - buffer[0] = v8impl::JsValueFromV8LocalValue(_value); - - if (buffer_length > 1) { - napi_value undefined = v8impl::JsValueFromV8LocalValue( - v8::Undefined(_cbinfo.GetIsolate())); - for (size_t i = 1; i < buffer_length; i += 1) { - buffer[i] = undefined; - } - } - } - } - - /*virtual*/ - void SetReturnValue(napi_value value) override { - // Ignore any value returned from a setter callback. - } - - private: - const v8::Local& _value; -}; - -static void DeleteCallbackBundle(napi_env env, void* data, void* hint) { - CallbackBundle* bundle = static_cast(data); - delete bundle; -} - -// Creates an object to be made available to the static function callback -// wrapper, used to retrieve the native callback function and data pointer. -static -v8::Local CreateFunctionCallbackData(napi_env env, - napi_callback cb, - void* data) { - CallbackBundle* bundle = new CallbackBundle(); - bundle->function_or_getter = cb; - bundle->cb_data = data; - bundle->env = env; - v8::Local cbdata = v8::External::New(env->isolate, bundle); - Reference::New(env, cbdata, 0, true, DeleteCallbackBundle, bundle, nullptr); - - return cbdata; -} - enum WrapType { retrievable, anonymous @@ -745,22 +695,12 @@ napi_status napi_create_function(napi_env env, CHECK_ARG(env, result); CHECK_ARG(env, cb); - v8::Isolate* isolate = env->isolate; v8::Local return_value; - v8::EscapableHandleScope scope(isolate); - v8::Local cbdata = - v8impl::CreateFunctionCallbackData(env, cb, callback_data); - - RETURN_STATUS_IF_FALSE(env, !cbdata.IsEmpty(), napi_generic_failure); - - v8::Local context = env->context(); - v8::MaybeLocal maybe_function = - v8::Function::New(context, - v8impl::FunctionCallbackWrapper::Invoke, - cbdata); - CHECK_MAYBE_EMPTY(env, maybe_function, napi_generic_failure); - - return_value = scope.Escape(maybe_function.ToLocalChecked()); + v8::EscapableHandleScope 
scope(env->isolate); + v8::Local fn; + STATUS_CALL(v8impl::FunctionCallbackWrapper::NewFunction( + env, cb, callback_data, &fn)); + return_value = scope.Escape(fn); if (utf8name != nullptr) { v8::Local name_string; @@ -792,13 +732,9 @@ napi_status napi_define_class(napi_env env, v8::Isolate* isolate = env->isolate; v8::EscapableHandleScope scope(isolate); - v8::Local cbdata = - v8impl::CreateFunctionCallbackData(env, constructor, callback_data); - - RETURN_STATUS_IF_FALSE(env, !cbdata.IsEmpty(), napi_generic_failure); - - v8::Local tpl = v8::FunctionTemplate::New( - isolate, v8impl::FunctionCallbackWrapper::Invoke, cbdata); + v8::Local tpl; + STATUS_CALL(v8impl::FunctionCallbackWrapper::NewTemplate( + env, constructor, callback_data, &tpl)); v8::Local name_string; CHECK_NEW_FROM_UTF8_LEN(env, name_string, utf8name, length); @@ -828,18 +764,12 @@ napi_status napi_define_class(napi_env env, v8::Local getter_tpl; v8::Local setter_tpl; if (p->getter != nullptr) { - v8::Local getter_data = - v8impl::CreateFunctionCallbackData(env, p->getter, p->data); - - getter_tpl = v8::FunctionTemplate::New( - isolate, v8impl::FunctionCallbackWrapper::Invoke, getter_data); + STATUS_CALL(v8impl::FunctionCallbackWrapper::NewTemplate( + env, p->getter, p->data, &getter_tpl)); } if (p->setter != nullptr) { - v8::Local setter_data = - v8impl::CreateFunctionCallbackData(env, p->setter, p->data); - - setter_tpl = v8::FunctionTemplate::New( - isolate, v8impl::FunctionCallbackWrapper::Invoke, setter_data); + STATUS_CALL(v8impl::FunctionCallbackWrapper::NewTemplate( + env, p->setter, p->data, &setter_tpl)); } tpl->PrototypeTemplate()->SetAccessorProperty( @@ -849,16 +779,9 @@ napi_status napi_define_class(napi_env env, attributes, v8::AccessControl::DEFAULT); } else if (p->method != nullptr) { - v8::Local cbdata = - v8impl::CreateFunctionCallbackData(env, p->method, p->data); - - RETURN_STATUS_IF_FALSE(env, !cbdata.IsEmpty(), napi_generic_failure); - - v8::Local t = - 
v8::FunctionTemplate::New(isolate, - v8impl::FunctionCallbackWrapper::Invoke, - cbdata, - v8::Signature::New(isolate, tpl)); + v8::Local t; + STATUS_CALL(v8impl::FunctionCallbackWrapper::NewTemplate( + env, p->method, p->data, &t, v8::Signature::New(isolate, tpl))); tpl->PrototypeTemplate()->Set(property_name, t, attributes); } else { @@ -1263,33 +1186,16 @@ napi_status napi_define_properties(napi_env env, STATUS_CALL(v8impl::V8NameFromPropertyDescriptor(env, p, &property_name)); if (p->getter != nullptr || p->setter != nullptr) { - v8::Local local_getter; - v8::Local local_setter; + v8::Local local_getter; + v8::Local local_setter; if (p->getter != nullptr) { - v8::Local getter_data = - v8impl::CreateFunctionCallbackData(env, p->getter, p->data); - CHECK_MAYBE_EMPTY(env, getter_data, napi_generic_failure); - - v8::MaybeLocal maybe_getter = - v8::Function::New(context, - v8impl::FunctionCallbackWrapper::Invoke, - getter_data); - CHECK_MAYBE_EMPTY(env, maybe_getter, napi_generic_failure); - - local_getter = maybe_getter.ToLocalChecked(); + STATUS_CALL(v8impl::FunctionCallbackWrapper::NewFunction( + env, p->getter, p->data, &local_getter)); } if (p->setter != nullptr) { - v8::Local setter_data = - v8impl::CreateFunctionCallbackData(env, p->setter, p->data); - CHECK_MAYBE_EMPTY(env, setter_data, napi_generic_failure); - - v8::MaybeLocal maybe_setter = - v8::Function::New(context, - v8impl::FunctionCallbackWrapper::Invoke, - setter_data); - CHECK_MAYBE_EMPTY(env, maybe_setter, napi_generic_failure); - local_setter = maybe_setter.ToLocalChecked(); + STATUS_CALL(v8impl::FunctionCallbackWrapper::NewFunction( + env, p->setter, p->data, &local_setter)); } v8::PropertyDescriptor descriptor(local_getter, local_setter); @@ -1304,19 +1210,10 @@ napi_status napi_define_properties(napi_env env, return napi_set_last_error(env, napi_invalid_arg); } } else if (p->method != nullptr) { - v8::Local cbdata = - v8impl::CreateFunctionCallbackData(env, p->method, p->data); - - 
CHECK_MAYBE_EMPTY(env, cbdata, napi_generic_failure); - - v8::MaybeLocal maybe_fn = - v8::Function::New(context, - v8impl::FunctionCallbackWrapper::Invoke, - cbdata); - - CHECK_MAYBE_EMPTY(env, maybe_fn, napi_generic_failure); - - v8::PropertyDescriptor descriptor(maybe_fn.ToLocalChecked(), + v8::Local method; + STATUS_CALL(v8impl::FunctionCallbackWrapper::NewFunction( + env, p->method, p->data, &method)); + v8::PropertyDescriptor descriptor(method, (p->attributes & napi_writable) != 0); descriptor.set_enumerable((p->attributes & napi_enumerable) != 0); descriptor.set_configurable((p->attributes & napi_configurable) != 0); From 48bf59bb8be9e2ff4719fbf34f96b797411431fb Mon Sep 17 00:00:00 2001 From: Madara Uchiha Date: Tue, 10 Nov 2020 19:06:41 +0200 Subject: [PATCH 89/98] http2: add support for AbortSignal to http2Session.request - Add support - Add test - Docs once PR is up PR-URL: https://github.com/nodejs/node/pull/36070 Reviewed-By: Matteo Collina Reviewed-By: Benjamin Gruenbaum --- doc/api/http2.md | 9 +++ lib/internal/http2/core.js | 18 +++++- test/parallel/test-http2-client-destroy.js | 74 ++++++++++++++++++++++ 3 files changed, 100 insertions(+), 1 deletion(-) diff --git a/doc/api/http2.md b/doc/api/http2.md index 9fc447b657b368..8a676d24de73f1 100644 --- a/doc/api/http2.md +++ b/doc/api/http2.md @@ -2,6 +2,9 @@ Cancel all outstanding DNS queries made by this resolver. The corresponding diff --git a/doc/api/esm.md b/doc/api/esm.md index 385aee6f84a8f5..17879fda4244bd 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -6,7 +6,7 @@ added: v8.5.0 changes: - version: - - REPLACEME + - v15.3.0 pr-url: https://github.com/nodejs/node/pull/35781 description: Stabilize modules implementation. - version: diff --git a/doc/api/events.md b/doc/api/events.md index 1ce815ce92cbe3..4115c762655040 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -385,7 +385,7 @@ regular `'error'` listener is installed. 
### `EventEmitter.setMaxListeners(n[, ...eventTargets])` * `n` {number} A non-negative number. The maximum number of listeners per diff --git a/doc/api/http.md b/doc/api/http.md index 104682012ce69f..8e7056114bee8f 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -2336,7 +2336,7 @@ This can be overridden for servers and client requests by passing the * `windowSize` {number} diff --git a/doc/api/path.md b/doc/api/path.md index 36c281b772e1af..ca62e2f3628bfe 100644 --- a/doc/api/path.md +++ b/doc/api/path.md @@ -435,7 +435,7 @@ A [`TypeError`][] is thrown if `path` is not a string. @@ -575,7 +575,7 @@ method is non-operational and always returns `path` without modifications. diff --git a/doc/api/readline.md b/doc/api/readline.md index 8f96616fd235c6..7ea2a1d5a60552 100644 --- a/doc/api/readline.md +++ b/doc/api/readline.md @@ -285,7 +285,7 @@ whenever `rl.prompt()` is called. ### `rl.getPrompt()` * Returns: {string} the current prompt string diff --git a/doc/api/util.md b/doc/api/util.md index 4a506b18b70ddf..d6be620b42c56d 100644 --- a/doc/api/util.md +++ b/doc/api/util.md @@ -1291,7 +1291,7 @@ The encoding supported by the `TextEncoder` instance. Always set to `'utf-8'`. diff --git a/doc/changelogs/CHANGELOG_V15.md b/doc/changelogs/CHANGELOG_V15.md index 08be268f9e3d8a..721984bed78e8e 100644 --- a/doc/changelogs/CHANGELOG_V15.md +++ b/doc/changelogs/CHANGELOG_V15.md @@ -10,6 +10,7 @@ +15.3.0
15.2.1
15.2.0
15.1.0
@@ -36,6 +37,121 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + +## 2020-11-24, Version 15.3.0 (Current), @codebytere + +### Notable Changes + +* [[`6349b1d673`](https://github.com/nodejs/node/commit/6349b1d673)] - **(SEMVER-MINOR)** **dns**: add a cancel() method to the promise Resolver (Szymon Marczak) [#33099](https://github.com/nodejs/node/pull/33099) +* [[`9ce9b016e6`](https://github.com/nodejs/node/commit/9ce9b016e6)] - **(SEMVER-MINOR)** **events**: add max listener warning for EventTarget (James M Snell) [#36001](https://github.com/nodejs/node/pull/36001) +* [[`8390f8a86b`](https://github.com/nodejs/node/commit/8390f8a86b)] - **(SEMVER-MINOR)** **http**: add support for abortsignal to http.request (Benjamin Gruenbaum) [#36048](https://github.com/nodejs/node/pull/36048) +* [[`9c6be3cc90`](https://github.com/nodejs/node/commit/9c6be3cc90)] - **(SEMVER-MINOR)** **http2**: allow setting the local window size of a session (Yongsheng Zhang) [#35978](https://github.com/nodejs/node/pull/35978) +* [[`15ff155c12`](https://github.com/nodejs/node/commit/15ff155c12)] - **(SEMVER-MINOR)** **lib**: add throws option to fs.f/l/statSync (Andrew Casey) [#33716](https://github.com/nodejs/node/pull/33716) +* [[`85c85d368a`](https://github.com/nodejs/node/commit/85c85d368a)] - **(SEMVER-MINOR)** **path**: add `path/posix` and `path/win32` alias modules (ExE Boss) [#34962](https://github.com/nodejs/node/pull/34962) +* [[`d1baae3640`](https://github.com/nodejs/node/commit/d1baae3640)] - **(SEMVER-MINOR)** **readline**: add getPrompt to get the current prompt (Mattias Runge-Broberg) [#33675](https://github.com/nodejs/node/pull/33675) +* [[`5729478509`](https://github.com/nodejs/node/commit/5729478509)] - **(SEMVER-MINOR)** **src**: add loop idle time in 
diagnostic report (Gireesh Punathil) [#35940](https://github.com/nodejs/node/pull/35940) +* [[`baa87c1a7d`](https://github.com/nodejs/node/commit/baa87c1a7d)] - **(SEMVER-MINOR)** **util**: add `util/types` alias module (ExE Boss) [#34055](https://github.com/nodejs/node/pull/34055) + +### Commits + +* [[`34aa0c868e`](https://github.com/nodejs/node/commit/34aa0c868e)] - **assert**: refactor to use more primordials (Antoine du Hamel) [#35998](https://github.com/nodejs/node/pull/35998) +* [[`28d710164a`](https://github.com/nodejs/node/commit/28d710164a)] - **async_hooks**: refactor to use more primordials (Antoine du Hamel) [#36168](https://github.com/nodejs/node/pull/36168) +* [[`1924255fdb`](https://github.com/nodejs/node/commit/1924255fdb)] - **async_hooks**: fix leak in AsyncLocalStorage exit (Stephen Belanger) [#35779](https://github.com/nodejs/node/pull/35779) +* [[`3ee556a867`](https://github.com/nodejs/node/commit/3ee556a867)] - **benchmark**: fix build warnings (Gabriel Schulhof) [#36157](https://github.com/nodejs/node/pull/36157) +* [[`fcc38a1312`](https://github.com/nodejs/node/commit/fcc38a1312)] - **build**: replace which with command -v (raisinten) [#36118](https://github.com/nodejs/node/pull/36118) +* [[`60874ba941`](https://github.com/nodejs/node/commit/60874ba941)] - **build**: try “python3” as a last resort for 3.x (Ole André Vadla Ravnås) [#35983](https://github.com/nodejs/node/pull/35983) +* [[`fbe210b2a1`](https://github.com/nodejs/node/commit/fbe210b2a1)] - **build**: conditionally clear vcinstalldir (Brian Ingenito) [#36009](https://github.com/nodejs/node/pull/36009) +* [[`56f83e6876`](https://github.com/nodejs/node/commit/56f83e6876)] - **build**: refactor configure.py to use argparse (raisinten) 
[#35755](https://github.com/nodejs/node/pull/35755) +* [[`0b70822461`](https://github.com/nodejs/node/commit/0b70822461)] - **child_process**: refactor to use more primordials (Antoine du Hamel) [#36003](https://github.com/nodejs/node/pull/36003) +* [[`e54108f2e4`](https://github.com/nodejs/node/commit/e54108f2e4)] - **cluster**: refactor to use more primordials (Antoine du Hamel) [#36011](https://github.com/nodejs/node/pull/36011) +* [[`272fc794b2`](https://github.com/nodejs/node/commit/272fc794b2)] - **crypto**: fix format warning in AdditionalConfig (raisinten) [#36060](https://github.com/nodejs/node/pull/36060) +* [[`63a138e02f`](https://github.com/nodejs/node/commit/63a138e02f)] - **crypto**: fix passing TypedArray to webcrypto AES methods (Antoine du Hamel) [#36087](https://github.com/nodejs/node/pull/36087) +* [[`4a88c73fa5`](https://github.com/nodejs/node/commit/4a88c73fa5)] - **deps**: upgrade npm to 7.0.14 (nlf) [#36238](https://github.com/nodejs/node/pull/36238) +* [[`d16e8622a7`](https://github.com/nodejs/node/commit/d16e8622a7)] - **deps**: upgrade npm to 7.0.13 (Ruy Adorno) [#36202](https://github.com/nodejs/node/pull/36202) +* [[`c23ee3744f`](https://github.com/nodejs/node/commit/c23ee3744f)] - **deps**: upgrade npm to 7.0.12 (Ruy Adorno) [#36153](https://github.com/nodejs/node/pull/36153) +* [[`0fcbb1c0d5`](https://github.com/nodejs/node/commit/0fcbb1c0d5)] - **deps**: V8: cherry-pick 3176bfd447a9 (Anna Henningsen) [#35612](https://github.com/nodejs/node/pull/35612) +* [[`27f1bc05fd`](https://github.com/nodejs/node/commit/27f1bc05fd)] - **deps**: upgrade npm to 7.0.11 (Darcy Clarke) [#36112](https://github.com/nodejs/node/pull/36112) +* 
[[`8ae3ffe2be`](https://github.com/nodejs/node/commit/8ae3ffe2be)] - **deps**: V8: cherry-pick 1d0f426311d4 (Ole André Vadla Ravnås) [#35986](https://github.com/nodejs/node/pull/35986) +* [[`4b7ba11d67`](https://github.com/nodejs/node/commit/4b7ba11d67)] - **deps**: V8: cherry-pick 4e077ff0444a (Ole André Vadla Ravnås) [#35986](https://github.com/nodejs/node/pull/35986) +* [[`098a5b1298`](https://github.com/nodejs/node/commit/098a5b1298)] - **deps**: V8: cherry-pick 086eecbd96b6 (Ole André Vadla Ravnås) [#35986](https://github.com/nodejs/node/pull/35986) +* [[`d2c757ab19`](https://github.com/nodejs/node/commit/d2c757ab19)] - **deps**: V8: cherry-pick 27e1ac1a79ff (Ole André Vadla Ravnås) [#35986](https://github.com/nodejs/node/pull/35986) +* [[`6349b1d673`](https://github.com/nodejs/node/commit/6349b1d673)] - **(SEMVER-MINOR)** **dns**: add a cancel() method to the promise Resolver (Szymon Marczak) [#33099](https://github.com/nodejs/node/pull/33099) +* [[`0fbade38ef`](https://github.com/nodejs/node/commit/0fbade38ef)] - **doc**: add arm64 macOS as experimental (Richard Lau) [#36189](https://github.com/nodejs/node/pull/36189) +* [[`42dfda8f78`](https://github.com/nodejs/node/commit/42dfda8f78)] - **doc**: remove stray comma in url.md (Rich Trott) [#36175](https://github.com/nodejs/node/pull/36175) +* [[`8bbdbccbb6`](https://github.com/nodejs/node/commit/8bbdbccbb6)] - **doc**: revise agent.destroy() text (Rich Trott) [#36163](https://github.com/nodejs/node/pull/36163) +* [[`545ac1fec5`](https://github.com/nodejs/node/commit/545ac1fec5)] - **doc**: fix punctuation in v8.md (Rich Trott) [#36192](https://github.com/nodejs/node/pull/36192) +* [[`a6a90af8c0`](https://github.com/nodejs/node/commit/a6a90af8c0)] 
- **doc**: add compatibility/interop technical value (Geoffrey Booth) [#35323](https://github.com/nodejs/node/pull/35323) +* [[`4ab4a99900`](https://github.com/nodejs/node/commit/4ab4a99900)] - **doc**: de-emphasize wrapping in napi\_define\_class (Gabriel Schulhof) [#36159](https://github.com/nodejs/node/pull/36159) +* [[`bb29508e8f`](https://github.com/nodejs/node/commit/bb29508e8f)] - **doc**: add link for v8.takeCoverage() (Rich Trott) [#36135](https://github.com/nodejs/node/pull/36135) +* [[`24065b92f1`](https://github.com/nodejs/node/commit/24065b92f1)] - **doc**: mark modules implementation as stable (Guy Bedford) [#35781](https://github.com/nodejs/node/pull/35781) +* [[`142cacdc63`](https://github.com/nodejs/node/commit/142cacdc63)] - **doc**: clarify text about process not responding (Rich Trott) [#36117](https://github.com/nodejs/node/pull/36117) +* [[`0ff384b0be`](https://github.com/nodejs/node/commit/0ff384b0be)] - **doc**: esm docs consolidation and reordering (Guy Bedford) [#36046](https://github.com/nodejs/node/pull/36046) +* [[`b17a83a00d`](https://github.com/nodejs/node/commit/b17a83a00d)] - **doc**: claim ABI version for Electron v13 (Shelley Vohr) [#36101](https://github.com/nodejs/node/pull/36101) +* [[`e8a8513b2c`](https://github.com/nodejs/node/commit/e8a8513b2c)] - **doc**: fix invalid link in worker\_threads.md (Rich Trott) [#36109](https://github.com/nodejs/node/pull/36109) +* [[`cd33594a0d`](https://github.com/nodejs/node/commit/cd33594a0d)] - **doc**: move shigeki to emeritus (Rich Trott) [#36093](https://github.com/nodejs/node/pull/36093) +* [[`eefc6aa6c9`](https://github.com/nodejs/node/commit/eefc6aa6c9)] - **doc**: document the error when cwd not exists in child\_process.spawn 
(FeelyChau) [#34505](https://github.com/nodejs/node/pull/34505) +* [[`841a2812d0`](https://github.com/nodejs/node/commit/841a2812d0)] - **doc**: fix typo in debugger.md (Rich Trott) [#36066](https://github.com/nodejs/node/pull/36066) +* [[`500e709439`](https://github.com/nodejs/node/commit/500e709439)] - **doc**: update list styles for remark-parse@9 rendering (Rich Trott) [#36049](https://github.com/nodejs/node/pull/36049) +* [[`a8dab217eb`](https://github.com/nodejs/node/commit/a8dab217eb)] - **doc,url**: fix url.hostname example (Rishabh Mehan) [#33735](https://github.com/nodejs/node/pull/33735) +* [[`e48ec703ba`](https://github.com/nodejs/node/commit/e48ec703ba)] - **domain**: improve deprecation warning text for DEP0097 (Anna Henningsen) [#36136](https://github.com/nodejs/node/pull/36136) +* [[`bcbf176c22`](https://github.com/nodejs/node/commit/bcbf176c22)] - **errors**: refactor to use more primordials (Antoine du Hamel) [#36167](https://github.com/nodejs/node/pull/36167) +* [[`66788970ac`](https://github.com/nodejs/node/commit/66788970ac)] - **esm**: refactor to use more primordials (Antoine du Hamel) [#36019](https://github.com/nodejs/node/pull/36019) +* [[`9ce9b016e6`](https://github.com/nodejs/node/commit/9ce9b016e6)] - **(SEMVER-MINOR)** **events**: add max listener warning for EventTarget (James M Snell) [#36001](https://github.com/nodejs/node/pull/36001) +* [[`1550073dbc`](https://github.com/nodejs/node/commit/1550073dbc)] - **events**: disabled manual construction AbortSignal (raisinten) [#36094](https://github.com/nodejs/node/pull/36094) +* [[`8a6cabbb23`](https://github.com/nodejs/node/commit/8a6cabbb23)] - **events**: port some wpt tests (Ethan Arrowood) 
[#34169](https://github.com/nodejs/node/pull/34169) +* [[`3691eccf0a`](https://github.com/nodejs/node/commit/3691eccf0a)] - **fs**: remove experimental from promises.rmdir recursive (Anders Kaseorg) [#36131](https://github.com/nodejs/node/pull/36131) +* [[`76b1863240`](https://github.com/nodejs/node/commit/76b1863240)] - **fs**: filehandle read now accepts object as argument (Nikola Glavina) [#34180](https://github.com/nodejs/node/pull/34180) +* [[`2fdf509268`](https://github.com/nodejs/node/commit/2fdf509268)] - **http**: fix typo in comment (Hollow Man) [#36193](https://github.com/nodejs/node/pull/36193) +* [[`8390f8a86b`](https://github.com/nodejs/node/commit/8390f8a86b)] - **(SEMVER-MINOR)** **http**: add support for abortsignal to http.request (Benjamin Gruenbaum) [#36048](https://github.com/nodejs/node/pull/36048) +* [[`387d92fd0e`](https://github.com/nodejs/node/commit/387d92fd0e)] - **http**: onFinish will not be triggered again when finished (rickyes) [#35845](https://github.com/nodejs/node/pull/35845) +* [[`48bf59bb8b`](https://github.com/nodejs/node/commit/48bf59bb8b)] - **http2**: add support for AbortSignal to http2Session.request (Madara Uchiha) [#36070](https://github.com/nodejs/node/pull/36070) +* [[`8a0c3b9c76`](https://github.com/nodejs/node/commit/8a0c3b9c76)] - **http2**: refactor to use more primordials (Antoine du Hamel) [#36142](https://github.com/nodejs/node/pull/36142) +* [[`f0aed8c01c`](https://github.com/nodejs/node/commit/f0aed8c01c)] - **http2**: add support for TypedArray to getUnpackedSettings (Antoine du Hamel) [#36141](https://github.com/nodejs/node/pull/36141) +* [[`9c6be3cc90`](https://github.com/nodejs/node/commit/9c6be3cc90)] - **(SEMVER-MINOR)** **http2**: allow setting the local 
window size of a session (Yongsheng Zhang) [#35978](https://github.com/nodejs/node/pull/35978) +* [[`0b40568afe`](https://github.com/nodejs/node/commit/0b40568afe)] - **http2**: delay session.receive() by a tick (Szymon Marczak) [#35985](https://github.com/nodejs/node/pull/35985) +* [[`1a4d43f840`](https://github.com/nodejs/node/commit/1a4d43f840)] - **lib**: refactor to use more primordials (Antoine du Hamel) [#36140](https://github.com/nodejs/node/pull/36140) +* [[`d6ea12e003`](https://github.com/nodejs/node/commit/d6ea12e003)] - **lib**: set abort-controller toStringTag (Benjamin Gruenbaum) [#36115](https://github.com/nodejs/node/pull/36115) +* [[`82f1cde57e`](https://github.com/nodejs/node/commit/82f1cde57e)] - **lib**: remove primordials.SafePromise (Antoine du Hamel) [#36149](https://github.com/nodejs/node/pull/36149) +* [[`15ff155c12`](https://github.com/nodejs/node/commit/15ff155c12)] - **(SEMVER-MINOR)** **lib**: add throws option to fs.f/l/statSync (Andrew Casey) [#33716](https://github.com/nodejs/node/pull/33716) +* [[`75707f45eb`](https://github.com/nodejs/node/commit/75707f45eb)] - **lib,tools**: enforce access to prototype from primordials (Antoine du Hamel) [#36025](https://github.com/nodejs/node/pull/36025) +* [[`79b2ba6744`](https://github.com/nodejs/node/commit/79b2ba6744)] - **n-api**: clean up binding creation (Gabriel Schulhof) [#36170](https://github.com/nodejs/node/pull/36170) +* [[`5698cc08f0`](https://github.com/nodejs/node/commit/5698cc08f0)] - **n-api**: fix test\_async\_context warnings (Gabriel Schulhof) [#36171](https://github.com/nodejs/node/pull/36171) +* [[`3d623d850c`](https://github.com/nodejs/node/commit/3d623d850c)] - **n-api**: improve consistency of how we get context (Michael 
Dawson) [#36068](https://github.com/nodejs/node/pull/36068) +* [[`89da0c3353`](https://github.com/nodejs/node/commit/89da0c3353)] - **n-api**: factor out calling pattern (Gabriel Schulhof) [#36113](https://github.com/nodejs/node/pull/36113) +* [[`5c0ddbca01`](https://github.com/nodejs/node/commit/5c0ddbca01)] - **net**: fix invalid write after end error (Robert Nagy) [#36043](https://github.com/nodejs/node/pull/36043) +* [[`85c85d368a`](https://github.com/nodejs/node/commit/85c85d368a)] - **(SEMVER-MINOR)** **path**: add `path/posix` and `path/win32` alias modules (ExE Boss) [#34962](https://github.com/nodejs/node/pull/34962) +* [[`ed8af3a8b7`](https://github.com/nodejs/node/commit/ed8af3a8b7)] - **perf_hooks**: make nodeTiming a first-class object (Momtchil Momtchev) [#35977](https://github.com/nodejs/node/pull/35977) +* [[`eb9295b583`](https://github.com/nodejs/node/commit/eb9295b583)] - **promise**: emit error on domain unhandled rejections (Benjamin Gruenbaum) [#36082](https://github.com/nodejs/node/pull/36082) +* [[`59af919d6b`](https://github.com/nodejs/node/commit/59af919d6b)] - **querystring**: reduce memory usage by Int8Array (sapics) [#34179](https://github.com/nodejs/node/pull/34179) +* [[`d1baae3640`](https://github.com/nodejs/node/commit/d1baae3640)] - **(SEMVER-MINOR)** **readline**: add getPrompt to get the current prompt (Mattias Runge-Broberg) [#33675](https://github.com/nodejs/node/pull/33675) +* [[`6d1b1c7ad0`](https://github.com/nodejs/node/commit/6d1b1c7ad0)] - **src**: integrate URL::href() and use in inspector (Daijiro Wachi) [#35912](https://github.com/nodejs/node/pull/35912) +* [[`7086f2e653`](https://github.com/nodejs/node/commit/7086f2e653)] - **src**: refactor using-declarations 
node\_env\_var.cc (raisinten) [#36128](https://github.com/nodejs/node/pull/36128) +* [[`122797e87f`](https://github.com/nodejs/node/commit/122797e87f)] - **src**: remove duplicate logic for getting buffer (Yash Ladha) [#34553](https://github.com/nodejs/node/pull/34553) +* [[`5729478509`](https://github.com/nodejs/node/commit/5729478509)] - **(SEMVER-MINOR)** **src**: add loop idle time in diagnostic report (Gireesh Punathil) [#35940](https://github.com/nodejs/node/pull/35940) +* [[`a81dc9ae18`](https://github.com/nodejs/node/commit/a81dc9ae18)] - **src,crypto**: refactoring of crypto\_context, SecureContext (James M Snell) [#35665](https://github.com/nodejs/node/pull/35665) +* [[`5fa35f6934`](https://github.com/nodejs/node/commit/5fa35f6934)] - **test**: update comments in test-fs-read-offset-null (Rich Trott) [#36152](https://github.com/nodejs/node/pull/36152) +* [[`73bb54af77`](https://github.com/nodejs/node/commit/73bb54af77)] - **test**: update wpt url and resource (Daijiro Wachi) [#36032](https://github.com/nodejs/node/pull/36032) +* [[`77b47dfd08`](https://github.com/nodejs/node/commit/77b47dfd08)] - **test**: fix typo in inspector-helper.js (Luigi Pinca) [#36127](https://github.com/nodejs/node/pull/36127) +* [[`474664963c`](https://github.com/nodejs/node/commit/474664963c)] - **test**: deflake test-http-destroyed-socket-write2 (Luigi Pinca) [#36120](https://github.com/nodejs/node/pull/36120) +* [[`f9bbd35937`](https://github.com/nodejs/node/commit/f9bbd35937)] - **test**: make test-http2-client-jsstream-destroy.js reliable (Rich Trott) [#36129](https://github.com/nodejs/node/pull/36129) +* [[`c19df17acb`](https://github.com/nodejs/node/commit/c19df17acb)] - **test**: add test for fs.read when offset key is null 
(mayank agarwal) [#35918](https://github.com/nodejs/node/pull/35918) +* [[`9405cddbee`](https://github.com/nodejs/node/commit/9405cddbee)] - **test**: improve test-stream-duplex-readable-end (Luigi Pinca) [#36056](https://github.com/nodejs/node/pull/36056) +* [[`3be5e86c57`](https://github.com/nodejs/node/commit/3be5e86c57)] - **test**: add util.inspect test for null maxStringLength (Rich Trott) [#36086](https://github.com/nodejs/node/pull/36086) +* [[`6a4cc43028`](https://github.com/nodejs/node/commit/6a4cc43028)] - **test**: replace var with const (Aleksandr Krutko) [#36069](https://github.com/nodejs/node/pull/36069) +* [[`a367c0dfc2`](https://github.com/nodejs/node/commit/a367c0dfc2)] - **timers**: refactor to use more primordials (Antoine du Hamel) [#36132](https://github.com/nodejs/node/pull/36132) +* [[`a6ef92bc27`](https://github.com/nodejs/node/commit/a6ef92bc27)] - **tools**: bump unist-util-find@1.0.1 to unist-util-find@1.0.2 (Rich Trott) [#36106](https://github.com/nodejs/node/pull/36106) +* [[`2d2491284e`](https://github.com/nodejs/node/commit/2d2491284e)] - **tools**: only use 2 cores for macos action (Myles Borins) [#36169](https://github.com/nodejs/node/pull/36169) +* [[`d8fcf2c324`](https://github.com/nodejs/node/commit/d8fcf2c324)] - **tools**: remove bashisms from license builder script (Antoine du Hamel) [#36122](https://github.com/nodejs/node/pull/36122) +* [[`7e7ddb11c0`](https://github.com/nodejs/node/commit/7e7ddb11c0)] - **tools**: hide commit queue action link (Antoine du Hamel) [#36124](https://github.com/nodejs/node/pull/36124) +* [[`63494e434a`](https://github.com/nodejs/node/commit/63494e434a)] - **tools**: update doc tools to remark-parse@9.0.0 (Rich Trott) 
[#36049](https://github.com/nodejs/node/pull/36049) +* [[`bf0550ce4e`](https://github.com/nodejs/node/commit/bf0550ce4e)] - **tools**: enforce use of single quotes in editorconfig (Antoine du Hamel) [#36020](https://github.com/nodejs/node/pull/36020) +* [[`49649a499e`](https://github.com/nodejs/node/commit/49649a499e)] - **tools**: fix config serialization w/ long strings (Ole André Vadla Ravnås) [#35982](https://github.com/nodejs/node/pull/35982) +* [[`be220b213d`](https://github.com/nodejs/node/commit/be220b213d)] - **tools**: update ESLint to 7.13.0 (Luigi Pinca) [#36031](https://github.com/nodejs/node/pull/36031) +* [[`4140f491fd`](https://github.com/nodejs/node/commit/4140f491fd)] - **util**: fix to inspect getters that access this (raisinten) [#36052](https://github.com/nodejs/node/pull/36052) +* [[`baa87c1a7d`](https://github.com/nodejs/node/commit/baa87c1a7d)] - **(SEMVER-MINOR)** **util**: add `util/types` alias module (ExE Boss) [#34055](https://github.com/nodejs/node/pull/34055) +* [[`f7b2fce1c1`](https://github.com/nodejs/node/commit/f7b2fce1c1)] - **vm**: refactor to use more primordials (Antoine du Hamel) [#36023](https://github.com/nodejs/node/pull/36023) +* [[`4e3883ec2d`](https://github.com/nodejs/node/commit/4e3883ec2d)] - **win,build,tools**: support VS prerelease (Baruch Odem) [#36033](https://github.com/nodejs/node/pull/36033) + ## 2020-11-16, Version 15.2.1 (Current), @targos diff --git a/src/node_version.h b/src/node_version.h index 6b087dceecdaa7..afd6cea22f3227 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -23,13 +23,13 @@ #define SRC_NODE_VERSION_H_ #define NODE_MAJOR_VERSION 15 -#define NODE_MINOR_VERSION 2 -#define NODE_PATCH_VERSION 2 +#define NODE_MINOR_VERSION 3 +#define NODE_PATCH_VERSION 0 #define 
NODE_VERSION_IS_LTS 0 #define NODE_VERSION_LTS_CODENAME "" -#define NODE_VERSION_IS_RELEASE 0 +#define NODE_VERSION_IS_RELEASE 1 #ifndef NODE_STRINGIFY #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)