diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 63e76a27fab182..f85c224898db30 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -14,6 +14,3 @@ Contributors guide: https://github.com/nodejs/node/blob/master/CONTRIBUTING.md - [ ] tests and/or benchmarks are included - [ ] documentation is changed or added - [ ] commit message follows [commit guidelines](https://github.com/nodejs/node/blob/master/doc/guides/contributing/pull-requests.md#commit-message-guidelines) - -##### Affected core subsystem(s) - diff --git a/CHANGELOG.md b/CHANGELOG.md index ccdaea301e600e..ace79efd98eb37 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,7 +29,9 @@ release. -9.7.0
+<a href="doc/changelogs/CHANGELOG_V9.md#9.8.0">9.8.0</a><br/>
+<a href="doc/changelogs/CHANGELOG_V9.md#9.7.1">9.7.1</a><br/>
+<a href="doc/changelogs/CHANGELOG_V9.md#9.7.0">9.7.0</a><br/>
<a href="doc/changelogs/CHANGELOG_V9.md#9.6.1">9.6.1</a><br/>
<a href="doc/changelogs/CHANGELOG_V9.md#9.6.0">9.6.0</a><br/>
<a href="doc/changelogs/CHANGELOG_V9.md#9.5.0">9.5.0</a><br/>
diff --git a/Makefile b/Makefile index 35ce00a043065f..e9afdc87e72758 100644 --- a/Makefile +++ b/Makefile @@ -172,13 +172,13 @@ coverage-build: all $(NODE) ./deps/npm install istanbul-merge --no-save --no-package-lock; fi if [ ! -d node_modules/nyc ]; then \ $(NODE) ./deps/npm install nyc --no-save --no-package-lock; fi - if [ ! -d gcovr ]; then git clone --depth=1 \ + if [ ! -d gcovr ]; then git clone -b 3.4 --depth=1 \ --single-branch git://github.com/gcovr/gcovr.git; fi if [ ! -d build ]; then git clone --depth=1 \ --single-branch https://github.com/nodejs/build.git; fi if [ ! -f gcovr/scripts/gcovr.orig ]; then \ (cd gcovr && patch -N -p1 < \ - "$(CURDIR)/build/jenkins/scripts/coverage/gcovr-patches.diff"); fi + "$(CURDIR)/build/jenkins/scripts/coverage/gcovr-patches-3.4.diff"); fi if [ -d lib_ ]; then $(RM) -r lib; mv lib_ lib; fi mv lib lib_ $(NODE) ./node_modules/.bin/nyc instrument --extension .js --extension .mjs lib_/ lib/ @@ -1056,15 +1056,18 @@ lint-md-clean: $(RM) -r tools/remark-preset-lint-node/node_modules $(RM) tools/.*mdlintstamp -.PHONY: lint-md-build -lint-md-build: - @if [ ! -d tools/remark-cli/node_modules ]; then \ - echo "Markdown linter: installing remark-cli into tools/"; \ - cd tools/remark-cli && $(call available-node,$(run-npm-install)) fi - @if [ ! -d tools/remark-preset-lint-node/node_modules ]; then \ - echo "Markdown linter: installing remark-preset-lint-node into tools/"; \ - cd tools/remark-preset-lint-node && $(call available-node,$(run-npm-install)) fi +tools/remark-cli/node_modules: tools/remark-cli/package.json + @echo "Markdown linter: installing remark-cli into tools/" + @cd tools/remark-cli && $(call available-node,$(run-npm-install)) + +tools/remark-preset-lint-node/node_modules: \ + tools/remark-preset-lint-node/package.json + @echo "Markdown linter: installing remark-preset-lint-node into tools/" + @cd tools/remark-preset-lint-node && $(call available-node,$(run-npm-install)) +.PHONY: lint-md-build +lint-md-build: tools/remark-cli/node_modules \ + tools/remark-preset-lint-node/node_modules .PHONY: lint-md ifneq ("","$(wildcard tools/remark-cli/node_modules/)") diff --git a/README.md b/README.md index 9dda47e821fc68..1e6be9a38cbe1b 100644 --- a/README.md +++ b/README.md @@ -425,6 +425,8 @@ For more information about the governance of the Node.js project, see **Julien Gilli** <jgilli@nodejs.org> * [mmarchini](https://github.com/mmarchini) - **Matheus Marchini** <matheus@sthima.com> +* [MoonBall](https://github.com/MoonBall) - +**Chen Gang** <gangc.cxy@foxmail.com> * [mscdex](https://github.com/mscdex) - **Brian White** <mscdex@mscdex.net> * [MylesBorins](https://github.com/MylesBorins) - diff --git a/benchmark/streams/pipe.js b/benchmark/streams/pipe.js index a7d67b7d6908c8..4baeeb2d2e7706 100644 --- a/benchmark/streams/pipe.js +++ b/benchmark/streams/pipe.js @@ -8,7 +8,7 @@ const bench = common.createBenchmark(main, { }); function main({ n }) { - const b = new Buffer(1024); + const b = Buffer.alloc(1024); const r = new Readable(); const w = new Writable(); diff --git a/benchmark/streams/readable-bigread.js b/benchmark/streams/readable-bigread.js index 99213afaeb8f28..62d1af874fb22a 100644 --- a/benchmark/streams/readable-bigread.js +++ b/benchmark/streams/readable-bigread.js @@ -8,7 +8,7 @@ const bench = common.createBenchmark(main, { }); function main({ n }) { - const b = new Buffer(32); + const b = Buffer.alloc(32); const s = new Readable(); function noop() {} 
s._read = noop; diff --git a/benchmark/streams/readable-bigunevenread.js b/benchmark/streams/readable-bigunevenread.js index e2f2c1406a1da0..e13769189a69da 100644 --- a/benchmark/streams/readable-bigunevenread.js +++ b/benchmark/streams/readable-bigunevenread.js @@ -8,7 +8,7 @@ const bench = common.createBenchmark(main, { }); function main({ n }) { - const b = new Buffer(32); + const b = Buffer.alloc(32); const s = new Readable(); function noop() {} s._read = noop; diff --git a/benchmark/streams/readable-readall.js b/benchmark/streams/readable-readall.js index 5715e42017c795..3c177ec4c39988 100644 --- a/benchmark/streams/readable-readall.js +++ b/benchmark/streams/readable-readall.js @@ -8,7 +8,7 @@ const bench = common.createBenchmark(main, { }); function main({ n }) { - const b = new Buffer(32); + const b = Buffer.alloc(32); const s = new Readable(); function noop() {} s._read = noop; diff --git a/benchmark/streams/readable-unevenread.js b/benchmark/streams/readable-unevenread.js index d7a408b1c56a31..f8b501ab4729ec 100644 --- a/benchmark/streams/readable-unevenread.js +++ b/benchmark/streams/readable-unevenread.js @@ -8,7 +8,7 @@ const bench = common.createBenchmark(main, { }); function main({ n }) { - const b = new Buffer(32); + const b = Buffer.alloc(32); const s = new Readable(); function noop() {} s._read = noop; diff --git a/common.gypi b/common.gypi index 5752c17168d50a..aaae133e1ac188 100644 --- a/common.gypi +++ b/common.gypi @@ -27,7 +27,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.20', + 'v8_embedder_string': '-node.21', # Enable disassembler for `--print-code` v8 options 'v8_enable_disassembler': 1, diff --git a/deps/openssl/openssl.gyp b/deps/openssl/openssl.gyp index 88994a12b22097..170e5fb68bb503 100644 --- a/deps/openssl/openssl.gyp +++ b/deps/openssl/openssl.gyp @@ -157,6 +157,9 @@ }, { 'defines': ['<@(openssl_default_defines_not_win)'], 'cflags': ['-Wno-missing-field-initializers'], + 'xcode_settings': { + 'WARNING_CFLAGS': ['-Wno-missing-field-initializers'], + }, 'conditions': [ ['OS=="mac"', { 'defines': ['<@(openssl_default_defines_mac)'], diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc index dc211962685b77..950436493caa04 100644 --- a/deps/v8/src/bootstrapper.cc +++ b/deps/v8/src/bootstrapper.cc @@ -5299,6 +5299,11 @@ Genesis::Genesis( if (!InstallDebuggerNatives()) return; } + if (FLAG_disallow_code_generation_from_strings) { + native_context()->set_allow_code_gen_from_strings( + isolate->heap()->false_value()); + } + ConfigureUtilsObject(context_type); // Check that the script context table is empty except for the 'this' binding. 
diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h index bcb5a2c982b5a7..fe175c706accb3 100644 --- a/deps/v8/src/flag-definitions.h +++ b/deps/v8/src/flag-definitions.h @@ -732,6 +732,8 @@ DEFINE_BOOL(expose_trigger_failure, false, "expose trigger-failure extension") DEFINE_INT(stack_trace_limit, 10, "number of stack frames to capture") DEFINE_BOOL(builtins_in_stack_traces, false, "show built-in functions in stack traces") +DEFINE_BOOL(disallow_code_generation_from_strings, false, + "disallow eval and friends") // builtins.cc DEFINE_BOOL(allow_unsafe_function_constructor, false, diff --git a/deps/v8/test/mjsunit/disallow-codegen-from-strings.js b/deps/v8/test/mjsunit/disallow-codegen-from-strings.js new file mode 100644 index 00000000000000..30d1b967d5f128 --- /dev/null +++ b/deps/v8/test/mjsunit/disallow-codegen-from-strings.js @@ -0,0 +1,9 @@ +// Copyright 2017 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --disallow-code-generation-from-strings + +assertThrows("1 + 1", EvalError); +assertThrows(() => eval("1 + 1"), EvalError); +assertThrows(() => Function("x", "return x + 1"), EvalError); diff --git a/doc/api/_toc.md b/doc/api/_toc.md index 420fb362fa537a..b8002fb78163ac 100644 --- a/doc/api/_toc.md +++ b/doc/api/_toc.md @@ -1,5 +1,9 @@ @// NB(chrisdickinson): if you move this file, be sure to update tools/doc/html.js to @// point at the new location. +@// tools/doc/html.js to point at the new location. + + + * [About these Docs](documentation.html) * [Usage & Example](synopsis.html) diff --git a/doc/api/addons.md b/doc/api/addons.md index 3641a2d6ba224a..7b7f780a74822d 100644 --- a/doc/api/addons.md +++ b/doc/api/addons.md @@ -34,8 +34,9 @@ involving knowledge of several components and APIs : - Node.js includes a number of other statically linked libraries including OpenSSL. These other libraries are located in the `deps/` directory in the - Node.js source tree. Only the V8 and OpenSSL symbols are purposefully - re-exported by Node.js and may be used to various extents by Addons. + Node.js source tree. Only the libuv, OpenSSL, V8 and zlib symbols are + purposefully re-exported by Node.js and may be used to various extents by + Addons. See [Linking to Node.js' own dependencies][] for additional information. All of the following examples are available for [download][] and may diff --git a/doc/api/cli.md b/doc/api/cli.md index 6bae0a67c3d101..e76fa057c2472d 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -235,6 +235,14 @@ added: v7.7.0 A comma separated list of categories that should be traced when trace event tracing is enabled using `--trace-events-enabled`. +### `--trace-event-file-pattern` + + +Template string specifying the filepath for the trace event data, it +supports `${rotation}` and `${pid}`. + ### `--zero-fill-buffers` * `path` {string|Buffer|URL} +* Returns: {boolean} Synchronous version of [`fs.exists()`][]. Returns `true` if the path exists, `false` otherwise. @@ -1546,6 +1549,7 @@ added: v0.1.95 --> * `fd` {integer} +* Returns: {fs.Stats} Synchronous fstat(2). Returns an instance of [`fs.Stats`][]. @@ -1820,6 +1824,7 @@ changes: --> * `path` {string|Buffer|URL} +* Returns: {fs.Stats} Synchronous lstat(2). Returns an instance of [`fs.Stats`][]. 
@@ -1945,6 +1950,7 @@ added: v5.10.0 * `prefix` {string} * `options` {string|Object} * `encoding` {string} **Default:** `'utf8'` +* Returns: {string} The synchronous version of [`fs.mkdtemp()`][]. Returns the created folder path. @@ -2069,6 +2075,7 @@ changes: * `path` {string|Buffer|URL} * `flags` {string|number} * `mode` {integer} **Default:** `0o666` +* Returns: {number} Synchronous version of [`fs.open()`][]. Returns an integer representing the file descriptor. @@ -2159,6 +2166,7 @@ changes: * `path` {string|Buffer|URL} * `options` {string|Object} * `encoding` {string} **Default:** `'utf8'` +* Returns: {Array} An array of filenames Synchronous readdir(3). Returns an array of filenames excluding `'.'` and `'..'`. @@ -2260,6 +2268,7 @@ changes: * `options` {Object|string} * `encoding` {string|null} **Default:** `null` * `flag` {string} **Default:** `'r'` +* Returns: {string|Buffer} Synchronous version of [`fs.readFile()`][]. Returns the contents of the `path`. @@ -2320,6 +2329,7 @@ changes: * `path` {string|Buffer|URL} * `options` {string|Object} * `encoding` {string} **Default:** `'utf8'` +* Returns: {string|Buffer} Synchronous readlink(2). Returns the symbolic link's string value. @@ -2342,6 +2352,7 @@ changes: * `offset` {integer} * `length` {integer} * `position` {integer} +* Returns: {number} Synchronous version of [`fs.read()`][]. Returns the number of `bytesRead`. @@ -2452,6 +2463,7 @@ changes: * `path` {string|Buffer|URL} * `options` {string|Object} * `encoding` {string} **Default:** `'utf8'` +* Returns: {string|Buffer} Synchronously computes the canonical pathname by resolving `.`, `..` and symbolic links. @@ -2482,6 +2494,7 @@ added: v9.2.0 * `path` {string|Buffer|URL} * `options` {string|Object} * `encoding` {string} **Default:** `'utf8'` +* Returns: {string|Buffer} Synchronous realpath(3). @@ -2516,8 +2529,19 @@ changes: * `callback` {Function} * `err` {Error} -Asynchronous rename(2). No arguments other than a possible exception are given -to the completion callback. +Asynchronously rename file at `oldPath` to the pathname provided +as `newPath`. In the case that `newPath` already exists, it will +be overwritten. No arguments other than a possible exception are +given to the completion callback. + +See also: rename(2). + +```js +fs.rename('oldFile.txt', 'newFile.txt', (err) => { + if (err) throw err; + console.log('Rename complete!'); +}); +``` ## fs.renameSync(oldPath, newPath) * `path` {string|Buffer|URL} +* Returns: {fs.Stats} Synchronous stat(2). Returns an instance of [`fs.Stats`][]. @@ -3197,6 +3222,7 @@ changes: * `offset` {integer} * `length` {integer} * `position` {integer} +* Returns: {number} ## fs.writeSync(fd, string[, position[, encoding]]) The default evaluator will, by default, assign the result of the most recently evaluated expression to the special variable `_` (underscore). @@ -162,6 +168,17 @@ Expression assignment to _ now disabled. 4 ``` +Similarly, `_error` will refer to the last seen error, if there was any. +Explicitly setting `_error` to a value will disable this behavior. 
+ + +```js +> throw new Error('foo'); +Error: foo +> _error.message +'foo' +``` + ### Custom Evaluation Functions When a new `repl.REPLServer` is created, a custom evaluation function may be diff --git a/doc/api/stream.md b/doc/api/stream.md index 424f3cfc33924c..b15d4745266588 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -323,6 +323,9 @@ The `'unpipe'` event is emitted when the [`stream.unpipe()`][] method is called on a [Readable][] stream, removing this [Writable][] from its set of destinations. +This is also emitted in case this [Writable][] stream emits an error when a +[Readable][] stream pipes into it. + ```js const writer = getWritableStreamSomehow(); const reader = getReadableStreamSomehow(); @@ -1496,6 +1499,9 @@ the callback and passing the error as the first argument. This will cause an on how the stream is being used. Using the callback ensures consistent and predictable handling of errors. +If a Readable stream pipes into a Writable stream when Writable emits an +error, the Readable stream will be unpiped. + ```js const { Writable } = require('stream'); diff --git a/doc/api/tracing.md b/doc/api/tracing.md index e03477b1adf20c..13caa2a509bf74 100644 --- a/doc/api/tracing.md +++ b/doc/api/tracing.md @@ -19,3 +19,16 @@ node --trace-events-enabled --trace-event-categories v8,node,node.async_hooks se Running Node.js with tracing enabled will produce log files that can be opened in the [`chrome://tracing`](https://www.chromium.org/developers/how-tos/trace-event-profiling-tool) tab of Chrome. + +The logging file is by default called `node_trace.${rotation}.log`, where +`${rotation}` is an incrementing log-rotation id. The filepath pattern can +be specified with `--trace-event-file-pattern` that accepts a template +string that supports `${rotation}` and `${pid}`. For example: + +```txt +node --trace-events-enabled --trace-event-file-pattern '${pid}-${rotation}.log' server.js +``` + +Starting with Node 10.0.0, the tracing system uses the same time source as the +one used by `process.hrtime()` however the trace-event timestamps are expressed +in microseconds, unlike `process.hrtime()` which returns nanoseconds. diff --git a/doc/api/url.md b/doc/api/url.md index 38cb95d6097ae9..a423061f7b2945 100644 --- a/doc/api/url.md +++ b/doc/api/url.md @@ -971,6 +971,20 @@ changes: The `url.format()` method returns a formatted URL string derived from `urlObject`. +```js +url.format({ + protocol: 'https', + hostname: 'example.com', + pathname: '/some/path', + query: { + page: 1, + format: 'json' + } +}); + +// => 'https://example.com/some/path?page=1&format=json' +``` + If `urlObject` is not an object or a string, `url.format()` will throw a [`TypeError`][]. diff --git a/doc/api/util.md b/doc/api/util.md index 7475e36f1c2013..976621ddb383e3 100644 --- a/doc/api/util.md +++ b/doc/api/util.md @@ -190,6 +190,7 @@ contains circular references. Similar to `util.inspect()` without options. This will show the full object not including non-enumerable symbols and properties. * `%%` - single percent sign (`'%'`). This does not consume an argument. +* Returns: {string} The formatted string If the placeholder does not have a corresponding argument, the placeholder is not replaced. @@ -491,7 +492,7 @@ added: v9.0.0 * `val2` {any} * Returns: {boolean} -Returns `true` if there is deep strict equality between `val` and `val2`. +Returns `true` if there is deep strict equality between `val1` and `val2`. Otherwise, returns `false`. 
See [`assert.deepStrictEqual()`][] for more information about deep strict @@ -806,6 +807,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Internal alias for [`Array.isArray`][]. @@ -831,6 +833,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is a `Boolean`. Otherwise, returns `false`. @@ -854,6 +857,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated: Use [`Buffer.isBuffer()`][] instead. * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is a `Buffer`. Otherwise, returns `false`. @@ -877,6 +881,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is a `Date`. Otherwise, returns `false`. @@ -900,6 +905,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is an [`Error`][]. Otherwise, returns `false`. @@ -939,6 +945,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is a `Function`. Otherwise, returns `false`. @@ -966,6 +973,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is strictly `null`. Otherwise, returns `false`. @@ -990,6 +998,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is `null` or `undefined`. Otherwise, returns `false`. @@ -1014,6 +1023,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is a `Number`. Otherwise, returns `false`. @@ -1039,6 +1049,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is strictly an `Object` **and** not a `Function`. Otherwise, returns `false`. @@ -1065,6 +1076,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is a primitive type. Otherwise, returns `false`. @@ -1101,6 +1113,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is a `RegExp`. Otherwise, returns `false`. @@ -1124,6 +1137,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is a `string`. Otherwise, returns `false`. @@ -1149,6 +1163,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is a `Symbol`. Otherwise, returns `false`. @@ -1172,6 +1187,7 @@ deprecated: v4.0.0 > Stability: 0 - Deprecated * `object` {any} +* Returns: {boolean} Returns `true` if the given `object` is `undefined`. Otherwise, returns `false`. diff --git a/doc/changelogs/CHANGELOG_V9.md b/doc/changelogs/CHANGELOG_V9.md index f30a722abd655b..35107dc996d63f 100644 --- a/doc/changelogs/CHANGELOG_V9.md +++ b/doc/changelogs/CHANGELOG_V9.md @@ -8,6 +8,7 @@ +9.8.0
<a href="#9.7.1">9.7.1</a><br/>
<a href="#9.7.0">9.7.0</a><br/>
<a href="#9.6.1">9.6.1</a><br/>
@@ -34,6 +35,99 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + +## 2018-03-07, Version 9.8.0 (Current), @MylesBorins + +### Notable Changes + +* **crypto**: + - add cert.fingerprint256 as SHA256 fingerprint (Hannes Magnusson) [#17690](https://github.com/nodejs/node/pull/17690) +* **http2**: + - Fixed issues with aborted connections in the HTTP/2 implementation (Anna Henningsen) [#18987](https://github.com/nodejs/node/pull/18987) [#19002](https://github.com/nodejs/node/pull/19002) +* **loader**: + - --inspect-brk now works properly for esmodules (Gus Caplan) [#18949](https://github.com/nodejs/node/pull/18949) +* **src**: + - make process.dlopen() load well-known symbol (Ben Noordhuis) [#18934](https://github.com/nodejs/node/pull/18934) +* **trace_events**: + - add file pattern cli option (Andreas Madsen) [#18480](https://github.com/nodejs/node/pull/18480) +* **Added new collaborators** + - [MoonBall](https://github.com/MoonBall) Chen Gang + +### Commits + +* [[`6ae2cafde3`](https://github.com/nodejs/node/commit/6ae2cafde3)] - **buffer**: coerce offset to integer (Ruben Bridgewater) [#18215](https://github.com/nodejs/node/pull/18215) +* [[`6d17383041`](https://github.com/nodejs/node/commit/6d17383041)] - **buffer**: fix typo in lib/buffer.js (Ujjwal Sharma) [#19126](https://github.com/nodejs/node/pull/19126) +* [[`4b34b2e185`](https://github.com/nodejs/node/commit/4b34b2e185)] - **build**: fix gocvr version used for coverage (Michael Dawson) [#19094](https://github.com/nodejs/node/pull/19094) +* [[`a938e52ffe`](https://github.com/nodejs/node/commit/a938e52ffe)] - **build**: disable openssl build warnings on macos (Ben Noordhuis) [#19046](https://github.com/nodejs/node/pull/19046) +* [[`44d80c5620`](https://github.com/nodejs/node/commit/44d80c5620)] - **build**: fix coverage after gcovr update (killagu) [#18958](https://github.com/nodejs/node/pull/18958) +* [[`28a5362e83`](https://github.com/nodejs/node/commit/28a5362e83)] - **build**: fix lint-md-build dependency (Joyee Cheung) [#18981](https://github.com/nodejs/node/pull/18981) +* [[`e74e422a53`](https://github.com/nodejs/node/commit/e74e422a53)] - **(SEMVER-MINOR)** **crypto**: add cert.fingerprint256 as SHA256 fingerprint (Hannes Magnusson) [#17690](https://github.com/nodejs/node/pull/17690) +* [[`056001dc8f`](https://github.com/nodejs/node/commit/056001dc8f)] - **(SEMVER-MINOR)** **deps**: cherry-pick 0bcb1d6f from upstream V8 (Jakob Kummerow) [#18212](https://github.com/nodejs/node/pull/18212) +* [[`1fadb2edb4`](https://github.com/nodejs/node/commit/1fadb2edb4)] - **doc**: fix/add link to Android info (Vse Mozhet Byt) [#19004](https://github.com/nodejs/node/pull/19004) +* [[`68524610f2`](https://github.com/nodejs/node/commit/68524610f2)] - **doc**: remove subsystem from pull request template (Rich Trott) [#19125](https://github.com/nodejs/node/pull/19125) +* [[`d3a70e9cd4`](https://github.com/nodejs/node/commit/d3a70e9cd4)] - **doc**: remove tentativeness in pull-requests.md (Rich Trott) [#19123](https://github.com/nodejs/node/pull/19123) +* 
[[`f03079fce6`](https://github.com/nodejs/node/commit/f03079fce6)] - **doc**: update cc list (Ruben Bridgewater) [#19099](https://github.com/nodejs/node/pull/19099) +* [[`9d2de16b13`](https://github.com/nodejs/node/commit/9d2de16b13)] - **doc**: add introduced\_in metadata to \_toc.md (Rich Trott) [#19113](https://github.com/nodejs/node/pull/19113) +* [[`ae2dabb8fc`](https://github.com/nodejs/node/commit/ae2dabb8fc)] - **doc**: new team for bundlers or delivery of Node.js (Michael Dawson) [#19098](https://github.com/nodejs/node/pull/19098) +* [[`0e4f4266a1`](https://github.com/nodejs/node/commit/0e4f4266a1)] - **doc**: add simple example to rename function (punteek) [#18812](https://github.com/nodejs/node/pull/18812) +* [[`e42600fc4b`](https://github.com/nodejs/node/commit/e42600fc4b)] - **doc**: add missing `Returns` in fs & util (Sho Miyamoto) [#18775](https://github.com/nodejs/node/pull/18775) +* [[`4ecf5bbe74`](https://github.com/nodejs/node/commit/4ecf5bbe74)] - **doc**: fix a typo in util.isDeepStrictEqual (Sho Miyamoto) [#18775](https://github.com/nodejs/node/pull/18775) +* [[`cab6c8e95c`](https://github.com/nodejs/node/commit/cab6c8e95c)] - **doc**: add URL.format() example (Zeke Sikelianos) [#18888](https://github.com/nodejs/node/pull/18888) +* [[`a4462b7944`](https://github.com/nodejs/node/commit/a4462b7944)] - **doc**: fix n-api asynchronous threading docs (Eric Bickle) [#19073](https://github.com/nodejs/node/pull/19073) +* [[`bfa894cf37`](https://github.com/nodejs/node/commit/bfa894cf37)] - **doc**: add MoonBall to collaborators (Chen Gang) [#19109](https://github.com/nodejs/node/pull/19109) +* [[`77154cd65d`](https://github.com/nodejs/node/commit/77154cd65d)] - **doc**: update list of re-exported symbols (Richard Lau) [#19013](https://github.com/nodejs/node/pull/19013) +* [[`459f2095a1`](https://github.com/nodejs/node/commit/459f2095a1)] - **doc**: Readable unpipe on Writable error event (George Sapkin) [#18080](https://github.com/nodejs/node/pull/18080) +* [[`68c1337819`](https://github.com/nodejs/node/commit/68c1337819)] - **doc**: add RegExp Unicode Property Escapes to intl (Vse Mozhet Byt) [#19052](https://github.com/nodejs/node/pull/19052) +* [[`71d09ecbf1`](https://github.com/nodejs/node/commit/71d09ecbf1)] - **doc**: make the background section concise and improve its formality (Wilson) [#18928](https://github.com/nodejs/node/pull/18928) +* [[`951054004d`](https://github.com/nodejs/node/commit/951054004d)] - **doc**: lowercase primitives in test/common/README.md (Vse Mozhet Byt) [#18875](https://github.com/nodejs/node/pull/18875) +* [[`5b8c97f6bc`](https://github.com/nodejs/node/commit/5b8c97f6bc)] - **events**: show throw stack trace for uncaught exception (Anna Henningsen) [#19003](https://github.com/nodejs/node/pull/19003) +* [[`0789eeceb6`](https://github.com/nodejs/node/commit/0789eeceb6)] - **http**: prevent aborted event when already completed (Andrew Johnston) [#18999](https://github.com/nodejs/node/pull/18999) +* [[`ae4d83facf`](https://github.com/nodejs/node/commit/ae4d83facf)] - **http**: prevent aborted 
event when already completed (Andrew Johnston) [#18999](https://github.com/nodejs/node/pull/18999) +* [[`50d1233935`](https://github.com/nodejs/node/commit/50d1233935)] - **http2**: no stream destroy while its data is on the wire (Anna Henningsen) [#19002](https://github.com/nodejs/node/pull/19002) +* [[`551d9752c8`](https://github.com/nodejs/node/commit/551d9752c8)] - **http2**: fix flaky test-http2-https-fallback (Matteo Collina) [#19093](https://github.com/nodejs/node/pull/19093) +* [[`8bc930c269`](https://github.com/nodejs/node/commit/8bc930c269)] - **http2**: fix endless loop when writing empty string (Anna Henningsen) [#18924](https://github.com/nodejs/node/pull/18924) +* [[`aa0fca9426`](https://github.com/nodejs/node/commit/aa0fca9426)] - **http2**: use original error for cancelling pending streams (Anna Henningsen) [#18988](https://github.com/nodejs/node/pull/18988) +* [[`447136999d`](https://github.com/nodejs/node/commit/447136999d)] - **http2**: send error text in case of ALPN mismatch (Anna Henningsen) [#18986](https://github.com/nodejs/node/pull/18986) +* [[`ef8f90f34e`](https://github.com/nodejs/node/commit/ef8f90f34e)] - **http2**: fix condition where data is lost (Matteo Collina) [#18895](https://github.com/nodejs/node/pull/18895) +* [[`e584113b66`](https://github.com/nodejs/node/commit/e584113b66)] - **lib**: re-fix v8\_prof\_processor (Anna Henningsen) [#19059](https://github.com/nodejs/node/pull/19059) +* [[`12856b0dd2`](https://github.com/nodejs/node/commit/12856b0dd2)] - **lib**: change hook -\> hooks in code comment (Daniel Bevenius) [#19053](https://github.com/nodejs/node/pull/19053) +* [[`db8d197e79`](https://github.com/nodejs/node/commit/db8d197e79)] - **lib,test**: remove yoda statements (Ruben Bridgewater) [#18746](https://github.com/nodejs/node/pull/18746) +* [[`59547cc438`](https://github.com/nodejs/node/commit/59547cc438)] - **loader**: fix --inspect-brk (Gus Caplan) [#18949](https://github.com/nodejs/node/pull/18949) +* [[`39e032fe86`](https://github.com/nodejs/node/commit/39e032fe86)] - **module**: fix main lookup regression from #18728 (Guy Bedford) [#18788](https://github.com/nodejs/node/pull/18788) +* [[`f3e3429296`](https://github.com/nodejs/node/commit/f3e3429296)] - **module**: support main w/o extension, pjson cache (Guy Bedford) [#18728](https://github.com/nodejs/node/pull/18728) +* [[`95f6467ffd`](https://github.com/nodejs/node/commit/95f6467ffd)] - **module**: fix cyclical dynamic import (Bradley Farias) [#18965](https://github.com/nodejs/node/pull/18965) +* [[`5c4f703607`](https://github.com/nodejs/node/commit/5c4f703607)] - **n-api**: update reference test (Gabriel Schulhof) [#19086](https://github.com/nodejs/node/pull/19086) +* [[`1b32fc3276`](https://github.com/nodejs/node/commit/1b32fc3276)] - **n-api**: fix object test (Gabriel Schulhof) [#19039](https://github.com/nodejs/node/pull/19039) +* [[`ef4714c2b6`](https://github.com/nodejs/node/commit/ef4714c2b6)] - **net**: inline and simplify onSocketEnd (Anna Henningsen) [#18607](https://github.com/nodejs/node/pull/18607) +* 
[[`28880cf89d`](https://github.com/nodejs/node/commit/28880cf89d)] - **perf_hooks**: fix timing (Timothy Gu) [#18993](https://github.com/nodejs/node/pull/18993) +* [[`96f0bec48b`](https://github.com/nodejs/node/commit/96f0bec48b)] - **repl**: make last error available as `\_error` (Anna Henningsen) [#18919](https://github.com/nodejs/node/pull/18919) +* [[`420d56c2ea`](https://github.com/nodejs/node/commit/420d56c2ea)] - **src**: don't touch js object in Http2Session dtor (Ben Noordhuis) [#18656](https://github.com/nodejs/node/pull/18656) +* [[`f89f659dcf`](https://github.com/nodejs/node/commit/f89f659dcf)] - **src**: remove unnecessary Reset() calls (Ben Noordhuis) [#18656](https://github.com/nodejs/node/pull/18656) +* [[`67a9742aed`](https://github.com/nodejs/node/commit/67a9742aed)] - **src**: prevent persistent handle resource leaks (Ben Noordhuis) [#18656](https://github.com/nodejs/node/pull/18656) +* [[`08bcdde888`](https://github.com/nodejs/node/commit/08bcdde888)] - **(SEMVER-MINOR)** **src**: handle exceptions in env-\>SetImmediates (James M Snell) [#18297](https://github.com/nodejs/node/pull/18297) +* [[`cc52dae7c4`](https://github.com/nodejs/node/commit/cc52dae7c4)] - **src**: #include \" to iculslocs (Steven R. Loomis) [#19150](https://github.com/nodejs/node/pull/19150) +* [[`2f17c52674`](https://github.com/nodejs/node/commit/2f17c52674)] - **src**: use std::unique\_ptr for STACK\_OF(X509) (Ben Noordhuis) [#19087](https://github.com/nodejs/node/pull/19087) +* [[`f10470ce2d`](https://github.com/nodejs/node/commit/f10470ce2d)] - **src**: refactor GetPeerCertificate (Daniel Bevenius) [#19087](https://github.com/nodejs/node/pull/19087) +* [[`4fae6e3904`](https://github.com/nodejs/node/commit/4fae6e3904)] - **(SEMVER-MINOR)** **src**: make process.dlopen() load well-known symbol (Ben Noordhuis) [#18934](https://github.com/nodejs/node/pull/18934) +* [[`89edbae7ab`](https://github.com/nodejs/node/commit/89edbae7ab)] - **(SEMVER-MINOR)** **src**: clean up process.dlopen() (Ben Noordhuis) [#18934](https://github.com/nodejs/node/pull/18934) +* [[`08b83ee27a`](https://github.com/nodejs/node/commit/08b83ee27a)] - **src**: refactor setting JS properties on WriteWrap (Anna Henningsen) [#18963](https://github.com/nodejs/node/pull/18963) +* [[`4d5cd5c6c5`](https://github.com/nodejs/node/commit/4d5cd5c6c5)] - **src**: fix error message in async\_hooks constructor (Daniel Bevenius) [#19000](https://github.com/nodejs/node/pull/19000) +* [[`6787913a68`](https://github.com/nodejs/node/commit/6787913a68)] - **test**: add more information to assert.strictEqual (Ujjwal Sharma) [#19162](https://github.com/nodejs/node/pull/19162) +* [[`ee653ecd09`](https://github.com/nodejs/node/commit/ee653ecd09)] - **test**: move require http2 to after crypto check (Daniel Bevenius) [#19111](https://github.com/nodejs/node/pull/19111) +* [[`5bbf009c1d`](https://github.com/nodejs/node/commit/5bbf009c1d)] - **test**: check symbols in shared lib (Yihong Wang) [#18806](https://github.com/nodejs/node/pull/18806) +* 
[[`d8833762cb`](https://github.com/nodejs/node/commit/d8833762cb)] - **test**: refactor test-async-wrap-getasyncid (Santiago Gimeno) [#18727](https://github.com/nodejs/node/pull/18727) +* [[`23107ba7b1`](https://github.com/nodejs/node/commit/23107ba7b1)] - **test**: remove assert message and add block scope (wuweiweiwu) [#19054](https://github.com/nodejs/node/pull/19054) +* [[`cc90bbd0f4`](https://github.com/nodejs/node/commit/cc90bbd0f4)] - **test**: fix flaky inspector-stop-profile-after-done (Rich Trott) [#18126](https://github.com/nodejs/node/pull/18126) +* [[`8d595bb25c`](https://github.com/nodejs/node/commit/8d595bb25c)] - **test**: check endless loop while writing empty string (XadillaX) [#18924](https://github.com/nodejs/node/pull/18924) +* [[`a4550069ca`](https://github.com/nodejs/node/commit/a4550069ca)] - **test**: allow running with `NODE\_PENDING\_DEPRECATION` (Anna Henningsen) [#18991](https://github.com/nodejs/node/pull/18991) +* [[`fd27165f73`](https://github.com/nodejs/node/commit/fd27165f73)] - **test**: specify 'dir' for directory symlinks (Kyle Farnung) [#19049](https://github.com/nodejs/node/pull/19049) +* [[`eca333a6e8`](https://github.com/nodejs/node/commit/eca333a6e8)] - **test**: refactor test after review (Andrew Johnston) [#18999](https://github.com/nodejs/node/pull/18999) +* [[`c943cd09a7`](https://github.com/nodejs/node/commit/c943cd09a7)] - **test**: fix repl-tab-complete --without-ssl (Daniel Bevenius) [#17867](https://github.com/nodejs/node/pull/17867) +* [[`f864509991`](https://github.com/nodejs/node/commit/f864509991)] - **test,benchmark**: use new Buffer API where appropriate (Сковорода Никита Андреевич) [#18980](https://github.com/nodejs/node/pull/18980) +* [[`479b622e49`](https://github.com/nodejs/node/commit/479b622e49)] - **tls,http2**: handle writes after SSL destroy more gracefully (Anna Henningsen) [#18987](https://github.com/nodejs/node/pull/18987) +* [[`3d4cda3a7d`](https://github.com/nodejs/node/commit/3d4cda3a7d)] - **(SEMVER-MINOR)** **trace_events**: add file pattern cli option (Andreas Madsen) [#18480](https://github.com/nodejs/node/pull/18480) +* [[`3e8e1524ac`](https://github.com/nodejs/node/commit/3e8e1524ac)] - **util**: use blue on non-windows systems for number (Gus Caplan) [#18925](https://github.com/nodejs/node/pull/18925) + ## 2018-03-02, Version 9.7.1 (Current), @rvagg diff --git a/doc/guides/contributing/pull-requests.md b/doc/guides/contributing/pull-requests.md index 4fd2de923a0e48..7ac8389e313349 100644 --- a/doc/guides/contributing/pull-requests.md +++ b/doc/guides/contributing/pull-requests.md @@ -401,10 +401,10 @@ seem unfamiliar, refer to this All Pull Requests require "sign off" in order to land. Whenever a contributor reviews a Pull Request they may find specific details that they would like to see changed or fixed. These may be as simple as fixing a typo, or may involve -substantive changes to the code you have written. In general, such requests -are intended to be helpful, but at times may come across as abrupt or unhelpful, -especially requests to change things that do not include concrete suggestions -on *how* to change them. +substantive changes to the code you have written. 
While such requests are +intended to be helpful, they may come across as abrupt or unhelpful, especially +requests to change things that do not include concrete suggestions on *how* to +change them. Try not to be discouraged. If you feel that a particular review is unfair, say so, or contact one of the other contributors in the project and seek their @@ -610,8 +610,8 @@ however, will stay intact on the Pull Request page. For the size of "one logical change", [0b5191f](https://github.com/nodejs/node/commit/0b5191f15d0f311c804d542b67e2e922d98834f8) can be a good example. It touches the implementation, the documentation, -and the tests, but is still one logical change. In general, the tests should -always pass when each individual commit lands on the master branch. +and the tests, but is still one logical change. All tests should always pass +when each individual commit lands on the master branch. ### Getting Approvals for Your Pull Request diff --git a/doc/guides/maintaining-V8.md b/doc/guides/maintaining-V8.md index 04bc155660105b..0ede3ac5f5e332 100644 --- a/doc/guides/maintaining-V8.md +++ b/doc/guides/maintaining-V8.md @@ -3,15 +3,14 @@ ## Background V8 follows the Chromium release schedule. The support horizon for Chromium is -very different from the support horizon that Node.js needs to provide to its -users. As a result Node.js needs to support a version of V8 for quite a bit -longer than what upstream needs to support. Since V8 doesn't have an LTS -supported branch, there is no official process around how the V8 branches in -Node.js are maintained. - -This document attempts to document the current processes and proposes a workflow -for maintaining the V8 branches in Node.js LTS and Current releases and how the -Node.js and V8 teams at Google can help. +different compared to the support horizon for Node.js. As a result, Node.js +needs to support multiple versions of V8 longer than what upstream needs +to support. V8 branches in Node.js lack of an official maintenance process due +to a missing LTS supported branch. + +This document attempts to outline the current maintenance processes, proposes +a workflow for maintaining the V8 branches in both Node.js LTS and current +releases, and discusses how the Node.js and V8 teams at Google can help. ## V8 Release Schedule diff --git a/doc/node.1 b/doc/node.1 index 26092d26520247..d543d9fa0355f2 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -156,6 +156,10 @@ Enable the collection of trace event tracing information. A comma-separated list of categories that should be traced when trace event tracing is enabled using .Fl -trace-events-enabled . . +.It Fl -trace-event-file-pattern Ar pattern +Template string specifying the filepath for the trace event data, it +supports \fB${rotation}\fR and \fB${pid}\fR. +. .It Fl -zero-fill-buffers Automatically zero-fills all newly allocated Buffer and SlowBuffer instances. . 
diff --git a/doc/onboarding-extras.md b/doc/onboarding-extras.md index 30e0e7579f36aa..fa2d1ae02d9b60 100644 --- a/doc/onboarding-extras.md +++ b/doc/onboarding-extras.md @@ -5,42 +5,43 @@ | Subsystem | Maintainers | | --- | --- | | `benchmark/*` | @nodejs/benchmarking, @mscdex | -| `bootstrap_node.js` | @fishrock123 | +| `bootstrap_node.js` | @nodejs/process | | `doc/*`, `*.md` | @nodejs/documentation | | `lib/assert` | @nodejs/testing | | `lib/async_hooks` | @nodejs/async\_hooks for bugs/reviews (+ @nodejs/diagnostics for API) | | `lib/buffer` | @nodejs/buffer | -| `lib/child_process` | @bnoordhuis, @cjihrig | -| `lib/cluster` | @bnoordhuis, @cjihrig, @mcollina | +| `lib/child_process` | @nodejs/child\_process | +| `lib/cluster` | @nodejs/cluster | | `lib/{crypto,tls,https}` | @nodejs/crypto | -| `lib/dgram` | @cjihrig, @mcollina | -| `lib/domains` | @misterdjules | +| `lib/dgram` | @nodejs/dgram | +| `lib/domains` | @nodejs/domains | | `lib/fs`, `src/{fs,file}` | @nodejs/fs | | `lib/{_}http{*}` | @nodejs/http | | `lib/inspector.js`, `src/inspector_*` | @nodejs/v8-inspector | | `lib/internal/url`, `src/node_url` | @nodejs/url | | `lib/net` | @bnoordhuis, @indutny, @nodejs/streams | -| `lib/repl` | @addaleax, @fishrock123 | +| `lib/repl` | @nodejs/repl | | `lib/{_}stream{*}` | @nodejs/streams | -| `lib/timers` | @fishrock123, @misterdjules | -| `lib/util` | @bnoordhuis, @cjihrig, @evanlucas | -| `lib/zlib` | @addaleax, @bnoordhuis, @indutny | +| `lib/timers` | @nodejs/timers | +| `lib/util` | @nodejs/util | +| `lib/zlib` | @nodejs/zlib | | `src/async-wrap.*` | @nodejs/async\_hooks | | `src/node_api.*` | @nodejs/n-api | | `src/node_crypto.*` | @nodejs/crypto | | `test/*` | @nodejs/testing | -| `tools/node_modules/eslint`, `.eslintrc` | @not-an-aardvark, @silverwind, @trott | +| `tools/node_modules/eslint`, `.eslintrc` | @nodejs/linting | | build | @nodejs/build | | `src/module_wrap.*`, `lib/internal/loader/*`, `lib/internal/vm/Module.js` | @nodejs/modules | | GYP | @nodejs/gyp | | performance | @nodejs/performance | | platform specific | @nodejs/platform-{aix,arm,freebsd,macos,ppc,smartos,s390,windows} | | python code | @nodejs/python | -| upgrading c-ares | @jbergstroem | -| upgrading http-parser | @jbergstroem, @nodejs/http | -| upgrading libuv | @saghul | +| upgrading c-ares | @rvagg | +| upgrading http-parser | @nodejs/http, @nodejs/http2 | +| upgrading libuv | @nodejs/libuv | | upgrading npm | @fishrock123, @MylesBorins | | upgrading V8 | @nodejs/v8, @nodejs/post-mortem | +| Embedded use or delivery of Node.js | @nodejs/delivery-channels | When things need extra attention, are controversial, or `semver-major`: @nodejs/tsc diff --git a/lib/_http_client.js b/lib/_http_client.js index 63efb06b8f7f9c..71d81cd64dd58c 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -353,7 +353,7 @@ function socketCloseListener() { var parser = socket.parser; if (req.res && req.res.readable) { // Socket closed before we emitted 'end' below. 
- req.res.emit('aborted'); + if (!req.res.complete) req.res.emit('aborted'); var res = req.res; res.on('end', function() { res.emit('close'); diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index 9713224bd7fb83..c8116811ba5dbd 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -839,7 +839,7 @@ function resume_(stream, state) { Readable.prototype.pause = function() { debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { + if (this._readableState.flowing !== false) { debug('pause'); this._readableState.flowing = false; this.emit('pause'); diff --git a/lib/async_hooks.js b/lib/async_hooks.js index 8711755b25079b..19e7886360c13f 100644 --- a/lib/async_hooks.js +++ b/lib/async_hooks.js @@ -48,15 +48,15 @@ const { class AsyncHook { constructor({ init, before, after, destroy, promiseResolve }) { if (init !== undefined && typeof init !== 'function') - throw new errors.TypeError('ERR_ASYNC_CALLBACK', 'init'); + throw new errors.TypeError('ERR_ASYNC_CALLBACK', 'hook.init'); if (before !== undefined && typeof before !== 'function') - throw new errors.TypeError('ERR_ASYNC_CALLBACK', 'before'); + throw new errors.TypeError('ERR_ASYNC_CALLBACK', 'hook.before'); if (after !== undefined && typeof after !== 'function') - throw new errors.TypeError('ERR_ASYNC_CALLBACK', 'before'); + throw new errors.TypeError('ERR_ASYNC_CALLBACK', 'hook.after'); if (destroy !== undefined && typeof destroy !== 'function') - throw new errors.TypeError('ERR_ASYNC_CALLBACK', 'before'); + throw new errors.TypeError('ERR_ASYNC_CALLBACK', 'hook.destroy'); if (promiseResolve !== undefined && typeof promiseResolve !== 'function') - throw new errors.TypeError('ERR_ASYNC_CALLBACK', 'promiseResolve'); + throw new errors.TypeError('ERR_ASYNC_CALLBACK', 'hook.promiseResolve'); this[init_symbol] = init; this[before_symbol] = before; diff --git a/lib/buffer.js b/lib/buffer.js index c8e3f05d5c268a..ddbdbb6d03e531 100644 --- a/lib/buffer.js +++ b/lib/buffer.js @@ -742,7 +742,7 @@ Buffer.prototype.compare = function compare(target, // - buffer - a Buffer to search // - val - a string, Buffer, or number // - byteOffset - an index into `buffer`; will be clamped to an int32 -// - encoding - an optional encoding, relevant is val is a string +// - encoding - an optional encoding, relevant if val is a string // - dir - true for indexOf, false for lastIndexOf function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) { if (typeof byteOffset === 'string') { @@ -1242,6 +1242,7 @@ function toFloat(x) { Buffer.prototype.readDoubleBE = function(offset, noAssert) { + offset = offset >>> 0; const x1 = this.readUInt32BE(offset + 0, noAssert); const x0 = this.readUInt32BE(offset + 4, noAssert); return toDouble(x0, x1); @@ -1249,6 +1250,7 @@ Buffer.prototype.readDoubleBE = function(offset, noAssert) { Buffer.prototype.readDoubleLE = function(offset, noAssert) { + offset = offset >>> 0; const x0 = this.readUInt32LE(offset + 0, noAssert); const x1 = this.readUInt32LE(offset + 4, noAssert); return toDouble(x0, x1); @@ -1256,11 +1258,13 @@ Buffer.prototype.readDoubleLE = function(offset, noAssert) { Buffer.prototype.readFloatBE = function(offset, noAssert) { + offset = offset >>> 0; return toFloat(this.readUInt32BE(offset, noAssert)); }; Buffer.prototype.readFloatLE = function(offset, noAssert) { + offset = offset >>> 0; return toFloat(this.readUInt32LE(offset, noAssert)); }; diff --git a/lib/events.js b/lib/events.js index 07fa6484363116..799f8b4309d4c3 100644 --- 
a/lib/events.js +++ b/lib/events.js @@ -94,6 +94,47 @@ EventEmitter.prototype.getMaxListeners = function getMaxListeners() { return $getMaxListeners(this); }; +// Returns the longest sequence of `a` that fully appears in `b`, +// of length at least 3. +// This is a lazy approach but should work well enough, given that stack +// frames are usually unequal or otherwise appear in groups, and that +// we only run this code in case of an unhandled exception. +function longestSeqContainedIn(a, b) { + for (var len = a.length; len >= 3; --len) { + for (var i = 0; i < a.length - len; ++i) { + // Attempt to find a[i:i+len] in b + for (var j = 0; j < b.length - len; ++j) { + let matches = true; + for (var k = 0; k < len; ++k) { + if (a[i + k] !== b[j + k]) { + matches = false; + break; + } + } + if (matches) + return [ len, i, j ]; + } + } + } + + return [ 0, 0, 0 ]; +} + +function enhanceStackTrace(err, own) { + const sep = '\nEmitted \'error\' event at:\n'; + + const errStack = err.stack.split('\n').slice(1); + const ownStack = own.stack.split('\n').slice(1); + + const [ len, off ] = longestSeqContainedIn(ownStack, errStack); + if (len > 0) { + ownStack.splice(off + 1, len - 1, + ' [... lines matching original stack trace ...]'); + } + // Do this last, because it is the only operation with side effects. + err.stack = err.stack + sep + ownStack.join('\n'); +} + EventEmitter.prototype.emit = function emit(type, ...args) { let doError = (type === 'error'); @@ -109,13 +150,25 @@ EventEmitter.prototype.emit = function emit(type, ...args) { if (args.length > 0) er = args[0]; if (er instanceof Error) { + try { + const { kExpandStackSymbol } = require('internal/util'); + const capture = {}; + Error.captureStackTrace(capture, EventEmitter.prototype.emit); + Object.defineProperty(er, kExpandStackSymbol, { + value: enhanceStackTrace.bind(null, er, capture), + configurable: true + }); + } catch (e) {} + + // Note: The comments on the `throw` lines are intentional, they show + // up in Node's output if this results in an unhandled exception. throw er; // Unhandled 'error' event } // At least give some kind of context to the user const errors = lazyErrors(); const err = new errors.Error('ERR_UNHANDLED_ERROR', er); err.context = er; - throw err; + throw err; // Unhandled 'error' event } const handler = events[type]; diff --git a/lib/internal/async_hooks.js b/lib/internal/async_hooks.js index ea49239a6178bb..6f9b5b0a93eee5 100644 --- a/lib/internal/async_hooks.js +++ b/lib/internal/async_hooks.js @@ -124,7 +124,7 @@ function validateAsyncId(asyncId, type) { // emitInitScript. function emitInitNative(asyncId, type, triggerAsyncId, resource) { active_hooks.call_depth += 1; - // Use a single try/catch for all hook to avoid setting up one per iteration. + // Use a single try/catch for all hooks to avoid setting up one per iteration. try { for (var i = 0; i < active_hooks.array.length; i++) { if (typeof active_hooks.array[i][init_symbol] === 'function') { diff --git a/lib/internal/bootstrap_node.js b/lib/internal/bootstrap_node.js index 06bed3faaa4870..d8096f90e172ea 100644 --- a/lib/internal/bootstrap_node.js +++ b/lib/internal/bootstrap_node.js @@ -105,6 +105,7 @@ process.emitWarning( 'The ESM module loader is experimental.', 'ExperimentalWarning', undefined); + NativeModule.require('internal/process/modules').setup(); } @@ -438,6 +439,11 @@ } catch (er) { // nothing to be done about it at this point. 
} + try { + const { kExpandStackSymbol } = NativeModule.require('internal/util'); + if (typeof er[kExpandStackSymbol] === 'function') + er[kExpandStackSymbol](); + } catch (er) {} return false; } diff --git a/lib/internal/http2/core.js b/lib/internal/http2/core.js index 9247292ff47fce..93b76588db03b2 100644 --- a/lib/internal/http2/core.js +++ b/lib/internal/http2/core.js @@ -306,8 +306,23 @@ function onStreamClose(code) { if (state.fd !== undefined) tryClose(state.fd); - stream.push(null); - stream[kMaybeDestroy](null, code); + + // Defer destroy we actually emit end. + if (stream._readableState.endEmitted || code !== NGHTTP2_NO_ERROR) { + // If errored or ended, we can destroy immediately. + stream[kMaybeDestroy](null, code); + } else { + // Wait for end to destroy. + stream.on('end', stream[kMaybeDestroy]); + // Push a null so the stream can end whenever the client consumes + // it completely. + stream.push(null); + + // Same as net. + if (stream.readableLength === 0) { + stream.read(0); + } + } } // Receives a chunk of data for a given stream and forwards it on @@ -325,11 +340,19 @@ function onStreamRead(nread, buf) { } return; } + // Last chunk was received. End the readable side. debug(`Http2Stream ${stream[kID]} [Http2Session ` + `${sessionName(stream[kSession][kType])}]: ending readable.`); - stream.push(null); - stream[kMaybeDestroy](); + + // defer this until we actually emit end + if (stream._readableState.endEmitted) { + stream[kMaybeDestroy](); + } else { + stream.on('end', stream[kMaybeDestroy]); + stream.push(null); + stream.read(0); + } } // Called when the remote peer settings have been updated. @@ -1106,6 +1129,11 @@ class Http2Session extends EventEmitter { // Destroy any pending and open streams const cancel = new errors.Error('ERR_HTTP2_STREAM_CANCEL'); + if (error) { + cancel.cause = error; + if (typeof error.message === 'string') + cancel.message += ` (caused by: ${error.message})`; + } state.pendingStreams.forEach((stream) => stream.destroy(cancel)); state.streams.forEach((stream) => stream.destroy(error)); @@ -1825,21 +1853,25 @@ class Http2Stream extends Duplex { session[kMaybeDestroy](); process.nextTick(emit, this, 'close', code); callback(err); - } + } // The Http2Stream can be destroyed if it has closed and if the readable // side has received the final chunk. [kMaybeDestroy](error, code = NGHTTP2_NO_ERROR) { - if (error == null) { - if (code === NGHTTP2_NO_ERROR && - (!this._readableState.ended || - !this._writableState.ended || - this._writableState.pendingcb > 0 || - !this.closed)) { - return; - } + if (error || code !== NGHTTP2_NO_ERROR) { + this.destroy(error); + return; + } + + // TODO(mcollina): remove usage of _*State properties + if (this._readableState.ended && + this._writableState.ended && + this._writableState.pendingcb === 0 && + this.closed) { + this.destroy(); + // This should return, but eslint complains. + // return } - this.destroy(error); } } @@ -2461,8 +2493,17 @@ function connectionListener(socket) { return httpConnectionListener.call(this, socket); } // Let event handler deal with the socket - if (!this.emit('unknownProtocol', socket)) - socket.destroy(); + debug(`Unknown protocol from ${socket.remoteAddress}:${socket.remotePort}`); + if (!this.emit('unknownProtocol', socket)) { + // We don't know what to do, so let's just tell the other side what's + // going on in a format that they *might* understand. 
+ socket.end('HTTP/1.0 403 Forbidden\r\n' + + 'Content-Type: text/plain\r\n\r\n' + + 'Unknown ALPN Protocol, expected `h2` to be available.\n' + + 'If this is a HTTP request: The server was not ' + + 'configured with the `allowHTTP1` option or a ' + + 'listener for the `unknownProtocol` event.\n'); + } return; } diff --git a/lib/internal/loader/DefaultResolve.js b/lib/internal/loader/DefaultResolve.js index 69dd9537c18c2d..d815be87dd8954 100644 --- a/lib/internal/loader/DefaultResolve.js +++ b/lib/internal/loader/DefaultResolve.js @@ -2,7 +2,6 @@ const { URL } = require('url'); const CJSmodule = require('module'); -const internalURLModule = require('internal/url'); const internalFS = require('internal/fs'); const NativeModule = require('native_module'); const { extname } = require('path'); @@ -11,6 +10,7 @@ const preserveSymlinks = !!process.binding('config').preserveSymlinks; const errors = require('internal/errors'); const { resolve: moduleWrapResolve } = internalBinding('module_wrap'); const StringStartsWith = Function.call.bind(String.prototype.startsWith); +const { getURLFromFilePath, getPathFromURL } = require('internal/url'); const realpathCache = new Map(); @@ -57,7 +57,8 @@ function resolve(specifier, parentURL) { let url; try { - url = search(specifier, parentURL); + url = search(specifier, + parentURL || getURLFromFilePath(`${process.cwd()}/`).href); } catch (e) { if (typeof e.message === 'string' && StringStartsWith(e.message, 'Cannot find module')) @@ -66,17 +67,27 @@ function resolve(specifier, parentURL) { } if (!preserveSymlinks) { - const real = realpathSync(internalURLModule.getPathFromURL(url), { + const real = realpathSync(getPathFromURL(url), { [internalFS.realpathCacheKey]: realpathCache }); const old = url; - url = internalURLModule.getURLFromFilePath(real); + url = getURLFromFilePath(real); url.search = old.search; url.hash = old.hash; } const ext = extname(url.pathname); - return { url: `${url}`, format: extensionFormatMap[ext] || ext }; + + let format = extensionFormatMap[ext]; + if (!format) { + const isMain = parentURL === undefined; + if (isMain) + format = 'cjs'; + else + throw new errors.Error('ERR_UNKNOWN_FILE_EXTENSION', url.pathname); + } + + return { url: `${url}`, format }; } module.exports = resolve; diff --git a/lib/internal/loader/Loader.js b/lib/internal/loader/Loader.js index eda42645f170f6..f0edbbf921f40f 100644 --- a/lib/internal/loader/Loader.js +++ b/lib/internal/loader/Loader.js @@ -1,51 +1,21 @@ 'use strict'; -const path = require('path'); -const { getURLFromFilePath, URL } = require('internal/url'); const errors = require('internal/errors'); - const ModuleMap = require('internal/loader/ModuleMap'); const ModuleJob = require('internal/loader/ModuleJob'); const defaultResolve = require('internal/loader/DefaultResolve'); const createDynamicModule = require('internal/loader/CreateDynamicModule'); const translators = require('internal/loader/Translators'); -const { setImportModuleDynamicallyCallback } = internalBinding('module_wrap'); + const FunctionBind = Function.call.bind(Function.prototype.bind); const debug = require('util').debuglog('esm'); -// Returns a file URL for the current working directory. -function getURLStringForCwd() { - try { - return getURLFromFilePath(`${process.cwd()}/`).href; - } catch (e) { - e.stack; - // If the current working directory no longer exists. 
- if (e.code === 'ENOENT') { - return undefined; - } - throw e; - } -} - -function normalizeReferrerURL(referrer) { - if (typeof referrer === 'string' && path.isAbsolute(referrer)) { - return getURLFromFilePath(referrer).href; - } - return new URL(referrer).href; -} - /* A Loader instance is used as the main entry point for loading ES modules. * Currently, this is a singleton -- there is only one used for loading * the main module and everything in its dependency graph. */ class Loader { - constructor(base = getURLStringForCwd()) { - if (typeof base !== 'string') - throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'base', 'string'); - - this.base = base; - this.isMain = true; - + constructor() { // methods which translate input code or other information // into es modules this.translators = translators; @@ -71,8 +41,9 @@ class Loader { this._dynamicInstantiate = undefined; } - async resolve(specifier, parentURL = this.base) { - if (typeof parentURL !== 'string') + async resolve(specifier, parentURL) { + const isMain = parentURL === undefined; + if (!isMain && typeof parentURL !== 'string') throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'parentURL', 'string'); const { url, format } = @@ -93,7 +64,7 @@ class Loader { return { url, format }; } - async import(specifier, parent = this.base) { + async import(specifier, parent) { const job = await this.getModuleJob(specifier, parent); const module = await job.run(); return module.namespace(); @@ -107,7 +78,7 @@ class Loader { this._dynamicInstantiate = FunctionBind(dynamicInstantiate, null); } - async getModuleJob(specifier, parentURL = this.base) { + async getModuleJob(specifier, parentURL) { const { url, format } = await this.resolve(specifier, parentURL); let job = this.moduleMap.get(url); if (job !== undefined) @@ -134,24 +105,16 @@ class Loader { } let inspectBrk = false; - if (this.isMain) { - if (process._breakFirstLine) { - delete process._breakFirstLine; - inspectBrk = true; - } - this.isMain = false; + if (process._breakFirstLine) { + delete process._breakFirstLine; + inspectBrk = true; } job = new ModuleJob(this, url, loaderInstance, inspectBrk); this.moduleMap.set(url, job); return job; } - - static registerImportDynamicallyCallback(loader) { - setImportModuleDynamicallyCallback(async (referrer, specifier) => { - return loader.import(specifier, normalizeReferrerURL(referrer)); - }); - } } Object.setPrototypeOf(Loader.prototype, null); + module.exports = Loader; diff --git a/lib/internal/loader/ModuleJob.js b/lib/internal/loader/ModuleJob.js index db37765b20bd0c..b3553fc7235d95 100644 --- a/lib/internal/loader/ModuleJob.js +++ b/lib/internal/loader/ModuleJob.js @@ -15,6 +15,7 @@ class ModuleJob { this.loader = loader; this.error = null; this.hadError = false; + this.inspectBrk = inspectBrk; // This is a Promise<{ module, reflect }>, whose fields will be copied // onto `this` by `link()` below once it has been resolved. 
@@ -26,10 +27,6 @@ class ModuleJob { const link = async () => { ({ module: this.module, reflect: this.reflect } = await this.modulePromise); - if (inspectBrk) { - const initWrapper = process.binding('inspector').callAndPauseOnStart; - initWrapper(this.module.instantiate, this.module); - } assert(this.module instanceof ModuleWrap); const dependencyJobs = []; @@ -53,10 +50,11 @@ class ModuleJob { } async instantiate() { - if (this.instantiated) { - return this.instantiated; + if (!this.instantiated) { + return this.instantiated = this._instantiate(); } - return this.instantiated = this._instantiate(); + await this.instantiated; + return this.module; } // This method instantiates the module associated with this job and its @@ -83,7 +81,12 @@ class ModuleJob { throw e; } try { - this.module.instantiate(); + if (this.inspectBrk) { + const initWrapper = process.binding('inspector').callAndPauseOnStart; + initWrapper(this.module.instantiate, this.module); + } else { + this.module.instantiate(); + } } catch (e) { decorateErrorStack(e); throw e; diff --git a/lib/internal/loader/Translators.js b/lib/internal/loader/Translators.js index d2f28774177fd6..18b1b12fd15854 100644 --- a/lib/internal/loader/Translators.js +++ b/lib/internal/loader/Translators.js @@ -19,7 +19,7 @@ const JsonParse = JSON.parse; const translators = new SafeMap(); module.exports = translators; -// Stragety for loading a standard JavaScript module +// Strategy for loading a standard JavaScript module translators.set('esm', async (url) => { const source = `${await readFileAsync(new URL(url))}`; debug(`Translating StandardModule ${url}`); @@ -62,7 +62,7 @@ translators.set('builtin', async (url) => { }); }); -// Stragety for loading a node native module +// Strategy for loading a node native module translators.set('addon', async (url) => { debug(`Translating NativeModule ${url}`); return createDynamicModule(['default'], url, (reflect) => { @@ -74,7 +74,7 @@ translators.set('addon', async (url) => { }); }); -// Stragety for loading a JSON file +// Strategy for loading a JSON file translators.set('json', async (url) => { debug(`Translating JSONModule ${url}`); return createDynamicModule(['default'], url, (reflect) => { diff --git a/lib/internal/process/modules.js b/lib/internal/process/modules.js new file mode 100644 index 00000000000000..bc977c718725f2 --- /dev/null +++ b/lib/internal/process/modules.js @@ -0,0 +1,47 @@ +'use strict'; + +const { + setImportModuleDynamicallyCallback +} = internalBinding('module_wrap'); + +const { getURLFromFilePath } = require('internal/url'); +const Loader = require('internal/loader/Loader'); +const path = require('path'); +const { URL } = require('url'); + +function normalizeReferrerURL(referrer) { + if (typeof referrer === 'string' && path.isAbsolute(referrer)) { + return getURLFromFilePath(referrer).href; + } + return new URL(referrer).href; +} + +let loaderResolve; +exports.loaderPromise = new Promise((resolve, reject) => { + loaderResolve = resolve; +}); + +exports.ESMLoader = undefined; + +exports.setup = function() { + let ESMLoader = new Loader(); + const loaderPromise = (async () => { + const userLoader = process.binding('config').userLoader; + if (userLoader) { + const hooks = await ESMLoader.import( + userLoader, getURLFromFilePath(`${process.cwd()}/`).href); + ESMLoader = new Loader(); + ESMLoader.hook(hooks); + exports.ESMLoader = ESMLoader; + } + return ESMLoader; + })(); + loaderResolve(loaderPromise); + + setImportModuleDynamicallyCallback(async (referrer, specifier) => { + const 
loader = await loaderPromise; + return loader.import(specifier, normalizeReferrerURL(referrer)); + }); + + exports.ESMLoader = ESMLoader; +}; diff --git a/lib/internal/readline.js b/lib/internal/readline.js index e3d3007a75c645..979e62090b3b1d 100644 --- a/lib/internal/readline.js +++ b/lib/internal/readline.js @@ -87,33 +87,33 @@ if (process.binding('config').hasIntl) { if ( code >= 0x1100 && ( code <= 0x115f || // Hangul Jamo - 0x2329 === code || // LEFT-POINTING ANGLE BRACKET - 0x232a === code || // RIGHT-POINTING ANGLE BRACKET + code === 0x2329 || // LEFT-POINTING ANGLE BRACKET + code === 0x232a || // RIGHT-POINTING ANGLE BRACKET // CJK Radicals Supplement .. Enclosed CJK Letters and Months - (0x2e80 <= code && code <= 0x3247 && code !== 0x303f) || + code >= 0x2e80 && code <= 0x3247 && code !== 0x303f || // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A - 0x3250 <= code && code <= 0x4dbf || + code >= 0x3250 && code <= 0x4dbf || // CJK Unified Ideographs .. Yi Radicals - 0x4e00 <= code && code <= 0xa4c6 || + code >= 0x4e00 && code <= 0xa4c6 || // Hangul Jamo Extended-A - 0xa960 <= code && code <= 0xa97c || + code >= 0xa960 && code <= 0xa97c || // Hangul Syllables - 0xac00 <= code && code <= 0xd7a3 || + code >= 0xac00 && code <= 0xd7a3 || // CJK Compatibility Ideographs - 0xf900 <= code && code <= 0xfaff || + code >= 0xf900 && code <= 0xfaff || // Vertical Forms - 0xfe10 <= code && code <= 0xfe19 || + code >= 0xfe10 && code <= 0xfe19 || // CJK Compatibility Forms .. Small Form Variants - 0xfe30 <= code && code <= 0xfe6b || + code >= 0xfe30 && code <= 0xfe6b || // Halfwidth and Fullwidth Forms - 0xff01 <= code && code <= 0xff60 || - 0xffe0 <= code && code <= 0xffe6 || + code >= 0xff01 && code <= 0xff60 || + code >= 0xffe0 && code <= 0xffe6 || // Kana Supplement - 0x1b000 <= code && code <= 0x1b001 || + code >= 0x1b000 && code <= 0x1b001 || // Enclosed Ideographic Supplement - 0x1f200 <= code && code <= 0x1f251 || + code >= 0x1f200 && code <= 0x1f251 || // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane - 0x20000 <= code && code <= 0x3fffd + code >= 0x20000 && code <= 0x3fffd ) ) { return true; diff --git a/lib/internal/streams/legacy.js b/lib/internal/streams/legacy.js index 3242b15eabdb0d..9790696bfc7131 100644 --- a/lib/internal/streams/legacy.js +++ b/lib/internal/streams/legacy.js @@ -12,10 +12,8 @@ Stream.prototype.pipe = function(dest, options) { var source = this; function ondata(chunk) { - if (dest.writable) { - if (false === dest.write(chunk) && source.pause) { - source.pause(); - } + if (dest.writable && dest.write(chunk) === false && source.pause) { + source.pause(); } } diff --git a/lib/internal/util.js b/lib/internal/util.js index fd481450bc246d..0241573fa818c8 100644 --- a/lib/internal/util.js +++ b/lib/internal/util.js @@ -320,5 +320,6 @@ module.exports = { // Used by the buffer module to capture an internal reference to the // default isEncoding implementation, just in case userland overrides it. 
- kIsEncodingSymbol: Symbol('node.isEncoding') + kIsEncodingSymbol: Symbol('kIsEncodingSymbol'), + kExpandStackSymbol: Symbol('kExpandStackSymbol') }; diff --git a/lib/internal/v8_prof_processor.js b/lib/internal/v8_prof_processor.js index bb71213a8f4dae..7592253060294c 100644 --- a/lib/internal/v8_prof_processor.js +++ b/lib/internal/v8_prof_processor.js @@ -33,9 +33,9 @@ if (process.platform === 'darwin') { tickArguments.push('--windows'); } tickArguments.push.apply(tickArguments, process.argv.slice(1)); -script = `(function(require) { +script = `(function(module, require) { arguments = ${JSON.stringify(tickArguments)}; function write (s) { process.stdout.write(s) } ${script} })`; -vm.runInThisContext(script)(require); +vm.runInThisContext(script)(module, require); diff --git a/lib/module.js b/lib/module.js index 22a3e354c31617..877cca590f077b 100644 --- a/lib/module.js +++ b/lib/module.js @@ -24,7 +24,6 @@ const NativeModule = require('native_module'); const util = require('util'); const { decorateErrorStack } = require('internal/util'); -const internalModule = require('internal/module'); const { getURLFromFilePath } = require('internal/url'); const vm = require('vm'); const assert = require('assert').ok; @@ -35,6 +34,7 @@ const { internalModuleReadFile, internalModuleStat } = process.binding('fs'); +const internalModule = require('internal/module'); const preserveSymlinks = !!process.binding('config').preserveSymlinks; const experimentalModules = !!process.binding('config').experimentalModules; @@ -43,10 +43,9 @@ const errors = require('internal/errors'); module.exports = Module; // these are below module.exports for the circular reference -const Loader = require('internal/loader/Loader'); +const internalESModule = require('internal/process/modules'); const ModuleJob = require('internal/loader/ModuleJob'); const createDynamicModule = require('internal/loader/CreateDynamicModule'); -let ESMLoader; function stat(filename) { filename = path.toNamespacedPath(filename); @@ -444,7 +443,6 @@ Module._resolveLookupPaths = function(request, parent, newReturn) { return (newReturn ? parentDir : [id, parentDir]); }; - // Check the cache for the requested file. // 1. If a module already exists in the cache: return its exports object. // 2. 
If the module is native: call `NativeModule.require()` with the @@ -457,22 +455,10 @@ Module._load = function(request, parent, isMain) { debug('Module._load REQUEST %s parent: %s', request, parent.id); } - if (isMain && experimentalModules) { - (async () => { - // loader setup - if (!ESMLoader) { - ESMLoader = new Loader(); - const userLoader = process.binding('config').userLoader; - if (userLoader) { - ESMLoader.isMain = false; - const hooks = await ESMLoader.import(userLoader); - ESMLoader = new Loader(); - ESMLoader.hook(hooks); - } - } - Loader.registerImportDynamicallyCallback(ESMLoader); - await ESMLoader.import(getURLFromFilePath(request).pathname); - })() + if (experimentalModules && isMain) { + internalESModule.loaderPromise.then((loader) => { + return loader.import(getURLFromFilePath(request).pathname); + }) .catch((e) => { decorateErrorStack(e); console.error(e); @@ -575,7 +561,8 @@ Module.prototype.load = function(filename) { Module._extensions[extension](this, filename); this.loaded = true; - if (ESMLoader) { + if (experimentalModules) { + const ESMLoader = internalESModule.ESMLoader; const url = getURLFromFilePath(filename); const urlString = `${url}`; const exports = this.exports; diff --git a/lib/net.js b/lib/net.js index 09ad917ad0b7f6..90e0db558e3a9b 100644 --- a/lib/net.js +++ b/lib/net.js @@ -245,7 +245,7 @@ function Socket(options) { // shut down the socket when we're finished with it. this.on('finish', onSocketFinish); - this.on('_socketEnd', onSocketEnd); + this.on('end', onReadableStreamEnd); initSocketHandle(this); @@ -341,32 +341,6 @@ function afterShutdown(status, handle) { } } -// the EOF has been received, and no more bytes are coming. -// if the writable side has ended already, then clean everything -// up. -function onSocketEnd() { - // XXX Should not have to do as much in this function. - // ended should already be true, since this is called *after* - // the EOF errno and onread has eof'ed - debug('onSocketEnd', this._readableState); - this._readableState.ended = true; - if (this._readableState.endEmitted) { - this.readable = false; - maybeDestroy(this); - } else { - this.once('end', function end() { - this.readable = false; - maybeDestroy(this); - }); - this.read(0); - } - - if (!this.allowHalfOpen) { - this.write = writeAfterFIN; - this.destroySoon(); - } -} - // Provide a better error message when we call end() as a result // of the other side sending a FIN. The standard 'write after end' // is overly vague, and makes it seem like the user's code is to blame. @@ -512,6 +486,12 @@ Socket.prototype.end = function(data, encoding, callback) { }; +// Called when the 'end' event is emitted. 
+function onReadableStreamEnd() { + maybeDestroy(this); +} + + // Call whenever we set writable=false or readable=false function maybeDestroy(socket) { if (!socket.readable && @@ -625,10 +605,11 @@ function onread(nread, buffer) { // Do it before `maybeDestroy` for correct order of events: // `end` -> `close` self.push(null); + self.read(0); - if (self.readableLength === 0) { - self.readable = false; - maybeDestroy(self); + if (!self.allowHalfOpen) { + self.write = writeAfterFIN; + self.destroySoon(); } // internal end event so that we know that the actual socket diff --git a/lib/perf_hooks.js b/lib/perf_hooks.js index 6fd6e4a6b768ce..bd1bee19fc2a54 100644 --- a/lib/perf_hooks.js +++ b/lib/perf_hooks.js @@ -8,6 +8,7 @@ const { observerCounts, setupObservers, timeOrigin, + timeOriginTimestamp, timerify, constants } = process.binding('performance'); @@ -145,6 +146,13 @@ function now() { return hr[0] * 1000 + hr[1] / 1e6; } +function getMilestoneTimestamp(milestoneIdx) { + const ns = milestones[milestoneIdx]; + if (ns === -1) + return ns; + return ns / 1e6 - timeOrigin; +} + class PerformanceNodeTiming { constructor() {} @@ -157,7 +165,7 @@ class PerformanceNodeTiming { } get startTime() { - return timeOrigin; + return 0; } get duration() { @@ -165,59 +173,64 @@ class PerformanceNodeTiming { } get nodeStart() { - return milestones[NODE_PERFORMANCE_MILESTONE_NODE_START]; + return getMilestoneTimestamp(NODE_PERFORMANCE_MILESTONE_NODE_START); } get v8Start() { - return milestones[NODE_PERFORMANCE_MILESTONE_V8_START]; + return getMilestoneTimestamp(NODE_PERFORMANCE_MILESTONE_V8_START); } get environment() { - return milestones[NODE_PERFORMANCE_MILESTONE_ENVIRONMENT]; + return getMilestoneTimestamp(NODE_PERFORMANCE_MILESTONE_ENVIRONMENT); } get loopStart() { - return milestones[NODE_PERFORMANCE_MILESTONE_LOOP_START]; + return getMilestoneTimestamp(NODE_PERFORMANCE_MILESTONE_LOOP_START); } get loopExit() { - return milestones[NODE_PERFORMANCE_MILESTONE_LOOP_EXIT]; + return getMilestoneTimestamp(NODE_PERFORMANCE_MILESTONE_LOOP_EXIT); } get bootstrapComplete() { - return milestones[NODE_PERFORMANCE_MILESTONE_BOOTSTRAP_COMPLETE]; + return getMilestoneTimestamp(NODE_PERFORMANCE_MILESTONE_BOOTSTRAP_COMPLETE); } get thirdPartyMainStart() { - return milestones[NODE_PERFORMANCE_MILESTONE_THIRD_PARTY_MAIN_START]; + return getMilestoneTimestamp( + NODE_PERFORMANCE_MILESTONE_THIRD_PARTY_MAIN_START); } get thirdPartyMainEnd() { - return milestones[NODE_PERFORMANCE_MILESTONE_THIRD_PARTY_MAIN_END]; + return getMilestoneTimestamp( + NODE_PERFORMANCE_MILESTONE_THIRD_PARTY_MAIN_END); } get clusterSetupStart() { - return milestones[NODE_PERFORMANCE_MILESTONE_CLUSTER_SETUP_START]; + return getMilestoneTimestamp( + NODE_PERFORMANCE_MILESTONE_CLUSTER_SETUP_START); } get clusterSetupEnd() { - return milestones[NODE_PERFORMANCE_MILESTONE_CLUSTER_SETUP_END]; + return getMilestoneTimestamp(NODE_PERFORMANCE_MILESTONE_CLUSTER_SETUP_END); } get moduleLoadStart() { - return milestones[NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_START]; + return getMilestoneTimestamp(NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_START); } get moduleLoadEnd() { - return milestones[NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_END]; + return getMilestoneTimestamp(NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_END); } get preloadModuleLoadStart() { - return milestones[NODE_PERFORMANCE_MILESTONE_PRELOAD_MODULE_LOAD_START]; + return getMilestoneTimestamp( + NODE_PERFORMANCE_MILESTONE_PRELOAD_MODULE_LOAD_START); } get preloadModuleLoadEnd() { - return 
milestones[NODE_PERFORMANCE_MILESTONE_PRELOAD_MODULE_LOAD_END]; + return getMilestoneTimestamp( + NODE_PERFORMANCE_MILESTONE_PRELOAD_MODULE_LOAD_END); } [kInspect]() { @@ -466,11 +479,11 @@ class Performance extends PerformanceObserverEntryList { } get timeOrigin() { - return timeOrigin; + return timeOriginTimestamp; } now() { - return now(); + return now() - timeOrigin; } mark(name) { @@ -549,8 +562,9 @@ class Performance extends PerformanceObserverEntryList { [kInspect]() { return { - timeOrigin, - nodeTiming + maxEntries: this.maxEntries, + nodeTiming: this.nodeTiming, + timeOrigin: this.timeOrigin }; } } diff --git a/lib/repl.js b/lib/repl.js index b7af18ed492a9f..2078c5dcedaf20 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -155,6 +155,8 @@ function REPLServer(prompt, self.replMode = replMode || exports.REPL_MODE_SLOPPY; self.underscoreAssigned = false; self.last = undefined; + self.underscoreErrAssigned = false; + self.lastError = undefined; self.breakEvalOnSigint = !!breakEvalOnSigint; self.editorMode = false; // Context id for use with the inspector protocol. @@ -295,6 +297,8 @@ function REPLServer(prompt, internalUtil.decorateErrorStack(e); Error.prepareStackTrace = pstrace; const isError = internalUtil.isError(e); + if (!self.underscoreErrAssigned) + self.lastError = e; if (e instanceof SyntaxError && e.stack) { // remove repl:line-number and stack trace e.stack = e.stack @@ -693,6 +697,7 @@ REPLServer.prototype.createContext = function() { REPLServer.prototype.resetContext = function() { this.context = this.createContext(); this.underscoreAssigned = false; + this.underscoreErrAssigned = false; this.lines = []; this.lines.level = []; @@ -708,6 +713,19 @@ REPLServer.prototype.resetContext = function() { } }); + Object.defineProperty(this.context, '_error', { + configurable: true, + get: () => this.lastError, + set: (value) => { + this.lastError = value; + if (!this.underscoreErrAssigned) { + this.underscoreErrAssigned = true; + this.outputStream.write( + 'Expression assignment to _error now disabled.\n'); + } + } + }); + // Allow REPL extensions to extend the new context this.emit('reset', this.context); }; diff --git a/lib/util.js b/lib/util.js index 318a4cef24c93d..cd6321cfe5270e 100644 --- a/lib/util.js +++ b/lib/util.js @@ -333,9 +333,10 @@ inspect.colors = Object.assign(Object.create(null), { }); // Don't use 'blue' not visible on cmd.exe +const windows = process.platform === 'win32'; inspect.styles = Object.assign(Object.create(null), { 'special': 'cyan', - 'number': 'yellow', + 'number': windows ? 
'yellow' : 'blue', 'boolean': 'yellow', 'undefined': 'grey', 'null': 'bold', diff --git a/node.gyp b/node.gyp index 7da486ff6d8c7b..d85f6a491a9eea 100644 --- a/node.gyp +++ b/node.gyp @@ -115,6 +115,7 @@ 'lib/internal/net.js', 'lib/internal/module.js', 'lib/internal/os.js', + 'lib/internal/process/modules.js', 'lib/internal/process/next_tick.js', 'lib/internal/process/promises.js', 'lib/internal/process/stdio.js', @@ -359,9 +360,10 @@ 'src/node_internals.h', 'src/node_javascript.h', 'src/node_mutex.h', - 'src/node_platform.h', 'src/node_perf.h', 'src/node_perf_common.h', + 'src/node_persistent.h', + 'src/node_platform.h', 'src/node_root_certs.h', 'src/node_version.h', 'src/node_watchdog.h', diff --git a/src/async_wrap.cc b/src/async_wrap.cc index 93bd3d4864fd5d..cd9f26d7782d46 100644 --- a/src/async_wrap.cc +++ b/src/async_wrap.cc @@ -410,8 +410,8 @@ static void DisablePromiseHook(const FunctionCallbackInfo& args) { class DestroyParam { public: double asyncId; - v8::Persistent target; - v8::Persistent propBag; + Persistent target; + Persistent propBag; }; @@ -426,8 +426,6 @@ void AsyncWrap::WeakCallback(const v8::WeakCallbackInfo& info) { if (val->IsFalse()) { AsyncWrap::EmitDestroy(env, p->asyncId); } - p->target.Reset(); - p->propBag.Reset(); delete p; } diff --git a/src/base_object-inl.h b/src/base_object-inl.h index 900fc2b3edb9ca..51ef46599667df 100644 --- a/src/base_object-inl.h +++ b/src/base_object-inl.h @@ -42,12 +42,7 @@ inline BaseObject::BaseObject(Environment* env, v8::Local handle) } -inline BaseObject::~BaseObject() { - CHECK(persistent_handle_.IsEmpty()); -} - - -inline v8::Persistent& BaseObject::persistent() { +inline Persistent& BaseObject::persistent() { return persistent_handle_; } @@ -65,8 +60,7 @@ inline Environment* BaseObject::env() const { template inline void BaseObject::WeakCallback( const v8::WeakCallbackInfo& data) { - std::unique_ptr self(data.GetParameter()); - self->persistent().Reset(); + delete data.GetParameter(); } diff --git a/src/base_object.h b/src/base_object.h index 965683d029e43e..478499bbfeb5b2 100644 --- a/src/base_object.h +++ b/src/base_object.h @@ -24,6 +24,7 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS +#include "node_persistent.h" #include "v8.h" namespace node { @@ -33,18 +34,13 @@ class Environment; class BaseObject { public: inline BaseObject(Environment* env, v8::Local handle); - inline virtual ~BaseObject(); + virtual ~BaseObject() = default; // Returns the wrapped object. Returns an empty handle when // persistent.IsEmpty() is true. inline v8::Local object(); - // The parent class is responsible for calling .Reset() on destruction - // when the persistent handle is strong because there is no way for - // BaseObject to know when the handle goes out of scope. - // Weak handles have been reset by the time the destructor runs but - // calling .Reset() again is harmless. - inline v8::Persistent& persistent(); + inline Persistent& persistent(); inline Environment* env() const; @@ -71,7 +67,7 @@ class BaseObject { // position of members in memory are predictable. 
For more information please // refer to `doc/guides/node-postmortem-support.md` friend int GenDebugSymbols(); - v8::Persistent persistent_handle_; + Persistent persistent_handle_; Environment* env_; }; diff --git a/src/cares_wrap.cc b/src/cares_wrap.cc index 165a8cda20618b..b8da20346692c3 100644 --- a/src/cares_wrap.cc +++ b/src/cares_wrap.cc @@ -598,7 +598,6 @@ class QueryWrap : public AsyncWrap { ~QueryWrap() override { CHECK_EQ(false, persistent().IsEmpty()); ClearWrap(object()); - persistent().Reset(); } // Subclasses should implement the appropriate Send method. diff --git a/src/env-inl.h b/src/env-inl.h index 0a2adae2bb0ade..f647f428c324ff 100644 --- a/src/env-inl.h +++ b/src/env-inl.h @@ -357,9 +357,6 @@ inline Environment::~Environment() { context()->SetAlignedPointerInEmbedderData(kContextEmbedderDataIndex, nullptr); -#define V(PropertyName, TypeName) PropertyName ## _.Reset(); - ENVIRONMENT_STRONG_PERSISTENT_PROPERTIES(V) -#undef V delete[] heap_statistics_buffer_; delete[] heap_space_statistics_buffer_; @@ -541,8 +538,8 @@ void Environment::CreateImmediate(native_immediate_callback cb, native_immediate_callbacks_.push_back({ cb, data, - std::unique_ptr>(obj.IsEmpty() ? - nullptr : new v8::Persistent(isolate_, obj)), + std::unique_ptr>(obj.IsEmpty() ? + nullptr : new Persistent(isolate_, obj)), ref }); immediate_info()->count_inc(1); diff --git a/src/env.cc b/src/env.cc index ca023125c2627c..455f5980731f3f 100644 --- a/src/env.cc +++ b/src/env.cc @@ -296,13 +296,24 @@ void Environment::RunAndClearNativeImmediates() { size_t ref_count = 0; std::vector list; native_immediate_callbacks_.swap(list); - for (const auto& cb : list) { - cb.cb_(this, cb.data_); - if (cb.keep_alive_) - cb.keep_alive_->Reset(); - if (cb.refed_) - ref_count++; - } + auto drain_list = [&]() { + v8::TryCatch try_catch(isolate()); + for (auto it = list.begin(); it != list.end(); ++it) { + it->cb_(this, it->data_); + if (it->refed_) + ref_count++; + if (UNLIKELY(try_catch.HasCaught())) { + FatalException(isolate(), try_catch); + // Bail out, remove the already executed callbacks from list + // and set up a new TryCatch for the other pending callbacks. + std::move_backward(it, list.end(), list.begin() + (list.end() - it)); + list.resize(list.end() - it); + return true; + } + } + return false; + }; + while (drain_list()) {} #ifdef DEBUG CHECK_GE(immediate_info()->count(), count); diff --git a/src/env.h b/src/env.h index 490ec9baed7cb9..dcabf7f54fadef 100644 --- a/src/env.h +++ b/src/env.h @@ -54,7 +54,26 @@ class performance_state; namespace loader { class ModuleWrap; -} + +struct Exists { + enum Bool { Yes, No }; +}; + +struct IsValid { + enum Bool { Yes, No }; +}; + +struct HasMain { + enum Bool { Yes, No }; +}; + +struct PackageConfig { + const Exists::Bool exists; + const IsValid::Bool is_valid; + const HasMain::Bool has_main; + const std::string main; +}; +} // namespace loader // Pick an index that's hopefully out of the way when we're embedded inside // another application. 
Performance-wise or memory-wise it doesn't matter: @@ -151,6 +170,7 @@ class ModuleWrap; V(fd_string, "fd") \ V(file_string, "file") \ V(fingerprint_string, "fingerprint") \ + V(fingerprint256_string, "fingerprint256") \ V(flags_string, "flags") \ V(get_data_clone_error_string, "_getDataCloneError") \ V(get_shared_array_buffer_id_string, "_getSharedArrayBufferId") \ @@ -602,6 +622,8 @@ class Environment { std::unordered_multimap module_map; + std::unordered_map package_json_cache; + inline double* heap_statistics_buffer() const; inline void set_heap_statistics_buffer(double* pointer); @@ -799,7 +821,7 @@ class Environment { struct NativeImmediateCallback { native_immediate_callback cb_; void* data_; - std::unique_ptr> keep_alive_; + std::unique_ptr> keep_alive_; bool refed_; }; std::vector native_immediate_callbacks_; @@ -810,8 +832,7 @@ class Environment { v8::Local promise, v8::Local parent); -#define V(PropertyName, TypeName) \ - v8::Persistent PropertyName ## _; +#define V(PropertyName, TypeName) Persistent PropertyName ## _; ENVIRONMENT_STRONG_PERSISTENT_PROPERTIES(V) #undef V diff --git a/src/handle_wrap.cc b/src/handle_wrap.cc index 7dcafa2ce6e842..a3b0209eb3121f 100644 --- a/src/handle_wrap.cc +++ b/src/handle_wrap.cc @@ -98,11 +98,6 @@ HandleWrap::HandleWrap(Environment* env, } -HandleWrap::~HandleWrap() { - CHECK(persistent().IsEmpty()); -} - - void HandleWrap::OnClose(uv_handle_t* handle) { HandleWrap* wrap = static_cast(handle->data); Environment* env = wrap->env(); @@ -120,7 +115,6 @@ void HandleWrap::OnClose(uv_handle_t* handle) { wrap->MakeCallback(env->onclose_string(), 0, nullptr); ClearWrap(wrap->object()); - wrap->persistent().Reset(); delete wrap; } diff --git a/src/handle_wrap.h b/src/handle_wrap.h index 19fd36891a2fed..e7a335f5140253 100644 --- a/src/handle_wrap.h +++ b/src/handle_wrap.h @@ -75,7 +75,6 @@ class HandleWrap : public AsyncWrap { v8::Local object, uv_handle_t* handle, AsyncWrap::ProviderType provider); - ~HandleWrap() override; private: friend class Environment; diff --git a/src/inspector_agent.cc b/src/inspector_agent.cc index 7ba1a144711524..e143d316d2e6fa 100644 --- a/src/inspector_agent.cc +++ b/src/inspector_agent.cc @@ -30,7 +30,6 @@ using v8::HandleScope; using v8::Isolate; using v8::Local; using v8::Object; -using v8::Persistent; using v8::String; using v8::Value; diff --git a/src/inspector_agent.h b/src/inspector_agent.h index 0555f5e18c2129..56fb407930fac5 100644 --- a/src/inspector_agent.h +++ b/src/inspector_agent.h @@ -97,7 +97,7 @@ class Agent { private: void ToggleAsyncHook(v8::Isolate* isolate, - const v8::Persistent& fn); + const Persistent& fn); node::Environment* parent_env_; std::unique_ptr client_; @@ -109,8 +109,8 @@ class Agent { bool pending_enable_async_hook_; bool pending_disable_async_hook_; - v8::Persistent enable_async_hook_function_; - v8::Persistent disable_async_hook_function_; + Persistent enable_async_hook_function_; + Persistent disable_async_hook_function_; }; } // namespace inspector diff --git a/src/inspector_js_api.cc b/src/inspector_js_api.cc index 428d8391f2581a..1cced9420aea6c 100644 --- a/src/inspector_js_api.cc +++ b/src/inspector_js_api.cc @@ -19,7 +19,6 @@ using v8::Local; using v8::MaybeLocal; using v8::NewStringType; using v8::Object; -using v8::Persistent; using v8::String; using v8::Value; @@ -85,10 +84,6 @@ class JSBindingsConnection : public AsyncWrap { inspector->Connect(&delegate_); } - ~JSBindingsConnection() override { - callback_.Reset(); - } - void OnMessage(Local value) { 
MakeCallback(callback_.Get(env()->isolate()), 1, &value); } @@ -112,7 +107,6 @@ class JSBindingsConnection : public AsyncWrap { delegate_.Disconnect(); if (!persistent().IsEmpty()) { ClearWrap(object()); - persistent().Reset(); } delete this; } diff --git a/src/module_wrap.cc b/src/module_wrap.cc index 24e73cc3e101c6..175475b738b2aa 100644 --- a/src/module_wrap.cc +++ b/src/module_wrap.cc @@ -59,9 +59,6 @@ ModuleWrap::~ModuleWrap() { break; } } - - module_.Reset(); - context_.Reset(); } void ModuleWrap::New(const FunctionCallbackInfo& args) { @@ -215,8 +212,6 @@ void ModuleWrap::Instantiate(const FunctionCallbackInfo& args) { module->InstantiateModule(context, ModuleWrap::ResolveCallback); // clear resolve cache on instantiate - for (auto& entry : obj->resolve_cache_) - entry.second.Reset(); obj->resolve_cache_.clear(); if (!ok.FromMaybe(false)) { @@ -456,10 +451,9 @@ enum CheckFileOptions { CLOSE_AFTER_CHECK }; -Maybe CheckFile(const URL& search, +Maybe CheckFile(const std::string& path, CheckFileOptions opt = CLOSE_AFTER_CHECK) { uv_fs_t fs_req; - std::string path = search.ToFilePath(); if (path.empty()) { return Nothing(); } @@ -476,19 +470,74 @@ Maybe CheckFile(const URL& search, uv_fs_req_cleanup(&fs_req); if (is_directory) { - uv_fs_close(nullptr, &fs_req, fd, nullptr); + CHECK_EQ(0, uv_fs_close(nullptr, &fs_req, fd, nullptr)); uv_fs_req_cleanup(&fs_req); return Nothing(); } if (opt == CLOSE_AFTER_CHECK) { - uv_fs_close(nullptr, &fs_req, fd, nullptr); + CHECK_EQ(0, uv_fs_close(nullptr, &fs_req, fd, nullptr)); uv_fs_req_cleanup(&fs_req); } return Just(fd); } +const PackageConfig& GetPackageConfig(Environment* env, + const std::string path) { + auto existing = env->package_json_cache.find(path); + if (existing != env->package_json_cache.end()) { + return existing->second; + } + Maybe check = CheckFile(path, LEAVE_OPEN_AFTER_CHECK); + if (check.IsNothing()) { + auto entry = env->package_json_cache.emplace(path, + PackageConfig { Exists::No, IsValid::Yes, HasMain::No, "" }); + return entry.first->second; + } + + Isolate* isolate = env->isolate(); + v8::HandleScope handle_scope(isolate); + + std::string pkg_src = ReadFile(check.FromJust()); + uv_fs_t fs_req; + CHECK_EQ(0, uv_fs_close(nullptr, &fs_req, check.FromJust(), nullptr)); + uv_fs_req_cleanup(&fs_req); + + Local src; + if (!String::NewFromUtf8(isolate, + pkg_src.c_str(), + v8::NewStringType::kNormal, + pkg_src.length()).ToLocal(&src)) { + auto entry = env->package_json_cache.emplace(path, + PackageConfig { Exists::No, IsValid::Yes, HasMain::No, "" }); + return entry.first->second; + } + + Local pkg_json_v; + Local pkg_json; + + if (!JSON::Parse(env->context(), src).ToLocal(&pkg_json_v) || + !pkg_json_v->ToObject(env->context()).ToLocal(&pkg_json)) { + auto entry = env->package_json_cache.emplace(path, + PackageConfig { Exists::Yes, IsValid::No, HasMain::No, "" }); + return entry.first->second; + } + + Local pkg_main; + HasMain::Bool has_main = HasMain::No; + std::string main_std; + if (pkg_json->Get(env->context(), env->main_string()).ToLocal(&pkg_main)) { + has_main = HasMain::Yes; + Utf8Value main_utf8(isolate, pkg_main); + main_std.assign(std::string(*main_utf8, main_utf8.length())); + } + + auto entry = env->package_json_cache.emplace(path, + PackageConfig { Exists::Yes, IsValid::Yes, has_main, main_std }); + return entry.first->second; +} + enum ResolveExtensionsOptions { TRY_EXACT_NAME, ONLY_VIA_EXTENSIONS @@ -497,7 +546,8 @@ enum ResolveExtensionsOptions { template Maybe ResolveExtensions(const URL& search) { if (options == 
TRY_EXACT_NAME) { - Maybe check = CheckFile(search); + std::string filePath = search.ToFilePath(); + Maybe check = CheckFile(filePath); if (!check.IsNothing()) { return Just(search); } @@ -505,7 +555,7 @@ Maybe ResolveExtensions(const URL& search) { for (const char* extension : EXTENSIONS) { URL guess(search.path() + extension, &search); - Maybe check = CheckFile(guess); + Maybe check = CheckFile(guess.ToFilePath()); if (!check.IsNothing()) { return Just(guess); } @@ -520,44 +570,20 @@ inline Maybe ResolveIndex(const URL& search) { Maybe ResolveMain(Environment* env, const URL& search) { URL pkg("package.json", &search); - Maybe check = CheckFile(pkg, LEAVE_OPEN_AFTER_CHECK); - if (check.IsNothing()) { - return Nothing(); - } - - Isolate* isolate = env->isolate(); - Local context = isolate->GetCurrentContext(); - std::string pkg_src = ReadFile(check.FromJust()); - uv_fs_t fs_req; - uv_fs_close(nullptr, &fs_req, check.FromJust(), nullptr); - uv_fs_req_cleanup(&fs_req); - - // It's not okay for the called of this method to not be able to tell - // whether an exception is pending or not. - TryCatch try_catch(isolate); - Local src; - if (!String::NewFromUtf8(isolate, - pkg_src.c_str(), - v8::NewStringType::kNormal, - pkg_src.length()).ToLocal(&src)) { + const PackageConfig& pjson = + GetPackageConfig(env, pkg.ToFilePath()); + // Note invalid package.json should throw in resolver + // currently we silently ignore which is incorrect + if (pjson.exists == Exists::No || + pjson.is_valid == IsValid::No || + pjson.has_main == HasMain::No) { return Nothing(); } - - Local pkg_json; - if (!JSON::Parse(context, src).ToLocal(&pkg_json) || !pkg_json->IsObject()) - return Nothing(); - Local pkg_main; - if (!pkg_json.As()->Get(context, env->main_string()) - .ToLocal(&pkg_main) || !pkg_main->IsString()) { - return Nothing(); + if (!ShouldBeTreatedAsRelativeOrAbsolutePath(pjson.main)) { + return Resolve(env, "./" + pjson.main, search, IgnoreMain); } - Utf8Value main_utf8(isolate, pkg_main.As()); - std::string main_std(*main_utf8, main_utf8.length()); - if (!ShouldBeTreatedAsRelativeOrAbsolutePath(main_std)) { - main_std.insert(0, "./"); - } - return Resolve(env, main_std, search); + return Resolve(env, pjson.main, search, IgnoreMain); } Maybe ResolveModule(Environment* env, @@ -567,7 +593,8 @@ Maybe ResolveModule(Environment* env, URL dir(""); do { dir = parent; - Maybe check = Resolve(env, "./node_modules/" + specifier, dir, true); + Maybe check = + Resolve(env, "./node_modules/" + specifier, dir, CheckMain); if (!check.IsNothing()) { const size_t limit = specifier.find('/'); const size_t spec_len = @@ -589,8 +616,8 @@ Maybe ResolveModule(Environment* env, Maybe ResolveDirectory(Environment* env, const URL& search, - bool read_pkg_json) { - if (read_pkg_json) { + PackageMainCheck check_pjson_main) { + if (check_pjson_main) { Maybe main = ResolveMain(env, search); if (!main.IsNothing()) return main; @@ -600,15 +627,14 @@ Maybe ResolveDirectory(Environment* env, } // anonymous namespace - Maybe Resolve(Environment* env, const std::string& specifier, const URL& base, - bool read_pkg_json) { + PackageMainCheck check_pjson_main) { URL pure_url(specifier); if (!(pure_url.flags() & URL_FLAGS_FAILED)) { // just check existence, without altering - Maybe check = CheckFile(pure_url); + Maybe check = CheckFile(pure_url.ToFilePath()); if (check.IsNothing()) { return Nothing(); } @@ -625,7 +651,7 @@ Maybe Resolve(Environment* env, if (specifier.back() != '/') { resolved = URL(specifier + "/", base); } - return 
ResolveDirectory(env, resolved, read_pkg_json); + return ResolveDirectory(env, resolved, check_pjson_main); } else { return ResolveModule(env, specifier, base); } @@ -662,7 +688,7 @@ void ModuleWrap::Resolve(const FunctionCallbackInfo& args) { return; } - Maybe result = node::loader::Resolve(env, specifier_std, url, true); + Maybe result = node::loader::Resolve(env, specifier_std, url); if (result.IsNothing() || (result.FromJust().flags() & URL_FLAGS_FAILED)) { std::string msg = "Cannot find module " + specifier_std; env->ThrowError(msg.c_str()); diff --git a/src/module_wrap.h b/src/module_wrap.h index bedf665165c8f6..c7b1f3e4d4869d 100644 --- a/src/module_wrap.h +++ b/src/module_wrap.h @@ -12,10 +12,15 @@ namespace node { namespace loader { +enum PackageMainCheck : bool { + CheckMain = true, + IgnoreMain = false +}; + v8::Maybe Resolve(Environment* env, const std::string& specifier, const url::URL& base, - bool read_pkg_json = false); + PackageMainCheck read_pkg_json = CheckMain); class ModuleWrap : public BaseObject { public: @@ -49,11 +54,11 @@ class ModuleWrap : public BaseObject { v8::Local specifier, v8::Local referrer); - v8::Persistent module_; - v8::Persistent url_; + Persistent module_; + Persistent url_; bool linked_ = false; - std::unordered_map> resolve_cache_; - v8::Persistent context_; + std::unordered_map> resolve_cache_; + Persistent context_; }; } // namespace loader diff --git a/src/node.cc b/src/node.cc index ff98726ebbea0b..35a1d5cb598d5b 100644 --- a/src/node.cc +++ b/src/node.cc @@ -197,6 +197,8 @@ static node_module* modlist_linked; static node_module* modlist_addon; static bool trace_enabled = false; static std::string trace_enabled_categories; // NOLINT(runtime/string) +static std::string trace_file_pattern = // NOLINT(runtime/string) + "node_trace.${rotation}.log"; static bool abort_on_uncaught_exception = false; // Bit flag used to track security reverts (see node_revert.h) @@ -280,7 +282,7 @@ static struct { #if NODE_USE_V8_PLATFORM void Initialize(int thread_pool_size) { if (trace_enabled) { - tracing_agent_.reset(new tracing::Agent()); + tracing_agent_.reset(new tracing::Agent(trace_file_pattern)); platform_ = new NodePlatform(thread_pool_size, tracing_agent_->GetTracingController()); V8::InitializePlatform(platform_); @@ -2470,46 +2472,82 @@ node_module* get_linked_module(const char* name) { } struct DLib { - std::string filename_; +#ifdef __POSIX__ + static const int kDefaultFlags = RTLD_LAZY; +#else + static const int kDefaultFlags = 0; +#endif + + inline DLib(const char* filename, int flags) + : filename_(filename), flags_(flags), handle_(nullptr) {} + + inline bool Open(); + inline void Close(); + inline void* GetSymbolAddress(const char* name); + + const std::string filename_; + const int flags_; std::string errmsg_; void* handle_; - int flags_; +#ifndef __POSIX__ + uv_lib_t lib_; +#endif + + DISALLOW_COPY_AND_ASSIGN(DLib); +}; + #ifdef __POSIX__ - static const int kDefaultFlags = RTLD_LAZY; +bool DLib::Open() { + handle_ = dlopen(filename_.c_str(), flags_); + if (handle_ != nullptr) + return true; + errmsg_ = dlerror(); + return false; +} - bool Open() { - handle_ = dlopen(filename_.c_str(), flags_); - if (handle_ != nullptr) - return true; - errmsg_ = dlerror(); - return false; - } +void DLib::Close() { + if (handle_ == nullptr) return; + dlclose(handle_); + handle_ = nullptr; +} - void Close() { - if (handle_ != nullptr) - dlclose(handle_); - } +void* DLib::GetSymbolAddress(const char* name) { + return dlsym(handle_, name); +} #else // !__POSIX__ - 
static const int kDefaultFlags = 0; - uv_lib_t lib_; - - bool Open() { - int ret = uv_dlopen(filename_.c_str(), &lib_); - if (ret == 0) { - handle_ = static_cast(lib_.handle); - return true; - } - errmsg_ = uv_dlerror(&lib_); - uv_dlclose(&lib_); - return false; +bool DLib::Open() { + int ret = uv_dlopen(filename_.c_str(), &lib_); + if (ret == 0) { + handle_ = static_cast(lib_.handle); + return true; } + errmsg_ = uv_dlerror(&lib_); + uv_dlclose(&lib_); + return false; +} - void Close() { - uv_dlclose(&lib_); - } +void DLib::Close() { + if (handle_ == nullptr) return; + uv_dlclose(&lib_); + handle_ = nullptr; +} + +void* DLib::GetSymbolAddress(const char* name) { + void* address; + if (0 == uv_dlsym(&lib_, name, &address)) return address; + return nullptr; +} #endif // !__POSIX__ -}; + +using InitializerCallback = void (*)(Local exports, + Local module, + Local context); + +inline InitializerCallback GetInitializerCallback(DLib* dlib) { + const char* name = "node_register_module_v" STRINGIFY(NODE_MODULE_VERSION); + return reinterpret_cast(dlib->GetSymbolAddress(name)); +} // DLOpen is process.dlopen(module, filename, flags). // Used to load 'module.node' dynamically shared objects. @@ -2519,6 +2557,7 @@ struct DLib { // cache that's a plain C list or hash table that's shared across contexts? static void DLOpen(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); + auto context = env->context(); CHECK_EQ(modpending, nullptr); @@ -2528,16 +2567,21 @@ static void DLOpen(const FunctionCallbackInfo& args) { } int32_t flags = DLib::kDefaultFlags; - if (args.Length() > 2 && !args[2]->Int32Value(env->context()).To(&flags)) { + if (args.Length() > 2 && !args[2]->Int32Value(context).To(&flags)) { return env->ThrowTypeError("flag argument must be an integer."); } - Local module = - args[0]->ToObject(env->context()).ToLocalChecked(); // Cast + Local module; + Local exports; + Local exports_v; + if (!args[0]->ToObject(context).ToLocal(&module) || + !module->Get(context, env->exports_string()).ToLocal(&exports_v) || + !exports_v->ToObject(context).ToLocal(&exports)) { + return; // Exception pending. 
+ } + node::Utf8Value filename(env->isolate(), args[1]); // Cast - DLib dlib; - dlib.filename_ = *filename; - dlib.flags_ = flags; + DLib dlib(*filename, flags); bool is_opened = dlib.Open(); // Objects containing v14 or later modules will have registered themselves @@ -2552,17 +2596,22 @@ static void DLOpen(const FunctionCallbackInfo& args) { #ifdef _WIN32 // Windows needs to add the filename into the error message errmsg = String::Concat(errmsg, - args[1]->ToString(env->context()).ToLocalChecked()); + args[1]->ToString(context).ToLocalChecked()); #endif // _WIN32 env->isolate()->ThrowException(Exception::Error(errmsg)); return; } if (mp == nullptr) { - dlib.Close(); - env->ThrowError("Module did not self-register."); + if (auto callback = GetInitializerCallback(&dlib)) { + callback(exports, module, context); + } else { + dlib.Close(); + env->ThrowError("Module did not self-register."); + } return; } + if (mp->nm_version == -1) { if (env->EmitNapiWarning()) { if (ProcessEmitWarning(env, "N-API is an experimental feature and could " @@ -2599,22 +2648,8 @@ static void DLOpen(const FunctionCallbackInfo& args) { mp->nm_link = modlist_addon; modlist_addon = mp; - Local exports_string = env->exports_string(); - MaybeLocal maybe_exports = - module->Get(env->context(), exports_string); - - if (maybe_exports.IsEmpty() || - maybe_exports.ToLocalChecked()->ToObject(env->context()).IsEmpty()) { - dlib.Close(); - return; - } - - Local exports = - maybe_exports.ToLocalChecked()->ToObject(env->context()) - .FromMaybe(Local()); - if (mp->nm_context_register_func != nullptr) { - mp->nm_context_register_func(exports, module, env->context(), mp->nm_priv); + mp->nm_context_register_func(exports, module, context, mp->nm_priv); } else if (mp->nm_register_func != nullptr) { mp->nm_register_func(exports, module, mp->nm_priv); } else { @@ -3722,6 +3757,10 @@ static void PrintHelp() { " --trace-events-enabled track trace events\n" " --trace-event-categories comma separated list of trace event\n" " categories to record\n" + " --trace-event-file-pattern Template string specifying the\n" + " filepath for the trace-events data, it\n" + " supports ${rotation} and ${pid}\n" + " log-rotation id. 
%%2$u is the pid.\n" " --track-heap-objects track heap object allocations for heap " "snapshots\n" " --prof-process process v8 profiler output generated\n" @@ -3850,6 +3889,7 @@ static void CheckIfAllowedInEnv(const char* exe, bool is_env, "--no-force-async-hooks-checks", "--trace-events-enabled", "--trace-event-categories", + "--trace-event-file-pattern", "--track-heap-objects", "--zero-fill-buffers", "--v8-pool-size", @@ -4001,6 +4041,14 @@ static void ParseArgs(int* argc, } args_consumed += 1; trace_enabled_categories = categories; + } else if (strcmp(arg, "--trace-event-file-pattern") == 0) { + const char* file_pattern = argv[index + 1]; + if (file_pattern == nullptr) { + fprintf(stderr, "%s: %s requires an argument\n", argv[0], arg); + exit(9); + } + args_consumed += 1; + trace_file_pattern = file_pattern; } else if (strcmp(arg, "--track-heap-objects") == 0) { track_heap_objects = true; } else if (strcmp(arg, "--throw-deprecation") == 0) { diff --git a/src/node.h b/src/node.h index 89dbdfc727b0c5..44d9ca9c77c18b 100644 --- a/src/node.h +++ b/src/node.h @@ -532,6 +532,9 @@ extern "C" NODE_EXTERN void node_module_register(void* mod); } \ } +// Usage: `NODE_MODULE(NODE_GYP_MODULE_NAME, InitializerFunction)` +// If no NODE_MODULE is declared, Node.js looks for the well-known +// symbol `node_register_module_v${NODE_MODULE_VERSION}`. #define NODE_MODULE(modname, regfunc) \ NODE_MODULE_X(modname, regfunc, NULL, 0) // NOLINT (readability/null_usage) diff --git a/src/node_api.cc b/src/node_api.cc index 2c5f3066f728b1..63ce1d8e86955e 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -31,17 +31,11 @@ struct napi_env__ { : isolate(_isolate), last_error(), loop(_loop) {} - ~napi_env__() { - last_exception.Reset(); - wrap_template.Reset(); - function_data_template.Reset(); - accessor_data_template.Reset(); - } v8::Isolate* isolate; - v8::Persistent last_exception; - v8::Persistent wrap_template; - v8::Persistent function_data_template; - v8::Persistent accessor_data_template; + node::Persistent last_exception; + node::Persistent wrap_template; + node::Persistent function_data_template; + node::Persistent accessor_data_template; napi_extended_error_info last_error; int open_handle_scopes = 0; int open_callback_scopes = 0; @@ -274,13 +268,13 @@ static_assert(sizeof(v8::Local) == sizeof(napi_value), "Cannot convert between v8::Local and napi_value"); static -napi_deferred JsDeferredFromV8Persistent(v8::Persistent* local) { +napi_deferred JsDeferredFromNodePersistent(node::Persistent* local) { return reinterpret_cast(local); } static -v8::Persistent* V8PersistentFromJsDeferred(napi_deferred local) { - return reinterpret_cast*>(local); +node::Persistent* NodePersistentFromJsDeferred(napi_deferred local) { + return reinterpret_cast*>(local); } static @@ -360,7 +354,7 @@ class Finalizer { void* _finalize_hint; }; -// Wrapper around v8::Persistent that implements reference counting. +// Wrapper around node::Persistent that implements reference counting. class Reference : private Finalizer { private: Reference(napi_env env, @@ -381,16 +375,6 @@ class Reference : private Finalizer { } } - ~Reference() { - // The V8 Persistent class currently does not reset in its destructor: - // see NonCopyablePersistentTraits::kResetInDestructor = false. - // (Comments there claim that might change in the future.) - // To avoid memory leaks, it is better to reset at this time, however - // care must be taken to avoid attempting this after the Isolate has - // shut down, for example via a static (atexit) destructor. 
- _persistent.Reset(); - } - public: void* Data() { return _finalize_data; @@ -470,7 +454,7 @@ class Reference : private Finalizer { } } - v8::Persistent _persistent; + node::Persistent _persistent; uint32_t _refcount; bool _delete_self; }; @@ -846,8 +830,8 @@ napi_status ConcludeDeferred(napi_env env, CHECK_ARG(env, result); v8::Local context = env->isolate->GetCurrentContext(); - v8::Persistent* deferred_ref = - V8PersistentFromJsDeferred(deferred); + node::Persistent* deferred_ref = + NodePersistentFromJsDeferred(deferred); v8::Local v8_deferred = v8::Local::New(env->isolate, *deferred_ref); @@ -857,7 +841,6 @@ napi_status ConcludeDeferred(napi_env env, v8_resolver->Resolve(context, v8impl::V8LocalValueFromJsValue(result)) : v8_resolver->Reject(context, v8impl::V8LocalValueFromJsValue(result)); - deferred_ref->Reset(); delete deferred_ref; RETURN_STATUS_IF_FALSE(env, success.FromMaybe(false), napi_generic_failure); @@ -3493,10 +3476,10 @@ napi_status napi_create_promise(napi_env env, CHECK_MAYBE_EMPTY(env, maybe, napi_generic_failure); auto v8_resolver = maybe.ToLocalChecked(); - auto v8_deferred = new v8::Persistent(); + auto v8_deferred = new node::Persistent(); v8_deferred->Reset(env->isolate, v8_resolver); - *deferred = v8impl::JsDeferredFromV8Persistent(v8_deferred); + *deferred = v8impl::JsDeferredFromNodePersistent(v8_deferred); *promise = v8impl::JsValueFromV8LocalValue(v8_resolver->GetPromise()); return GET_RETURN_STATUS(env); } diff --git a/src/node_buffer.cc b/src/node_buffer.cc index dff9d3c0995e02..f9a807602f612c 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -78,7 +78,6 @@ using v8::Local; using v8::Maybe; using v8::MaybeLocal; using v8::Object; -using v8::Persistent; using v8::String; using v8::Uint32Array; using v8::Uint8Array; @@ -103,7 +102,6 @@ class CallbackInfo { FreeCallback callback, char* data, void* hint); - ~CallbackInfo(); Persistent persistent_; FreeCallback const callback_; char* const data_; @@ -147,11 +145,6 @@ CallbackInfo::CallbackInfo(Isolate* isolate, } -CallbackInfo::~CallbackInfo() { - persistent_.Reset(); -} - - void CallbackInfo::WeakCallback( const WeakCallbackInfo& data) { CallbackInfo* self = data.GetParameter(); diff --git a/src/node_contextify.cc b/src/node_contextify.cc index aac72514575695..f49a2362769bc2 100644 --- a/src/node_contextify.cc +++ b/src/node_contextify.cc @@ -50,7 +50,6 @@ using v8::NamedPropertyHandlerConfiguration; using v8::Nothing; using v8::Object; using v8::ObjectTemplate; -using v8::Persistent; using v8::PropertyAttribute; using v8::PropertyCallbackInfo; using v8::PropertyDescriptor; @@ -110,11 +109,6 @@ ContextifyContext::ContextifyContext( } -ContextifyContext::~ContextifyContext() { - context_.Reset(); -} - - // This is an object that just keeps an internal pointer to this // ContextifyContext. It's passed to the NamedPropertyHandler. 
If we // pass the main JavaScript context object we're embedded in, then the @@ -1158,11 +1152,6 @@ class ContextifyScript : public BaseObject { : BaseObject(env, object) { MakeWeak(this); } - - - ~ContextifyScript() override { - script_.Reset(); - } }; diff --git a/src/node_contextify.h b/src/node_contextify.h index c2b5b4dd9c93aa..e6b7e0a9e080f2 100644 --- a/src/node_contextify.h +++ b/src/node_contextify.h @@ -15,13 +15,12 @@ class ContextifyContext { enum { kSandboxObjectIndex = 1 }; Environment* const env_; - v8::Persistent context_; + Persistent context_; public: ContextifyContext(Environment* env, v8::Local sandbox_obj, v8::Local options_obj); - ~ContextifyContext(); v8::Local CreateDataWrapper(Environment* env); v8::Local CreateV8Context(Environment* env, diff --git a/src/node_crypto.cc b/src/node_crypto.cc index a398f745537b54..3e630926b5547a 100644 --- a/src/node_crypto.cc +++ b/src/node_crypto.cc @@ -43,12 +43,14 @@ // StartComAndWoSignData.inc #include "StartComAndWoSignData.inc" -#include #include #include // INT_MAX #include #include #include + +#include +#include #include #define THROW_AND_RETURN_IF_NOT_STRING_OR_BUFFER(val, prefix) \ @@ -107,7 +109,6 @@ using v8::MaybeLocal; using v8::Null; using v8::Object; using v8::ObjectTemplate; -using v8::Persistent; using v8::PropertyAttribute; using v8::ReadOnly; using v8::Signature; @@ -115,6 +116,12 @@ using v8::String; using v8::Value; +struct StackOfX509Deleter { + void operator()(STACK_OF(X509)* p) const { sk_X509_pop_free(p, X509_free); } +}; + +using StackOfX509 = std::unique_ptr; + #if OPENSSL_VERSION_NUMBER < 0x10100000L static void RSA_get0_key(const RSA* r, const BIGNUM** n, const BIGNUM** e, const BIGNUM** d) { @@ -839,17 +846,15 @@ int SSL_CTX_use_certificate_chain(SSL_CTX* ctx, int ret = 0; unsigned long err = 0; // NOLINT(runtime/int) - // Read extra certs - STACK_OF(X509)* extra_certs = sk_X509_new_null(); - if (extra_certs == nullptr) { + StackOfX509 extra_certs(sk_X509_new_null()); + if (!extra_certs) goto done; - } while ((extra = PEM_read_bio_X509(in, nullptr, NoPasswordCallback, nullptr))) { - if (sk_X509_push(extra_certs, extra)) + if (sk_X509_push(extra_certs.get(), extra)) continue; // Failure, free all certs @@ -867,13 +872,11 @@ int SSL_CTX_use_certificate_chain(SSL_CTX* ctx, goto done; } - ret = SSL_CTX_use_certificate_chain(ctx, x, extra_certs, cert, issuer); + ret = SSL_CTX_use_certificate_chain(ctx, x, extra_certs.get(), cert, issuer); if (!ret) goto done; done: - if (extra_certs != nullptr) - sk_X509_pop_free(extra_certs, X509_free); if (extra != nullptr) X509_free(extra); if (x != nullptr) @@ -1812,6 +1815,25 @@ static bool SafeX509ExtPrint(BIO* out, X509_EXTENSION* ext) { } +static void AddFingerprintDigest(const unsigned char* md, + unsigned int md_size, + char (*fingerprint)[3 * EVP_MAX_MD_SIZE + 1]) { + unsigned int i; + const char hex[] = "0123456789ABCDEF"; + + for (i = 0; i < md_size; i++) { + (*fingerprint)[3*i] = hex[(md[i] & 0xf0) >> 4]; + (*fingerprint)[(3*i)+1] = hex[(md[i] & 0x0f)]; + (*fingerprint)[(3*i)+2] = ':'; + } + + if (md_size > 0) { + (*fingerprint)[(3*(md_size-1))+2] = '\0'; + } else { + (*fingerprint)[0] = '\0'; + } +} + static Local X509ToObject(Environment* env, X509* cert) { EscapableHandleScope scope(env->isolate()); Local context = env->context(); @@ -1928,26 +1950,18 @@ static Local X509ToObject(Environment* env, X509* cert) { mem->length)).FromJust(); BIO_free_all(bio); - unsigned int md_size, i; unsigned char md[EVP_MAX_MD_SIZE]; + unsigned int md_size; + char 
fingerprint[EVP_MAX_MD_SIZE * 3 + 1]; if (X509_digest(cert, EVP_sha1(), md, &md_size)) { - const char hex[] = "0123456789ABCDEF"; - char fingerprint[EVP_MAX_MD_SIZE * 3]; - - for (i = 0; i < md_size; i++) { - fingerprint[3*i] = hex[(md[i] & 0xf0) >> 4]; - fingerprint[(3*i)+1] = hex[(md[i] & 0x0f)]; - fingerprint[(3*i)+2] = ':'; - } - - if (md_size > 0) { - fingerprint[(3*(md_size-1))+2] = '\0'; - } else { - fingerprint[0] = '\0'; - } - - info->Set(context, env->fingerprint_string(), - OneByteString(env->isolate(), fingerprint)).FromJust(); + AddFingerprintDigest(md, md_size, &fingerprint); + info->Set(context, env->fingerprint_string(), + OneByteString(env->isolate(), fingerprint)).FromJust(); + } + if (X509_digest(cert, EVP_sha256(), md, &md_size)) { + AddFingerprintDigest(md, md_size, &fingerprint); + info->Set(context, env->fingerprint256_string(), + OneByteString(env->isolate(), fingerprint)).FromJust(); } STACK_OF(ASN1_OBJECT)* eku = static_cast( @@ -1991,109 +2005,128 @@ static Local X509ToObject(Environment* env, X509* cert) { } -// TODO(indutny): Split it into multiple smaller functions +static Local AddIssuerChainToObject(X509** cert, + Local object, + StackOfX509 peer_certs, + Environment* const env) { + Local context = env->isolate()->GetCurrentContext(); + *cert = sk_X509_delete(peer_certs.get(), 0); + for (;;) { + int i; + for (i = 0; i < sk_X509_num(peer_certs.get()); i++) { + X509* ca = sk_X509_value(peer_certs.get(), i); + if (X509_check_issued(ca, *cert) != X509_V_OK) + continue; + + Local ca_info = X509ToObject(env, ca); + object->Set(context, env->issuercert_string(), ca_info).FromJust(); + object = ca_info; + + // NOTE: Intentionally freeing cert that is not used anymore. + X509_free(*cert); + + // Delete cert and continue aggregating issuers. + *cert = sk_X509_delete(peer_certs.get(), i); + break; + } + + // Issuer not found, break out of the loop. + if (i == sk_X509_num(peer_certs.get())) + break; + } + return object; +} + + +static StackOfX509 CloneSSLCerts(X509** cert, + const STACK_OF(X509)* const ssl_certs) { + StackOfX509 peer_certs(sk_X509_new(nullptr)); + if (*cert != nullptr) + sk_X509_push(peer_certs.get(), *cert); + for (int i = 0; i < sk_X509_num(ssl_certs); i++) { + *cert = X509_dup(sk_X509_value(ssl_certs, i)); + if (*cert == nullptr) + return StackOfX509(); + if (!sk_X509_push(peer_certs.get(), *cert)) + return StackOfX509(); + } + return peer_certs; +} + + +static Local GetLastIssuedCert(X509** cert, + const SSL* const ssl, + Local issuer_chain, + Environment* const env) { + Local context = env->isolate()->GetCurrentContext(); + while (X509_check_issued(*cert, *cert) != X509_V_OK) { + X509* ca; + if (SSL_CTX_get_issuer(SSL_get_SSL_CTX(ssl), *cert, &ca) <= 0) + break; + + Local ca_info = X509ToObject(env, ca); + issuer_chain->Set(context, env->issuercert_string(), ca_info).FromJust(); + issuer_chain = ca_info; + + // NOTE: Intentionally freeing cert that is not used anymore. + X509_free(*cert); + + // Delete cert and continue aggregating issuers. + *cert = ca; + } + return issuer_chain; +} + + template void SSLWrap::GetPeerCertificate( const FunctionCallbackInfo& args) { Base* w; ASSIGN_OR_RETURN_UNWRAP(&w, args.Holder()); Environment* env = w->ssl_env(); - Local context = env->context(); ClearErrorOnReturn clear_error_on_return; Local result; - Local info; + // Used to build the issuer certificate chain. + Local issuer_chain; // NOTE: This is because of the odd OpenSSL behavior. 
On client `cert_chain` - // contains the `peer_certificate`, but on server it doesn't + // contains the `peer_certificate`, but on server it doesn't. X509* cert = w->is_server() ? SSL_get_peer_certificate(w->ssl_) : nullptr; STACK_OF(X509)* ssl_certs = SSL_get_peer_cert_chain(w->ssl_); - STACK_OF(X509)* peer_certs = nullptr; - if (cert == nullptr && ssl_certs == nullptr) + if (cert == nullptr && (ssl_certs == nullptr || sk_X509_num(ssl_certs) == 0)) goto done; - if (cert == nullptr && sk_X509_num(ssl_certs) == 0) - goto done; - - // Short result requested + // Short result requested. if (args.Length() < 1 || !args[0]->IsTrue()) { - result = X509ToObject(env, - cert == nullptr ? sk_X509_value(ssl_certs, 0) : cert); + X509* target_cert = cert; + if (target_cert == nullptr) + target_cert = sk_X509_value(ssl_certs, 0); + result = X509ToObject(env, target_cert); goto done; } - // Clone `ssl_certs`, because we are going to destruct it - peer_certs = sk_X509_new(nullptr); - if (cert != nullptr) - sk_X509_push(peer_certs, cert); - for (int i = 0; i < sk_X509_num(ssl_certs); i++) { - cert = X509_dup(sk_X509_value(ssl_certs, i)); - if (cert == nullptr) - goto done; - if (!sk_X509_push(peer_certs, cert)) - goto done; - } - - // First and main certificate - cert = sk_X509_value(peer_certs, 0); - result = X509ToObject(env, cert); - info = result; + if (auto peer_certs = CloneSSLCerts(&cert, ssl_certs)) { + // First and main certificate. + cert = sk_X509_value(peer_certs.get(), 0); + result = X509ToObject(env, cert); - // Put issuer inside the object - cert = sk_X509_delete(peer_certs, 0); - while (sk_X509_num(peer_certs) > 0) { - int i; - for (i = 0; i < sk_X509_num(peer_certs); i++) { - X509* ca = sk_X509_value(peer_certs, i); - if (X509_check_issued(ca, cert) != X509_V_OK) - continue; + issuer_chain = + AddIssuerChainToObject(&cert, result, std::move(peer_certs), env); + issuer_chain = GetLastIssuedCert(&cert, w->ssl_, issuer_chain, env); + // Last certificate should be self-signed. 
+ if (X509_check_issued(cert, cert) == X509_V_OK) + issuer_chain->Set(env->context(), + env->issuercert_string(), + issuer_chain).FromJust(); - Local ca_info = X509ToObject(env, ca); - info->Set(context, env->issuercert_string(), ca_info).FromJust(); - info = ca_info; - - // NOTE: Intentionally freeing cert that is not used anymore - X509_free(cert); - - // Delete cert and continue aggregating issuers - cert = sk_X509_delete(peer_certs, i); - break; - } - - // Issuer not found, break out of the loop - if (i == sk_X509_num(peer_certs)) - break; - } - - // Last certificate should be self-signed - while (X509_check_issued(cert, cert) != X509_V_OK) { - X509* ca; - if (SSL_CTX_get_issuer(SSL_get_SSL_CTX(w->ssl_), cert, &ca) <= 0) - break; - - Local ca_info = X509ToObject(env, ca); - info->Set(context, env->issuercert_string(), ca_info).FromJust(); - info = ca_info; - - // NOTE: Intentionally freeing cert that is not used anymore - X509_free(cert); - - // Delete cert and continue aggregating issuers - cert = ca; + CHECK_NE(cert, nullptr); } - // Self-issued certificate - if (X509_check_issued(cert, cert) == X509_V_OK) - info->Set(context, env->issuercert_string(), info).FromJust(); - - CHECK_NE(cert, nullptr); - done: if (cert != nullptr) X509_free(cert); - if (peer_certs != nullptr) - sk_X509_pop_free(peer_certs, X509_free); if (result.IsEmpty()) result = Object::New(env->isolate()); args.GetReturnValue().Set(result); @@ -2793,7 +2826,6 @@ void SSLWrap::CertCbDone(const FunctionCallbackInfo& args) { if (cons->HasInstance(ctx)) { SecureContext* sc; ASSIGN_OR_RETURN_UNWRAP(&sc, ctx.As()); - w->sni_context_.Reset(); w->sni_context_.Reset(env->isolate(), ctx); int rv; @@ -3145,25 +3177,23 @@ inline CheckResult CheckWhitelistedServerCert(X509_STORE_CTX* ctx) { unsigned char hash[CNNIC_WHITELIST_HASH_LEN]; unsigned int hashlen = CNNIC_WHITELIST_HASH_LEN; - STACK_OF(X509)* chain = X509_STORE_CTX_get1_chain(ctx); - CHECK_NE(chain, nullptr); - CHECK_GT(sk_X509_num(chain), 0); + StackOfX509 chain(X509_STORE_CTX_get1_chain(ctx)); + CHECK(chain); + CHECK_GT(sk_X509_num(chain.get()), 0); // Take the last cert as root at the first time. - X509* root_cert = sk_X509_value(chain, sk_X509_num(chain)-1); + X509* root_cert = sk_X509_value(chain.get(), sk_X509_num(chain.get())-1); X509_NAME* root_name = X509_get_subject_name(root_cert); if (!IsSelfSigned(root_cert)) { - root_cert = FindRoot(chain); + root_cert = FindRoot(chain.get()); CHECK_NE(root_cert, nullptr); root_name = X509_get_subject_name(root_cert); } - X509* leaf_cert = sk_X509_value(chain, 0); - if (!CheckStartComOrWoSign(root_name, leaf_cert)) { - sk_X509_pop_free(chain, X509_free); + X509* leaf_cert = sk_X509_value(chain.get(), 0); + if (!CheckStartComOrWoSign(root_name, leaf_cert)) return CHECK_CERT_REVOKED; - } // When the cert is issued from either CNNNIC ROOT CA or CNNNIC EV // ROOT CA, check a hash of its leaf cert if it is in the whitelist. 
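The node_crypto.cc hunks above and below lean on the StackOfX509 helper introduced earlier in the file: the usual unique_ptr-with-custom-deleter idiom for OpenSSL certificate stacks. A minimal, self-contained sketch of that idiom follows, assuming the alias is std::unique_ptr<STACK_OF(X509), StackOfX509Deleter>; the CountAndDrop() helper is hypothetical and exists only to illustrate the ownership pattern.

#include <memory>
#include <openssl/x509.h>

// Deleter mirroring the one defined in node_crypto.cc: frees the stack and
// every certificate it still owns.
struct StackOfX509Deleter {
  void operator()(STACK_OF(X509)* p) const { sk_X509_pop_free(p, X509_free); }
};
using StackOfX509 = std::unique_ptr<STACK_OF(X509), StackOfX509Deleter>;

// Hypothetical helper: duplicates `cert` onto a fresh stack and returns the
// element count. No sk_X509_pop_free() is needed on any return path; the
// unique_ptr runs the deleter when `certs` goes out of scope.
int CountAndDrop(X509* cert) {
  StackOfX509 certs(sk_X509_new_null());
  if (!certs)
    return -1;
  if (cert != nullptr) {
    X509* copy = X509_dup(cert);
    if (copy == nullptr)
      return -1;
    if (!sk_X509_push(certs.get(), copy)) {
      X509_free(copy);  // push failed, so the stack never took ownership
      return -1;
    }
  }
  return sk_X509_num(certs.get());
}

With ownership tied to scope like this, the early `return CHECK_CERT_REVOKED;` paths in CheckWhitelistedServerCert stay leak-free without the repeated explicit sk_X509_pop_free() calls the old code carried before each return.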
@@ -3176,13 +3206,10 @@ inline CheckResult CheckWhitelistedServerCert(X509_STORE_CTX* ctx) { void* result = bsearch(hash, WhitelistedCNNICHashes, arraysize(WhitelistedCNNICHashes), CNNIC_WHITELIST_HASH_LEN, compar); - if (result == nullptr) { - sk_X509_pop_free(chain, X509_free); + if (result == nullptr) return CHECK_CERT_REVOKED; - } } - sk_X509_pop_free(chain, X509_free); return CHECK_OK; } @@ -3590,7 +3617,8 @@ void Connection::GetServername(const FunctionCallbackInfo& args) { ASSIGN_OR_RETURN_UNWRAP(&conn, args.Holder()); if (conn->is_server() && !conn->servername_.IsEmpty()) { - args.GetReturnValue().Set(conn->servername_); + args.GetReturnValue().Set( + PersistentToLocal(args.GetIsolate(), conn->servername_)); } else { args.GetReturnValue().Set(false); } @@ -5551,7 +5579,6 @@ class PBKDF2Request : public AsyncWrap { keylen_ = 0; ClearWrap(object()); - persistent().Reset(); } uv_work_t* work_req() { @@ -5718,7 +5745,6 @@ class RandomBytesRequest : public AsyncWrap { ~RandomBytesRequest() override { ClearWrap(object()); - persistent().Reset(); } uv_work_t* work_req() { diff --git a/src/node_crypto.h b/src/node_crypto.h index b866117f844358..f1efa811985681 100644 --- a/src/node_crypto.h +++ b/src/node_crypto.h @@ -225,14 +225,6 @@ class SSLWrap { SSL_SESSION_free(next_sess_); next_sess_ = nullptr; } - -#ifdef SSL_CTRL_SET_TLSEXT_SERVERNAME_CB - sni_context_.Reset(); -#endif - -#ifdef NODE__HAVE_TLSEXT_STATUS_CB - ocsp_response_.Reset(); -#endif // NODE__HAVE_TLSEXT_STATUS_CB } inline SSL* ssl() const { return ssl_; } @@ -354,11 +346,11 @@ class SSLWrap { ClientHelloParser hello_parser_; #ifdef NODE__HAVE_TLSEXT_STATUS_CB - v8::Persistent ocsp_response_; + Persistent ocsp_response_; #endif // NODE__HAVE_TLSEXT_STATUS_CB #ifdef SSL_CTRL_SET_TLSEXT_SERVERNAME_CB - v8::Persistent sni_context_; + Persistent sni_context_; #endif friend class SecureContext; @@ -380,13 +372,13 @@ class Connection : public AsyncWrap, public SSLWrap { void NewSessionDoneCb(); #ifndef OPENSSL_NO_NEXTPROTONEG - v8::Persistent npnProtos_; - v8::Persistent selectedNPNProto_; + Persistent npnProtos_; + Persistent selectedNPNProto_; #endif #ifdef SSL_CTRL_SET_TLSEXT_SERVERNAME_CB - v8::Persistent sniObject_; - v8::Persistent servername_; + Persistent sniObject_; + Persistent servername_; #endif size_t self_size() const override { return sizeof(*this); } diff --git a/src/node_file.cc b/src/node_file.cc index 9df13be5bd2dff..10655e54e54e90 100644 --- a/src/node_file.cc +++ b/src/node_file.cc @@ -366,7 +366,8 @@ class fs_req_wrap { After(uv_req); \ req_wrap = nullptr; \ } else { \ - args.GetReturnValue().Set(req_wrap->persistent()); \ + args.GetReturnValue().Set( \ + PersistentToLocal(env->isolate(), req_wrap->persistent())); \ } #define ASYNC_CALL(func, req, encoding, ...) 
\ @@ -1140,7 +1141,8 @@ static void WriteString(const FunctionCallbackInfo& args) { return; } - return args.GetReturnValue().Set(req_wrap->persistent()); + return args.GetReturnValue().Set( + PersistentToLocal(env->isolate(), req_wrap->persistent())); } diff --git a/src/node_http2.cc b/src/node_http2.cc index 6f59c119e53a6b..7650969f8639ce 100644 --- a/src/node_http2.cc +++ b/src/node_http2.cc @@ -278,8 +278,6 @@ Http2Session::Http2Settings::Http2Settings( Http2Session::Http2Settings::~Http2Settings() { if (!object().IsEmpty()) ClearWrap(object()); - persistent().Reset(); - CHECK(persistent().IsEmpty()); } // Generates a Buffer that contains the serialized payload of a SETTINGS @@ -533,10 +531,6 @@ Http2Session::Http2Session(Environment* env, Http2Session::~Http2Session() { CHECK_EQ(flags_ & SESSION_STATE_HAS_SCOPE, 0); - if (!object().IsEmpty()) - ClearWrap(object()); - persistent().Reset(); - CHECK(persistent().IsEmpty()); DEBUG_HTTP2SESSION(this, "freeing nghttp2 session"); nghttp2_session_del(session_); } @@ -1706,6 +1700,14 @@ void Http2Session::OnStreamRead(ssize_t nread, const uv_buf_t& buf) { stream_buf_ = uv_buf_init(nullptr, 0); } +bool Http2Session::HasWritesOnSocketForStream(Http2Stream* stream) { + for (const nghttp2_stream_write& wr : outgoing_buffers_) { + if (wr.req_wrap != nullptr && wr.req_wrap->stream() == stream) + return true; + } + return false; +} + // Every Http2Session session is tightly bound to a single i/o StreamBase // (typically a net.Socket or tls.TLSSocket). The lifecycle of the two is // tightly coupled with all data transfer between the two happening at the @@ -1759,15 +1761,11 @@ Http2Stream::Http2Stream( Http2Stream::~Http2Stream() { + DEBUG_HTTP2STREAM(this, "tearing down stream"); if (session_ != nullptr) { session_->RemoveStream(this); session_ = nullptr; } - - if (!object().IsEmpty()) - ClearWrap(object()); - persistent().Reset(); - CHECK(persistent().IsEmpty()); } // Notify the Http2Stream that a new block of HEADERS is being processed. @@ -1845,7 +1843,7 @@ inline void Http2Stream::Destroy() { Http2Stream* stream = static_cast(data); // Free any remaining outgoing data chunks here. This should be done // here because it's possible for destroy to have been called while - // we still have qeueued outbound writes. + // we still have queued outbound writes. while (!stream->queue_.empty()) { nghttp2_stream_write& head = stream->queue_.front(); if (head.req_wrap != nullptr) @@ -1853,7 +1851,11 @@ inline void Http2Stream::Destroy() { stream->queue_.pop(); } - delete stream; + // We can destroy the stream now if there are no writes for it + // already on the socket. Otherwise, we'll wait for the garbage collector + // to take care of cleaning up. + if (!stream->session()->HasWritesOnSocketForStream(stream)) + delete stream; }, this, this->object()); statistics_.end_time = uv_hrtime(); @@ -2192,6 +2194,17 @@ ssize_t Http2Stream::Provider::Stream::OnRead(nghttp2_session* handle, size_t amount = 0; // amount of data being sent in this data frame. + // Remove all empty chunks from the head of the queue. + // This is done here so that .write('', cb) is still a meaningful way to + // find out when the HTTP2 stream wants to consume data, and because the + // StreamBase API allows empty input chunks. 
+ while (!stream->queue_.empty() && stream->queue_.front().buf.len == 0) { + WriteWrap* finished = stream->queue_.front().req_wrap; + stream->queue_.pop(); + if (finished != nullptr) + finished->Done(0); + } + if (!stream->queue_.empty()) { DEBUG_HTTP2SESSION2(session, "stream %d has pending outbound data", id); amount = std::min(stream->available_outbound_length_, length); @@ -2205,7 +2218,8 @@ ssize_t Http2Stream::Provider::Stream::OnRead(nghttp2_session* handle, } } - if (amount == 0 && stream->IsWritable() && stream->queue_.empty()) { + if (amount == 0 && stream->IsWritable()) { + CHECK(stream->queue_.empty()); DEBUG_HTTP2SESSION2(session, "deferring stream %d", id); return NGHTTP2_ERR_DEFERRED; } @@ -2772,8 +2786,6 @@ Http2Session::Http2Ping::Http2Ping( Http2Session::Http2Ping::~Http2Ping() { if (!object().IsEmpty()) ClearWrap(object()); - persistent().Reset(); - CHECK(persistent().IsEmpty()); } void Http2Session::Http2Ping::Send(uint8_t* payload) { diff --git a/src/node_http2.h b/src/node_http2.h index 217c19c09287af..8f6662a0160bec 100644 --- a/src/node_http2.h +++ b/src/node_http2.h @@ -878,6 +878,9 @@ class Http2Session : public AsyncWrap, public StreamListener { // Removes a stream instance from this session inline void RemoveStream(Http2Stream* stream); + // Indicates whether there currently exist outgoing buffers for this stream. + bool HasWritesOnSocketForStream(Http2Stream* stream); + // Write data to the session inline ssize_t Write(const uv_buf_t* bufs, size_t nbufs); diff --git a/src/node_http_parser.cc b/src/node_http_parser.cc index d4044f8bbeea7b..207310f4068f43 100644 --- a/src/node_http_parser.cc +++ b/src/node_http_parser.cc @@ -157,7 +157,6 @@ class Parser : public AsyncWrap, public StreamListener { ~Parser() override { ClearWrap(object()); - persistent().Reset(); } diff --git a/src/node_internals.h b/src/node_internals.h index ced92da3216a28..af716c83997c78 100644 --- a/src/node_internals.h +++ b/src/node_internals.h @@ -25,6 +25,7 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #include "node.h" +#include "node_persistent.h" #include "util-inl.h" #include "env-inl.h" #include "uv.h" @@ -214,7 +215,7 @@ class Environment; template inline v8::Local PersistentToLocal( v8::Isolate* isolate, - const v8::Persistent& persistent); + const Persistent& persistent); // Creates a new context with Node.js-specific tweaks. Currently, it removes // the `v8BreakIterator` property from the global `Intl` object if present. diff --git a/src/node_perf.cc b/src/node_perf.cc index 8ee805a8382c4e..1f7f127a837527 100644 --- a/src/node_perf.cc +++ b/src/node_perf.cc @@ -3,6 +3,10 @@ #include +#ifdef __POSIX__ +#include // gettimeofday +#endif + namespace node { namespace performance { @@ -21,13 +25,38 @@ using v8::Object; using v8::String; using v8::Value; +// Microseconds in a second, as a float. +#define MICROS_PER_SEC 1e6 +// Microseconds in a millisecond, as a float. +#define MICROS_PER_MILLIS 1e3 + +// https://w3c.github.io/hr-time/#dfn-time-origin const uint64_t timeOrigin = PERFORMANCE_NOW(); +// https://w3c.github.io/hr-time/#dfn-time-origin-timestamp +const double timeOriginTimestamp = GetCurrentTimeInMicroseconds(); uint64_t performance_node_start; uint64_t performance_v8_start; uint64_t performance_last_gc_start_mark_ = 0; v8::GCType performance_last_gc_type_ = v8::GCType::kGCTypeAll; +double GetCurrentTimeInMicroseconds() { +#ifdef _WIN32 +// The difference between the Unix Epoch and the Windows Epoch in 100-ns ticks. 
+#define TICKS_TO_UNIX_EPOCH 116444736000000000LL + FILETIME ft; + GetSystemTimeAsFileTime(&ft); + uint64_t filetime_int = static_cast(ft.dwHighDateTime) << 32 | + ft.dwLowDateTime; + // FILETIME is measured in terms of 100 ns. Convert that to 1 us (1000 ns). + return (filetime_int - TICKS_TO_UNIX_EPOCH) / 10.; +#else + struct timeval tp; + gettimeofday(&tp, nullptr); + return MICROS_PER_SEC * tp.tv_sec + tp.tv_usec; +#endif +} + // Initialize the performance entry object properties inline void InitObject(const PerformanceEntry& entry, Local obj) { Environment* env = entry.env(); @@ -372,6 +401,12 @@ void Init(Local target, v8::Number::New(isolate, timeOrigin / 1e6), attr).ToChecked(); + target->DefineOwnProperty( + context, + FIXED_ONE_BYTE_STRING(isolate, "timeOriginTimestamp"), + v8::Number::New(isolate, timeOriginTimestamp / MICROS_PER_MILLIS), + attr).ToChecked(); + target->DefineOwnProperty(context, env->constants_string(), constants, diff --git a/src/node_perf.h b/src/node_perf.h index f1b182b4e3dcc7..fbb9b2ad0414b1 100644 --- a/src/node_perf.h +++ b/src/node_perf.h @@ -22,6 +22,10 @@ using v8::Local; using v8::Object; using v8::Value; +extern const uint64_t timeOrigin; + +double GetCurrentTimeInMicroseconds(); + static inline PerformanceMilestone ToPerformanceMilestoneEnum(const char* str) { #define V(name, label) \ if (strcmp(str, label) == 0) return NODE_PERFORMANCE_MILESTONE_##name; @@ -77,11 +81,11 @@ class PerformanceEntry { return ToPerformanceEntryTypeEnum(type().c_str()); } - double startTime() const { return startTime_ / 1e6; } + double startTime() const { return startTimeNano() / 1e6; } double duration() const { return durationNano() / 1e6; } - uint64_t startTimeNano() const { return startTime_; } + uint64_t startTimeNano() const { return startTime_ - timeOrigin; } uint64_t durationNano() const { return endTime_ - startTime_; } diff --git a/src/node_perf_common.h b/src/node_perf_common.h index 435a4cffe5a753..7ff57359ba5cb8 100644 --- a/src/node_perf_common.h +++ b/src/node_perf_common.h @@ -4,6 +4,7 @@ #include "node.h" #include "v8.h" +#include #include #include @@ -76,7 +77,10 @@ class performance_state { isolate, offsetof(performance_state_internal, observers), NODE_PERFORMANCE_ENTRY_TYPE_INVALID, - root) {} + root) { + for (size_t i = 0; i < milestones.Length(); i++) + milestones[i] = -1.; + } AliasedBuffer root; AliasedBuffer milestones; diff --git a/src/node_persistent.h b/src/node_persistent.h new file mode 100644 index 00000000000000..762842dd4bd373 --- /dev/null +++ b/src/node_persistent.h @@ -0,0 +1,30 @@ +#ifndef SRC_NODE_PERSISTENT_H_ +#define SRC_NODE_PERSISTENT_H_ + +#if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS + +#include "v8.h" + +namespace node { + +template +struct ResetInDestructorPersistentTraits { + static const bool kResetInDestructor = true; + template + // Disallow copy semantics by leaving this unimplemented. + inline static void Copy( + const v8::Persistent&, + v8::Persistent>*); +}; + +// v8::Persistent does not reset the object slot in its destructor. That is +// acknowledged as a flaw in the V8 API and expected to change in the future +// but for now node::Persistent is the easier and safer alternative. 
+template +using Persistent = v8::Persistent>; + +} // namespace node + +#endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS + +#endif // SRC_NODE_PERSISTENT_H_ diff --git a/src/node_version.h b/src/node_version.h index 72fbe4f3663a46..6cffbaa2593d77 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -23,13 +23,13 @@ #define SRC_NODE_VERSION_H_ #define NODE_MAJOR_VERSION 9 -#define NODE_MINOR_VERSION 7 -#define NODE_PATCH_VERSION 2 +#define NODE_MINOR_VERSION 8 +#define NODE_PATCH_VERSION 0 #define NODE_VERSION_IS_LTS 0 #define NODE_VERSION_LTS_CODENAME "" -#define NODE_VERSION_IS_RELEASE 0 +#define NODE_VERSION_IS_RELEASE 1 #ifndef NODE_STRINGIFY #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n) diff --git a/src/node_zlib.cc b/src/node_zlib.cc index 8ef4383e0355de..388630d507a433 100644 --- a/src/node_zlib.cc +++ b/src/node_zlib.cc @@ -46,7 +46,6 @@ using v8::HandleScope; using v8::Local; using v8::Number; using v8::Object; -using v8::Persistent; using v8::String; using v8::Uint32Array; using v8::Value; diff --git a/src/req_wrap-inl.h b/src/req_wrap-inl.h index 4a7984e649c733..11b1389fa0e771 100644 --- a/src/req_wrap-inl.h +++ b/src/req_wrap-inl.h @@ -25,7 +25,6 @@ template ReqWrap::~ReqWrap() { CHECK_EQ(req_.data, this); // Assert that someone has called Dispatched(). CHECK_EQ(false, persistent().IsEmpty()); - persistent().Reset(); } template diff --git a/src/stream_base-inl.h b/src/stream_base-inl.h index b479e04bae4c8a..81adf7a866b927 100644 --- a/src/stream_base-inl.h +++ b/src/stream_base-inl.h @@ -220,13 +220,11 @@ inline StreamWriteResult StreamBase::Write( ClearError(); } - req_wrap_obj->Set(env->async(), v8::Boolean::New(env->isolate(), async)); - return StreamWriteResult { async, err, req_wrap }; } -template -SimpleShutdownWrap::SimpleShutdownWrap( +template +SimpleShutdownWrap::SimpleShutdownWrap( StreamBase* stream, v8::Local req_wrap_obj) : ShutdownWrap(stream, req_wrap_obj), @@ -236,14 +234,9 @@ SimpleShutdownWrap::SimpleShutdownWrap( Wrap(req_wrap_obj, static_cast(this)); } -template -SimpleShutdownWrap::~SimpleShutdownWrap() { +template +SimpleShutdownWrap::~SimpleShutdownWrap() { ClearWrap(static_cast(this)->object()); - if (kResetPersistent) { - auto& persistent = static_cast(this)->persistent(); - CHECK_EQ(persistent.IsEmpty(), false); - persistent.Reset(); - } } inline ShutdownWrap* StreamBase::CreateShutdownWrap( @@ -251,8 +244,8 @@ inline ShutdownWrap* StreamBase::CreateShutdownWrap( return new SimpleShutdownWrap(this, object); } -template -SimpleWriteWrap::SimpleWriteWrap( +template +SimpleWriteWrap::SimpleWriteWrap( StreamBase* stream, v8::Local req_wrap_obj) : WriteWrap(stream, req_wrap_obj), @@ -262,14 +255,9 @@ SimpleWriteWrap::SimpleWriteWrap( Wrap(req_wrap_obj, static_cast(this)); } -template -SimpleWriteWrap::~SimpleWriteWrap() { +template +SimpleWriteWrap::~SimpleWriteWrap() { ClearWrap(static_cast(this)->object()); - if (kResetPersistent) { - auto& persistent = static_cast(this)->persistent(); - CHECK_EQ(persistent.IsEmpty(), false); - persistent.Reset(); - } } inline WriteWrap* StreamBase::CreateWriteWrap( diff --git a/src/stream_base.cc b/src/stream_base.cc index 9ad9fd5bcb4a46..1d1d324841537f 100644 --- a/src/stream_base.cc +++ b/src/stream_base.cc @@ -14,6 +14,7 @@ namespace node { using v8::Array; +using v8::Boolean; using v8::Context; using v8::FunctionCallbackInfo; using v8::HandleScope; @@ -56,6 +57,20 @@ int StreamBase::Shutdown(const FunctionCallbackInfo& args) { return Shutdown(req_wrap_obj); } +inline void 
SetWriteResultPropertiesOnWrapObject( + Environment* env, + Local req_wrap_obj, + const StreamWriteResult& res, + size_t bytes) { + req_wrap_obj->Set( + env->context(), + env->bytes_string(), + Number::New(env->isolate(), bytes)).FromJust(); + req_wrap_obj->Set( + env->context(), + env->async(), + Boolean::New(env->isolate(), res.async)).FromJust(); +} int StreamBase::Writev(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); @@ -150,7 +165,7 @@ int StreamBase::Writev(const FunctionCallbackInfo& args) { } StreamWriteResult res = Write(*bufs, count, nullptr, req_wrap_obj); - req_wrap_obj->Set(env->bytes_string(), Number::New(env->isolate(), bytes)); + SetWriteResultPropertiesOnWrapObject(env, req_wrap_obj, res, bytes); if (res.wrap != nullptr && storage) { res.wrap->SetAllocatedStorage(storage.release(), storage_size); } @@ -178,9 +193,7 @@ int StreamBase::WriteBuffer(const FunctionCallbackInfo& args) { if (res.async) req_wrap_obj->Set(env->context(), env->buffer_string(), args[1]).FromJust(); - req_wrap_obj->Set(env->context(), env->bytes_string(), - Integer::NewFromUnsigned(env->isolate(), buf.len)) - .FromJust(); + SetWriteResultPropertiesOnWrapObject(env, req_wrap_obj, res, buf.len); return res.err; } @@ -286,10 +299,7 @@ int StreamBase::WriteString(const FunctionCallbackInfo& args) { StreamWriteResult res = Write(&buf, 1, send_handle, req_wrap_obj); - req_wrap_obj->Set(env->context(), env->bytes_string(), - Integer::NewFromUnsigned(env->isolate(), data_size)) - .FromJust(); - + SetWriteResultPropertiesOnWrapObject(env, req_wrap_obj, res, data_size); if (res.wrap != nullptr) { res.wrap->SetAllocatedStorage(data.release(), data_size); } diff --git a/src/stream_base.h b/src/stream_base.h index 59b8ee7b7221f0..8af05059f49e47 100644 --- a/src/stream_base.h +++ b/src/stream_base.h @@ -322,7 +322,7 @@ class StreamBase : public StreamResource { // `OtherBase` must have a constructor that matches the `AsyncWrap` // constructors’s (Environment*, Local, AsyncWrap::Provider) signature // and be a subclass of `AsyncWrap`. 
-template +template class SimpleShutdownWrap : public ShutdownWrap, public OtherBase { public: SimpleShutdownWrap(StreamBase* stream, @@ -333,7 +333,7 @@ class SimpleShutdownWrap : public ShutdownWrap, public OtherBase { size_t self_size() const override { return sizeof(*this); } }; -template +template class SimpleWriteWrap : public WriteWrap, public OtherBase { public: SimpleWriteWrap(StreamBase* stream, diff --git a/src/stream_wrap.cc b/src/stream_wrap.cc index e1df9edd39e151..27fe48d1165c75 100644 --- a/src/stream_wrap.cc +++ b/src/stream_wrap.cc @@ -264,8 +264,8 @@ void LibuvStreamWrap::SetBlocking(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(uv_stream_set_blocking(wrap->stream(), enable)); } -typedef SimpleShutdownWrap, false> LibuvShutdownWrap; -typedef SimpleWriteWrap, false> LibuvWriteWrap; +typedef SimpleShutdownWrap> LibuvShutdownWrap; +typedef SimpleWriteWrap> LibuvWriteWrap; ShutdownWrap* LibuvStreamWrap::CreateShutdownWrap(Local object) { return new LibuvShutdownWrap(this, object); diff --git a/src/tcp_wrap.cc b/src/tcp_wrap.cc index a0a58fb1b5cc8d..61b08217b8f129 100644 --- a/src/tcp_wrap.cc +++ b/src/tcp_wrap.cc @@ -174,11 +174,6 @@ TCPWrap::TCPWrap(Environment* env, Local object, ProviderType provider) } -TCPWrap::~TCPWrap() { - CHECK(persistent().IsEmpty()); -} - - void TCPWrap::SetNoDelay(const FunctionCallbackInfo& args) { TCPWrap* wrap; ASSIGN_OR_RETURN_UNWRAP(&wrap, diff --git a/src/tcp_wrap.h b/src/tcp_wrap.h index a7f6b1901981f6..2ab50f1fdcdfab 100644 --- a/src/tcp_wrap.h +++ b/src/tcp_wrap.h @@ -55,7 +55,6 @@ class TCPWrap : public ConnectionWrap { TCPWrap(Environment* env, v8::Local object, ProviderType provider); - ~TCPWrap(); static void New(const v8::FunctionCallbackInfo& args); static void SetNoDelay(const v8::FunctionCallbackInfo& args); diff --git a/src/tls_wrap.cc b/src/tls_wrap.cc index 4d3295ab39eed8..7ff49522438d65 100644 --- a/src/tls_wrap.cc +++ b/src/tls_wrap.cc @@ -86,12 +86,7 @@ TLSWrap::TLSWrap(Environment* env, TLSWrap::~TLSWrap() { enc_in_ = nullptr; enc_out_ = nullptr; - sc_ = nullptr; - -#ifdef SSL_CTRL_SET_TLSEXT_SERVERNAME_CB - sni_context_.Reset(); -#endif // SSL_CTRL_SET_TLSEXT_SERVERNAME_CB } @@ -565,7 +560,12 @@ int TLSWrap::DoWrite(WriteWrap* w, size_t count, uv_stream_t* send_handle) { CHECK_EQ(send_handle, nullptr); - CHECK_NE(ssl_, nullptr); + + if (ssl_ == nullptr) { + ClearError(); + error_ = "Write after DestroySSL"; + return UV_EPROTO; + } bool empty = true; @@ -605,12 +605,6 @@ int TLSWrap::DoWrite(WriteWrap* w, return 0; } - if (ssl_ == nullptr) { - ClearError(); - error_ = "Write after DestroySSL"; - return UV_EPROTO; - } - crypto::MarkPopErrorOnReturn mark_pop_error_on_return; int written = 0; @@ -853,7 +847,6 @@ int TLSWrap::SelectSNIContextCallback(SSL* s, int* ad, void* arg) { return SSL_TLSEXT_ERR_NOACK; } - p->sni_context_.Reset(); p->sni_context_.Reset(env->isolate(), ctx); SecureContext* sc = Unwrap(ctx.As()); diff --git a/src/tracing/agent.cc b/src/tracing/agent.cc index 4514a0fce1f0a2..71e53e787a464e 100644 --- a/src/tracing/agent.cc +++ b/src/tracing/agent.cc @@ -13,11 +13,12 @@ namespace tracing { using v8::platform::tracing::TraceConfig; using std::string; -Agent::Agent() { +Agent::Agent(const std::string& log_file_pattern) { int err = uv_loop_init(&tracing_loop_); CHECK_EQ(err, 0); - NodeTraceWriter* trace_writer = new NodeTraceWriter(&tracing_loop_); + NodeTraceWriter* trace_writer = new NodeTraceWriter( + log_file_pattern, &tracing_loop_); TraceBuffer* trace_buffer = new NodeTraceBuffer( 
NodeTraceBuffer::kBufferChunks, trace_writer, &tracing_loop_); tracing_controller_ = new TracingController(); diff --git a/src/tracing/agent.h b/src/tracing/agent.h index bd8e90004b015c..bbb94a0c10d43e 100644 --- a/src/tracing/agent.h +++ b/src/tracing/agent.h @@ -12,7 +12,7 @@ using v8::platform::tracing::TracingController; class Agent { public: - Agent(); + explicit Agent(const std::string& log_file_pattern); void Start(const std::string& enabled_categories); void Stop(); diff --git a/src/tracing/node_trace_writer.cc b/src/tracing/node_trace_writer.cc index 9293e9cb8f7b4a..15c59fc98a8747 100644 --- a/src/tracing/node_trace_writer.cc +++ b/src/tracing/node_trace_writer.cc @@ -8,8 +8,9 @@ namespace node { namespace tracing { -NodeTraceWriter::NodeTraceWriter(uv_loop_t* tracing_loop) - : tracing_loop_(tracing_loop) { +NodeTraceWriter::NodeTraceWriter(const std::string& log_file_pattern, + uv_loop_t* tracing_loop) + : tracing_loop_(tracing_loop), log_file_pattern_(log_file_pattern) { flush_signal_.data = this; int err = uv_async_init(tracing_loop_, &flush_signal_, FlushSignalCb); CHECK_EQ(err, 0); @@ -54,12 +55,27 @@ NodeTraceWriter::~NodeTraceWriter() { } } +void replace_substring(std::string* target, + const std::string& search, + const std::string& insert) { + size_t pos = target->find(search); + for (; pos != std::string::npos; pos = target->find(search, pos)) { + target->replace(pos, search.size(), insert); + pos += insert.size(); + } +} + void NodeTraceWriter::OpenNewFileForStreaming() { ++file_num_; uv_fs_t req; - std::ostringstream log_file; - log_file << "node_trace." << file_num_ << ".log"; - fd_ = uv_fs_open(tracing_loop_, &req, log_file.str().c_str(), + + // Evaluate a JS-style template string, it accepts the values ${pid} and + // ${rotation} + std::string filepath(log_file_pattern_); + replace_substring(&filepath, "${pid}", std::to_string(uv_os_getpid())); + replace_substring(&filepath, "${rotation}", std::to_string(file_num_)); + + fd_ = uv_fs_open(tracing_loop_, &req, filepath.c_str(), O_CREAT | O_WRONLY | O_TRUNC, 0644, nullptr); CHECK_NE(fd_, -1); uv_fs_req_cleanup(&req); diff --git a/src/tracing/node_trace_writer.h b/src/tracing/node_trace_writer.h index 5abb3e2a617d96..9211790777bddb 100644 --- a/src/tracing/node_trace_writer.h +++ b/src/tracing/node_trace_writer.h @@ -16,7 +16,8 @@ using v8::platform::tracing::TraceWriter; class NodeTraceWriter : public TraceWriter { public: - explicit NodeTraceWriter(uv_loop_t* tracing_loop); + explicit NodeTraceWriter(const std::string& log_file_pattern, + uv_loop_t* tracing_loop); ~NodeTraceWriter(); void AppendTraceEvent(TraceObject* trace_event) override; @@ -62,6 +63,7 @@ class NodeTraceWriter : public TraceWriter { int highest_request_id_completed_ = 0; int total_traces_ = 0; int file_num_ = 0; + const std::string& log_file_pattern_; std::ostringstream stream_; TraceWriter* json_trace_writer_ = nullptr; bool exited_ = false; diff --git a/src/util-inl.h b/src/util-inl.h index c5a25c91ffb088..d07cfea9227fbe 100644 --- a/src/util-inl.h +++ b/src/util-inl.h @@ -168,7 +168,7 @@ inline ContainerOfHelper ContainerOf(Inner Outer::*field, template inline v8::Local PersistentToLocal( v8::Isolate* isolate, - const v8::Persistent& persistent) { + const Persistent& persistent) { if (persistent.IsWeak()) { return WeakPersistentToLocal(isolate, persistent); } else { @@ -178,15 +178,15 @@ inline v8::Local PersistentToLocal( template inline v8::Local StrongPersistentToLocal( - const v8::Persistent& persistent) { + const Persistent& persistent) { 
return *reinterpret_cast*>( - const_cast*>(&persistent)); + const_cast*>(&persistent)); } template inline v8::Local WeakPersistentToLocal( v8::Isolate* isolate, - const v8::Persistent& persistent) { + const Persistent& persistent) { return v8::Local::New(isolate, persistent); } diff --git a/src/util.h b/src/util.h index 21c566a4ca6cd6..7c679952d5fb1f 100644 --- a/src/util.h +++ b/src/util.h @@ -24,6 +24,7 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS +#include "node_persistent.h" #include "v8.h" #include @@ -218,7 +219,7 @@ inline ContainerOfHelper ContainerOf(Inner Outer::*field, template inline v8::Local PersistentToLocal( v8::Isolate* isolate, - const v8::Persistent& persistent); + const Persistent& persistent); // Unchecked conversion from a non-weak Persistent to Local, // use with care! @@ -227,12 +228,12 @@ inline v8::Local PersistentToLocal( // scope, it will destroy the reference to the object. template inline v8::Local StrongPersistentToLocal( - const v8::Persistent& persistent); + const Persistent& persistent); template inline v8::Local WeakPersistentToLocal( v8::Isolate* isolate, - const v8::Persistent& persistent); + const Persistent& persistent); // Convenience wrapper around v8::String::NewFromOneByte(). inline v8::Local OneByteString(v8::Isolate* isolate, diff --git a/test/addons-napi/test_object/test_object.c b/test/addons-napi/test_object/test_object.c index 49a90dd3f99f45..ccf1573114a6f1 100644 --- a/test/addons-napi/test_object/test_object.c +++ b/test/addons-napi/test_object/test_object.c @@ -1,7 +1,6 @@ #include #include "../common.h" #include -#include static int test_value = 3; @@ -199,9 +198,7 @@ napi_value Wrap(napi_env env, napi_callback_info info) { napi_value arg; NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &arg, NULL, NULL)); - int32_t* data = malloc(sizeof(int32_t)); - *data = test_value; - NAPI_CALL(env, napi_wrap(env, arg, data, NULL, NULL, NULL)); + NAPI_CALL(env, napi_wrap(env, arg, &test_value, NULL, NULL, NULL)); return NULL; } diff --git a/test/addons-napi/test_reference/test_reference.c b/test/addons-napi/test_reference/test_reference.c index f0ede447814e17..f34adc6693b6fe 100644 --- a/test/addons-napi/test_reference/test_reference.c +++ b/test/addons-napi/test_reference/test_reference.c @@ -1,6 +1,5 @@ #include #include "../common.h" -#include static int test_value = 1; static int finalize_count = 0; @@ -13,7 +12,9 @@ napi_value GetFinalizeCount(napi_env env, napi_callback_info info) { } void FinalizeExternal(napi_env env, void* data, void* hint) { - free(data); + int *actual_value = data; + NAPI_ASSERT_RETURN_VOID(env, actual_value == &test_value, + "The correct pointer was passed to the finalizer"); finalize_count++; } @@ -33,13 +34,10 @@ napi_value CreateExternal(napi_env env, napi_callback_info info) { } napi_value CreateExternalWithFinalize(napi_env env, napi_callback_info info) { - int* data = malloc(sizeof(int)); - *data = test_value; - napi_value result; NAPI_CALL(env, napi_create_external(env, - data, + &test_value, FinalizeExternal, NULL, /* finalize_hint */ &result)); diff --git a/test/addons/hello-world/binding.cc b/test/addons/hello-world/binding.cc index 944f5631956d15..ba6a22d7196d26 100644 --- a/test/addons/hello-world/binding.cc +++ b/test/addons/hello-world/binding.cc @@ -6,8 +6,12 @@ void Method(const v8::FunctionCallbackInfo& args) { args.GetReturnValue().Set(v8::String::NewFromUtf8(isolate, "world")); } -void init(v8::Local exports) { +#define CONCAT(a, b) CONCAT_HELPER(a, b) +#define CONCAT_HELPER(a, b) a##b 
+#define INITIALIZER CONCAT(node_register_module_v, NODE_MODULE_VERSION) + +extern "C" NODE_MODULE_EXPORT void INITIALIZER(v8::Local exports, + v8::Local module, + v8::Local context) { NODE_SET_METHOD(exports, "hello", Method); } - -NODE_MODULE(NODE_GYP_MODULE_NAME, init) diff --git a/test/addons/symlinked-module/test.js b/test/addons/symlinked-module/test.js index 53306399cb520b..e8c26544f2a38c 100644 --- a/test/addons/symlinked-module/test.js +++ b/test/addons/symlinked-module/test.js @@ -19,7 +19,7 @@ const addonPath = path.join(__dirname, 'build', common.buildType); const addonLink = path.join(tmpdir.path, 'addon'); try { - fs.symlinkSync(addonPath, addonLink); + fs.symlinkSync(addonPath, addonLink, 'dir'); } catch (err) { if (err.code !== 'EPERM') throw err; common.skip('module identity test (no privs for symlinks)'); diff --git a/test/async-hooks/test-udpsendwrap.js b/test/async-hooks/test-udpsendwrap.js index 10deaca8452d3d..d8ab77730f4921 100644 --- a/test/async-hooks/test-udpsendwrap.js +++ b/test/async-hooks/test-udpsendwrap.js @@ -18,7 +18,7 @@ const sock = dgram function onlistening() { sock.send( - new Buffer(2), 0, 2, sock.address().port, + Buffer.alloc(2), 0, 2, sock.address().port, undefined, common.mustCall(onsent)); // init not called synchronously because dns lookup always wraps diff --git a/test/common/README.md b/test/common/README.md index c6742deb691587..dda747755f0306 100644 --- a/test/common/README.md +++ b/test/common/README.md @@ -21,7 +21,7 @@ The `benchmark` module is used by tests to run benchmarks. ### runBenchmark(name, args, env) -* `name` [<String>] Name of benchmark suite to be run. +* `name` [<string>] Name of benchmark suite to be run. * `args` [<Array>] Array of environment variable key/value pairs (ex: `n=1`) to be applied via `--set`. * `env` [<Object>] Environment variables to be applied during the run. @@ -41,12 +41,12 @@ Takes `whitelist` and concats that with predefined `knownGlobals`. A stream to push an array into a REPL ### busyLoop(time) -* `time` [<Number>] +* `time` [<number>] Blocks for `time` amount of time. ### canCreateSymLink() -* return [<Boolean>] +* return [<boolean>] Checks whether the current running process can create symlinks. On Windows, this returns `false` if the process running doesn't have privileges to create @@ -67,7 +67,7 @@ failures. Platform normalizes the `dd` command ### enoughTestMem -* [<Boolean>] +* [<boolean>] Indicates if there is more than 1gb of total memory. @@ -76,17 +76,17 @@ Indicates if there is more than 1gb of total memory. * `settings` [<Object>] that must contain the `code` property plus any of the other following properties (some properties only apply for `AssertionError`): - * `code` [<String>] + * `code` [<string>] expected error must have this value for its `code` property. * `type` [<Function>] expected error must be an instance of `type` and must be an Error subclass. - * `message` [<String>] or [<RegExp>] + * `message` [<string>] or [<RegExp>] if a string is provided for `message`, expected error must have it for its `message` property; if a regular expression is provided for `message`, the regular expression must match the `message` property of the expected error. - * `name` [<String>] + * `name` [<string>] expected error must have this value for its `name` property. - * `generatedMessage` [<String>] + * `generatedMessage` [<string>] (`AssertionError` only) expected error must have this value for its `generatedMessage` property. 
* `actual` <any> @@ -98,7 +98,7 @@ Indicates if there is more than 1gb of total memory. * `operator` <any> (`AssertionError` only) expected error must have this value for its `operator` property. -* `exact` [<Number>] default = 1 +* `exact` [<number>] default = 1 * return [<Function>] If `fn` is provided, it will be passed to `assert.throws` as first argument @@ -109,14 +109,14 @@ Indicates if there is more than 1gb of total memory. test is complete, then the test will fail. ### expectWarning(name, expected) -* `name` [<String>] -* `expected` [<String>] | [<Array>] +* `name` [<string>] +* `expected` [<string>] | [<Array>] Tests whether `name` and `expected` are part of a raised warning. ### fileExists(pathname) -* pathname [<String>] -* return [<Boolean>] +* pathname [<string>] +* return [<boolean>] Checks if `pathname` exists @@ -135,42 +135,42 @@ consisting of all `ArrayBufferView` and an `ArrayBuffer`. ### getCallSite(func) * `func` [<Function>] -* return [<String>] +* return [<string>] Returns the file name and line number for the provided Function. ### globalCheck -* [<Boolean>] +* [<boolean>] Set to `false` if the test should not check for global leaks. ### hasCrypto -* [<Boolean>] +* [<boolean>] Indicates whether OpenSSL is available. ### hasFipsCrypto -* [<Boolean>] +* [<boolean>] Indicates `hasCrypto` and `crypto` with fips. ### hasIntl -* [<Boolean>] +* [<boolean>] Indicates if [internationalization] is supported. ### hasSmallICU -* [<Boolean>] +* [<boolean>] Indicates `hasIntl` and `small-icu` are supported. ### hasIPv6 -* [<Boolean>] +* [<boolean>] Indicates whether `IPv6` is supported on this platform. ### hasMultiLocalhost -* [<Boolean>] +* [<boolean>] Indicates if there are multiple localhosts available. @@ -193,58 +193,58 @@ be passed to `listener`. What's more, `process.stdout.writeTimes` is a count of the number of calls. ### inFreeBSDJail -* [<Boolean>] +* [<boolean>] Checks whether free BSD Jail is true or false. ### isAIX -* [<Boolean>] +* [<boolean>] Platform check for Advanced Interactive eXecutive (AIX). ### isAlive(pid) -* `pid` [<Number>] -* return [<Boolean>] +* `pid` [<number>] +* return [<boolean>] Attempts to 'kill' `pid` ### isFreeBSD -* [<Boolean>] +* [<boolean>] Platform check for Free BSD. ### isLinux -* [<Boolean>] +* [<boolean>] Platform check for Linux. ### isLinuxPPCBE -* [<Boolean>] +* [<boolean>] Platform check for Linux on PowerPC. ### isOSX -* [<Boolean>] +* [<boolean>] Platform check for macOS. ### isSunOS -* [<Boolean>] +* [<boolean>] Platform check for SunOS. ### isWindows -* [<Boolean>] +* [<boolean>] Platform check for Windows. ### isWOW64 -* [<Boolean>] +* [<boolean>] Platform check for Windows 32-bit on Windows 64-bit. ### isCPPSymbolsNotMapped -* [<Boolean>] +* [<boolean>] Platform check for C++ symbols are mapped or not. @@ -254,7 +254,7 @@ Platform check for C++ symbols are mapped or not. Indicates whether any globals are not on the `knownGlobals` list. ### localhostIPv4 -* [<String>] +* [<string>] IP of `localhost`. @@ -265,7 +265,7 @@ Array of IPV6 representations for `localhost`. ### mustCall([fn][, exact]) * `fn` [<Function>] default = () => {} -* `exact` [<Number>] default = 1 +* `exact` [<number>] default = 1 * return [<Function>] Returns a function that calls `fn`. If the returned function has not been called @@ -276,7 +276,7 @@ If `fn` is not provided, an empty function will be used. 
### mustCallAsync([fn][, exact]) * `fn` [<Function>] -* `exact` [<Number>] default = 1 +* `exact` [<number>] default = 1 * return [<Function>] The same as `mustCall()`, except that it is also checked that the Promise @@ -287,7 +287,7 @@ function, if necessary wrapped as a promise. ### mustCallAtLeast([fn][, minimum]) * `fn` [<Function>] default = () => {} -* `minimum` [<Number>] default = 1 +* `minimum` [<number>] default = 1 * return [<Function>] Returns a function that calls `fn`. If the returned function has not been called @@ -297,44 +297,44 @@ fail. If `fn` is not provided, an empty function will be used. ### mustNotCall([msg]) -* `msg` [<String>] default = 'function should not have been called' +* `msg` [<string>] default = 'function should not have been called' * return [<Function>] Returns a function that triggers an `AssertionError` if it is invoked. `msg` is used as the error message for the `AssertionError`. ### nodeProcessAborted(exitCode, signal) -* `exitCode` [<Number>] -* `signal` [<String>] -* return [<Boolean>] +* `exitCode` [<number>] +* `signal` [<string>] +* return [<boolean>] Returns `true` if the exit code `exitCode` and/or signal name `signal` represent the exit code and/or signal name of a node process that aborted, `false` otherwise. ### opensslCli -* [<Boolean>] +* [<boolean>] Indicates whether 'opensslCli' is supported. ### platformTimeout(ms) -* `ms` [<Number>] -* return [<Number>] +* `ms` [<number>] +* return [<number>] Platform normalizes timeout. ### PIPE -* [<String>] +* [<string>] Path to the test socket. ### PORT -* [<Number>] +* [<number>] A port number for tests to use if one is needed. ### printSkipMessage(msg) -* `msg` [<String>] +* `msg` [<string>] Logs '1..0 # Skipped: ' + `msg` @@ -349,7 +349,7 @@ Restore the original `process.stdout.write`. Used to restore `stdout` to its original state after calling [`common.hijackStdOut()`][]. ### rootDir -* [<String>] +* [<string>] Path to the 'root' directory. either `/` or `c:\\` (windows) @@ -359,7 +359,7 @@ Path to the 'root' directory. either `/` or `c:\\` (windows) Path to the project directory. ### skip(msg) -* `msg` [<String>] +* `msg` [<string>] Logs '1..0 # Skipped: ' + `msg` and exits with exit code `0`. @@ -432,8 +432,8 @@ The `DNS` module provides utilities related to the `dns` built-in module. ### errorLookupMock(code, syscall) -* `code` [<String>] Defaults to `dns.mockedErrorCode`. -* `syscall` [<String>] Defaults to `dns.mockedSysCall`. +* `code` [<string>] Defaults to `dns.mockedErrorCode`. +* `syscall` [<string>] Defaults to `dns.mockedSysCall`. * return [<Function>] A mock for the `lookup` option of `net.connect()` that would result in an error @@ -451,7 +451,7 @@ The default `syscall` of errors generated by `errorLookupMock`. ### readDomainFromPacket(buffer, offset) * `buffer` [<Buffer>] -* `offset` [<Number>] +* `offset` [<number>] * return [<Object>] Reads the domain string from a packet and returns an object containing the @@ -467,14 +467,14 @@ the packet depending on the type of packet. ### writeIPv6(ip) -* `ip` [<String>] +* `ip` [<string>] * return [<Buffer>] Reads an IPv6 String and returns a Buffer containing the parts. ### writeDomainName(domain) -* `domain` [<String>] +* `domain` [<string>] * return [<Buffer>] Reads a Domain String and returns a Buffer containing the domain. @@ -502,26 +502,26 @@ files in the `test/fixtures` directory. ### fixtures.fixturesDir -* [<String>] +* [<string>] The absolute path to the `test/fixtures/` directory. 
### fixtures.path(...args) -* `...args` [<String>] +* `...args` [<string>] Returns the result of `path.join(fixtures.fixturesDir, ...args)`. ### fixtures.readSync(args[, enc]) -* `args` [<String>] | [<Array>] +* `args` [<string>] | [<Array>] Returns the result of `fs.readFileSync(path.join(fixtures.fixturesDir, ...args), 'enc')`. ### fixtures.readKey(arg[, enc]) -* `arg` [<String>] +* `arg` [<string>] Returns the result of `fs.readFileSync(path.join(fixtures.fixturesDir, 'keys', arg), 'enc')`. @@ -645,26 +645,26 @@ internet-related tests. ### internet.addresses * [<Object>] - * `INET_HOST` [<String>] A generic host that has registered common + * `INET_HOST` [<string>] A generic host that has registered common DNS records, supports both IPv4 and IPv6, and provides basic HTTP/HTTPS services - * `INET4_HOST` [<String>] A host that provides IPv4 services - * `INET6_HOST` [<String>] A host that provides IPv6 services - * `INET4_IP` [<String>] An accessible IPv4 IP, defaults to the + * `INET4_HOST` [<string>] A host that provides IPv4 services + * `INET6_HOST` [<string>] A host that provides IPv6 services + * `INET4_IP` [<string>] An accessible IPv4 IP, defaults to the Google Public DNS IPv4 address - * `INET6_IP` [<String>] An accessible IPv6 IP, defaults to the + * `INET6_IP` [<string>] An accessible IPv6 IP, defaults to the Google Public DNS IPv6 address - * `INVALID_HOST` [<String>] An invalid host that cannot be resolved - * `MX_HOST` [<String>] A host with MX records registered - * `SRV_HOST` [<String>] A host with SRV records registered - * `PTR_HOST` [<String>] A host with PTR records registered - * `NAPTR_HOST` [<String>] A host with NAPTR records registered - * `SOA_HOST` [<String>] A host with SOA records registered - * `CNAME_HOST` [<String>] A host with CNAME records registered - * `NS_HOST` [<String>] A host with NS records registered - * `TXT_HOST` [<String>] A host with TXT records registered - * `DNS4_SERVER` [<String>] An accessible IPv4 DNS server - * `DNS6_SERVER` [<String>] An accessible IPv6 DNS server + * `INVALID_HOST` [<string>] An invalid host that cannot be resolved + * `MX_HOST` [<string>] A host with MX records registered + * `SRV_HOST` [<string>] A host with SRV records registered + * `PTR_HOST` [<string>] A host with PTR records registered + * `NAPTR_HOST` [<string>] A host with NAPTR records registered + * `SOA_HOST` [<string>] A host with SOA records registered + * `CNAME_HOST` [<string>] A host with CNAME records registered + * `NS_HOST` [<string>] A host with NS records registered + * `TXT_HOST` [<string>] A host with TXT records registered + * `DNS4_SERVER` [<string>] An accessible IPv4 DNS server + * `DNS6_SERVER` [<string>] An accessible IPv6 DNS server A set of addresses for internet-related tests. All properties are configurable via `NODE_TEST_*` environment variables. For example, to configure @@ -676,7 +676,7 @@ variable `NODE_TEST_INET_HOST` to a specified host. The `tmpdir` module supports the use of a temporary directory for testing. ### path -* [<String>] +* [<string>] The realpath of the testing temporary directory. 
@@ -696,13 +696,13 @@ implementation with tests from [<Array>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array [<ArrayBufferView[]>]: https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView -[<Boolean>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type [<Buffer>]: https://nodejs.org/api/buffer.html#buffer_class_buffer [<Function>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function -[<Number>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type [<Object>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object [<RegExp>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp -[<String>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type +[<boolean>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type +[<number>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type +[<string>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type [`common.hijackStdErr()`]: #hijackstderrlistener [`common.hijackStdOut()`]: #hijackstdoutlistener [internationalization]: https://github.com/nodejs/node/wiki/Intl diff --git a/test/common/inspector-helper.js b/test/common/inspector-helper.js index de0723933f573a..7590cfc104b70a 100644 --- a/test/common/inspector-helper.js +++ b/test/common/inspector-helper.js @@ -241,7 +241,7 @@ class InspectorSession { } _isBreakOnLineNotification(message, line, expectedScriptPath) { - if ('Debugger.paused' === message.method) { + if (message.method === 'Debugger.paused') { const callFrame = message.params.callFrames[0]; const location = callFrame.location; const scriptPath = this._scriptsIdsByUrl.get(location.scriptId); @@ -264,7 +264,7 @@ class InspectorSession { _matchesConsoleOutputNotification(notification, type, values) { if (!Array.isArray(values)) values = [ values ]; - if ('Runtime.consoleAPICalled' === notification.method) { + if (notification.method === 'Runtime.consoleAPICalled') { const params = notification.params; if (params.type === type) { let i = 0; diff --git a/test/common/shared-lib-util.js b/test/common/shared-lib-util.js index 7ff7518ac31e6d..baa989824cdc2c 100644 --- a/test/common/shared-lib-util.js +++ b/test/common/shared-lib-util.js @@ -1,5 +1,6 @@ /* eslint-disable required-modules */ 'use strict'; +const common = require('../common'); const path = require('path'); // If node executable is linked to shared lib, need to take care about the @@ -27,3 +28,17 @@ exports.addLibraryPath = function(env) { (env.PATH ? 
env.PATH + path.delimiter : '') + path.dirname(process.execPath); }; + +// Get the full path of shared lib +exports.getSharedLibPath = function() { + if (common.isWindows) { + return path.join(path.dirname(process.execPath), 'node.dll'); + } else if (common.isOSX) { + return path.join(path.dirname(process.execPath), + `libnode.${process.config.variables.shlib_suffix}`); + } else { + return path.join(path.dirname(process.execPath), + 'lib.target', + `libnode.${process.config.variables.shlib_suffix}`); + } +}; diff --git a/test/es-module/test-esm-cyclic-dynamic-import.mjs b/test/es-module/test-esm-cyclic-dynamic-import.mjs new file mode 100644 index 00000000000000..c8dfff919c2f7e --- /dev/null +++ b/test/es-module/test-esm-cyclic-dynamic-import.mjs @@ -0,0 +1,3 @@ +// Flags: --experimental-modules +import '../common'; +import('./test-esm-cyclic-dynamic-import'); diff --git a/test/es-module/test-esm-main-lookup.mjs b/test/es-module/test-esm-main-lookup.mjs new file mode 100644 index 00000000000000..7c81cb647cff38 --- /dev/null +++ b/test/es-module/test-esm-main-lookup.mjs @@ -0,0 +1,6 @@ +// Flags: --experimental-modules +/* eslint-disable required-modules */ +import assert from 'assert'; +import main from '../fixtures/es-modules/pjson-main'; + +assert.strictEqual(main, 'main'); diff --git a/test/es-module/test-esm-symlink.js b/test/es-module/test-esm-symlink.js index 074230ac06c4b5..232925a52e3f64 100644 --- a/test/es-module/test-esm-symlink.js +++ b/test/es-module/test-esm-symlink.js @@ -37,7 +37,7 @@ try { fs.symlinkSync(real, link_absolute_path); fs.symlinkSync(path.basename(real), link_relative_path); fs.symlinkSync(real, link_ignore_extension); - fs.symlinkSync(path.dirname(real), link_directory); + fs.symlinkSync(path.dirname(real), link_directory, 'dir'); } catch (err) { if (err.code !== 'EPERM') throw err; common.skip('insufficient privileges for symlinks'); diff --git a/test/fixtures/es-module-loaders/example-loader.mjs b/test/fixtures/es-module-loaders/example-loader.mjs index 771273a8d865c1..acb4486edc1288 100644 --- a/test/fixtures/es-module-loaders/example-loader.mjs +++ b/test/fixtures/es-module-loaders/example-loader.mjs @@ -8,7 +8,10 @@ const builtins = new Set( ); const JS_EXTENSIONS = new Set(['.js', '.mjs']); -export function resolve(specifier, parentModuleURL/*, defaultResolve */) { +const baseURL = new url.URL('file://'); +baseURL.pathname = process.cwd() + '/'; + +export function resolve(specifier, parentModuleURL = baseURL /*, defaultResolve */) { if (builtins.has(specifier)) { return { url: specifier, diff --git a/test/fixtures/es-module-loaders/js-loader.mjs b/test/fixtures/es-module-loaders/js-loader.mjs index 79d9774c1d787e..2173b0b503ef45 100644 --- a/test/fixtures/es-module-loaders/js-loader.mjs +++ b/test/fixtures/es-module-loaders/js-loader.mjs @@ -3,7 +3,11 @@ const builtins = new Set( Object.keys(process.binding('natives')).filter(str => /^(?!(?:internal|node|v8)\/)/.test(str)) ) -export function resolve (specifier, base) { + +const baseURL = new _url.URL('file://'); +baseURL.pathname = process.cwd() + '/'; + +export function resolve (specifier, base = baseURL) { if (builtins.has(specifier)) { return { url: specifier, diff --git a/test/fixtures/es-modules/loop.mjs b/test/fixtures/es-modules/loop.mjs index edd111abb9e9b7..1b5cab10edc7bf 100644 --- a/test/fixtures/es-modules/loop.mjs +++ b/test/fixtures/es-modules/loop.mjs @@ -1,6 +1,8 @@ +import { message } from './message'; + var t = 1; var k = 1; -console.log('A message', 5); +console.log(message, 5); while 
(t > 0) { if (t++ === 1000) { t = 0; diff --git a/test/fixtures/es-modules/message.mjs b/test/fixtures/es-modules/message.mjs new file mode 100644 index 00000000000000..d50f57b7b6a5f7 --- /dev/null +++ b/test/fixtures/es-modules/message.mjs @@ -0,0 +1 @@ +export const message = 'A message'; diff --git a/test/fixtures/es-modules/noext b/test/fixtures/es-modules/noext new file mode 100644 index 00000000000000..f21c9bee6df46a --- /dev/null +++ b/test/fixtures/es-modules/noext @@ -0,0 +1 @@ +exports.cjs = true; \ No newline at end of file diff --git a/test/fixtures/es-modules/pjson-main/main.js b/test/fixtures/es-modules/pjson-main/main.js new file mode 100644 index 00000000000000..dfdd47b877319c --- /dev/null +++ b/test/fixtures/es-modules/pjson-main/main.js @@ -0,0 +1 @@ +module.exports = 'main'; diff --git a/test/fixtures/es-modules/pjson-main/package.json b/test/fixtures/es-modules/pjson-main/package.json new file mode 100644 index 00000000000000..c13b8cf6acfd33 --- /dev/null +++ b/test/fixtures/es-modules/pjson-main/package.json @@ -0,0 +1,3 @@ +{ + "main": "main.js" +} diff --git a/test/message/events_unhandled_error_common_trace.js b/test/message/events_unhandled_error_common_trace.js new file mode 100644 index 00000000000000..e6c168fc06eb33 --- /dev/null +++ b/test/message/events_unhandled_error_common_trace.js @@ -0,0 +1,20 @@ +'use strict'; +require('../common'); +const EventEmitter = require('events'); + +function foo() { + function bar() { + return new Error('foo:bar'); + } + + return bar(); +} + +const ee = new EventEmitter(); +const err = foo(); + +function quux() { + ee.emit('error', err); +} + +quux(); diff --git a/test/message/events_unhandled_error_common_trace.out b/test/message/events_unhandled_error_common_trace.out new file mode 100644 index 00000000000000..d39a95cb77f068 --- /dev/null +++ b/test/message/events_unhandled_error_common_trace.out @@ -0,0 +1,22 @@ +events.js:* + throw er; // Unhandled 'error' event + ^ + +Error: foo:bar + at bar (*events_unhandled_error_common_trace.js:*:*) + at foo (*events_unhandled_error_common_trace.js:*:*) + at Object. (*events_unhandled_error_common_trace.js:*:*) + at Module._compile (module.js:*:*) + at Object.Module._extensions..js (module.js:*:*) + at Module.load (module.js:*:*) + at tryModuleLoad (module.js:*:*) + at Function.Module._load (module.js:*:*) + at Function.Module.runMain (module.js:*:*) + at startup (bootstrap_node.js:*:*) +Emitted 'error' event at: + at quux (*events_unhandled_error_common_trace.js:*:*) + at Object. (*events_unhandled_error_common_trace.js:*:*) + at Module._compile (module.js:*:*) + [... lines matching original stack trace ...] + at startup (bootstrap_node.js:*:*) + at bootstrap_node.js:*:* diff --git a/test/message/events_unhandled_error_nexttick.js b/test/message/events_unhandled_error_nexttick.js new file mode 100644 index 00000000000000..713031eeefa042 --- /dev/null +++ b/test/message/events_unhandled_error_nexttick.js @@ -0,0 +1,7 @@ +'use strict'; +require('../common'); +const EventEmitter = require('events'); +const er = new Error(); +process.nextTick(() => { + new EventEmitter().emit('error', er); +}); diff --git a/test/message/events_unhandled_error_nexttick.out b/test/message/events_unhandled_error_nexttick.out new file mode 100644 index 00000000000000..f0591610ffdb31 --- /dev/null +++ b/test/message/events_unhandled_error_nexttick.out @@ -0,0 +1,20 @@ +events.js:* + throw er; // Unhandled 'error' event + ^ + +Error + at Object. 
(*events_unhandled_error_nexttick.js:*:*) + at Module._compile (module.js:*:*) + at Object.Module._extensions..js (module.js:*:*) + at Module.load (module.js:*:*) + at tryModuleLoad (module.js:*:*) + at Function.Module._load (module.js:*:*) + at Function.Module.runMain (module.js:*:*) + at startup (bootstrap_node.js:*:*) + at bootstrap_node.js:*:* +Emitted 'error' event at: + at process.nextTick (*events_unhandled_error_nexttick.js:*:*) + at process._tickCallback (internal/process/next_tick.js:*:*) + at Function.Module.runMain (module.js:*:*) + at startup (bootstrap_node.js:*:*) + at bootstrap_node.js:*:* diff --git a/test/message/events_unhandled_error_sameline.js b/test/message/events_unhandled_error_sameline.js new file mode 100644 index 00000000000000..1e5e77d08c9919 --- /dev/null +++ b/test/message/events_unhandled_error_sameline.js @@ -0,0 +1,4 @@ +'use strict'; +require('../common'); +const EventEmitter = require('events'); +new EventEmitter().emit('error', new Error()); diff --git a/test/message/events_unhandled_error_sameline.out b/test/message/events_unhandled_error_sameline.out new file mode 100644 index 00000000000000..100c294276d04b --- /dev/null +++ b/test/message/events_unhandled_error_sameline.out @@ -0,0 +1,19 @@ +events.js:* + throw er; // Unhandled 'error' event + ^ + +Error + at Object. (*events_unhandled_error_sameline.js:*:*) + at Module._compile (module.js:*:*) + at Object.Module._extensions..js (module.js:*:*) + at Module.load (module.js:*:*) + at tryModuleLoad (module.js:*:*) + at Function.Module._load (module.js:*:*) + at Function.Module.runMain (module.js:*:*) + at startup (bootstrap_node.js:*:*) + at bootstrap_node.js:*:* +Emitted 'error' event at: + at Object. (*events_unhandled_error_sameline.js:*:*) + at Module._compile (module.js:*:*) + [... lines matching original stack trace ...] + at bootstrap_node.js:*:* diff --git a/test/parallel/test-async-hooks-constructor.js b/test/parallel/test-async-hooks-constructor.js new file mode 100644 index 00000000000000..f2b4df6a9f9f99 --- /dev/null +++ b/test/parallel/test-async-hooks-constructor.js @@ -0,0 +1,23 @@ +'use strict'; + +// This tests that AsyncHooks throws an error if bad parameters are passed. 
+ +const common = require('../common'); +const async_hooks = require('async_hooks'); +const non_function = 10; + +typeErrorForFunction('init'); +typeErrorForFunction('before'); +typeErrorForFunction('after'); +typeErrorForFunction('destroy'); +typeErrorForFunction('promiseResolve'); + +function typeErrorForFunction(functionName) { + common.expectsError(() => { + async_hooks.createHook({ [functionName]: non_function }); + }, { + code: 'ERR_ASYNC_CALLBACK', + type: TypeError, + message: `hook.${functionName} must be a function` + }); +} diff --git a/test/parallel/test-buffer-badhex.js b/test/parallel/test-buffer-badhex.js index 94de97181d38e0..61086659fa7b6e 100644 --- a/test/parallel/test-buffer-badhex.js +++ b/test/parallel/test-buffer-badhex.js @@ -6,12 +6,12 @@ const assert = require('assert'); { const buf = Buffer.alloc(4); assert.strictEqual(buf.length, 4); - assert.deepStrictEqual(buf, new Buffer([0, 0, 0, 0])); + assert.deepStrictEqual(buf, Buffer.from([0, 0, 0, 0])); assert.strictEqual(buf.write('abcdxx', 0, 'hex'), 2); - assert.deepStrictEqual(buf, new Buffer([0xab, 0xcd, 0x00, 0x00])); + assert.deepStrictEqual(buf, Buffer.from([0xab, 0xcd, 0x00, 0x00])); assert.strictEqual(buf.toString('hex'), 'abcd0000'); assert.strictEqual(buf.write('abcdef01', 0, 'hex'), 4); - assert.deepStrictEqual(buf, new Buffer([0xab, 0xcd, 0xef, 0x01])); + assert.deepStrictEqual(buf, Buffer.from([0xab, 0xcd, 0xef, 0x01])); assert.strictEqual(buf.toString('hex'), 'abcdef01'); const copy = Buffer.from(buf.toString('hex'), 'hex'); @@ -26,13 +26,13 @@ const assert = require('assert'); { const buf = Buffer.alloc(4); - assert.deepStrictEqual(buf, new Buffer([0, 0, 0, 0])); + assert.deepStrictEqual(buf, Buffer.from([0, 0, 0, 0])); assert.strictEqual(buf.write('xxabcd', 0, 'hex'), 0); - assert.deepStrictEqual(buf, new Buffer([0, 0, 0, 0])); + assert.deepStrictEqual(buf, Buffer.from([0, 0, 0, 0])); assert.strictEqual(buf.write('xxab', 1, 'hex'), 0); - assert.deepStrictEqual(buf, new Buffer([0, 0, 0, 0])); + assert.deepStrictEqual(buf, Buffer.from([0, 0, 0, 0])); assert.strictEqual(buf.write('cdxxab', 0, 'hex'), 1); - assert.deepStrictEqual(buf, new Buffer([0xcd, 0, 0, 0])); + assert.deepStrictEqual(buf, Buffer.from([0xcd, 0, 0, 0])); } { diff --git a/test/parallel/test-buffer-fill.js b/test/parallel/test-buffer-fill.js index 2b36af38d0b2b8..14f44767c4452a 100644 --- a/test/parallel/test-buffer-fill.js +++ b/test/parallel/test-buffer-fill.js @@ -427,7 +427,7 @@ common.expectsError(() => { // Test that bypassing 'length' won't cause an abort. 
common.expectsError(() => { - const buf = new Buffer('w00t'); + const buf = Buffer.from('w00t'); Object.defineProperty(buf, 'length', { value: 1337, enumerable: true diff --git a/test/parallel/test-buffer-indexof.js b/test/parallel/test-buffer-indexof.js index 08d640b1dc1924..357558c74d2edc 100644 --- a/test/parallel/test-buffer-indexof.js +++ b/test/parallel/test-buffer-indexof.js @@ -504,7 +504,7 @@ assert.strictEqual(buf_bc.lastIndexOf('你好', 5, 'binary'), -1); assert.strictEqual(buf_bc.lastIndexOf(Buffer.from('你好'), 7), -1); // Test lastIndexOf on a longer buffer: -const bufferString = new Buffer('a man a plan a canal panama'); +const bufferString = Buffer.from('a man a plan a canal panama'); assert.strictEqual(15, bufferString.lastIndexOf('canal')); assert.strictEqual(21, bufferString.lastIndexOf('panama')); assert.strictEqual(0, bufferString.lastIndexOf('a man a plan a canal panama')); @@ -566,7 +566,7 @@ const parts = []; for (let i = 0; i < 1000000; i++) { parts.push((countBits(i) % 2 === 0) ? 'yolo' : 'swag'); } -const reallyLong = new Buffer(parts.join(' ')); +const reallyLong = Buffer.from(parts.join(' ')); assert.strictEqual('yolo swag swag yolo', reallyLong.slice(0, 19).toString()); // Expensive reverse searches. Stress test lastIndexOf: diff --git a/test/parallel/test-buffer-zero-fill.js b/test/parallel/test-buffer-zero-fill.js index 1aca4e9a5c438e..7a9f0c12500481 100644 --- a/test/parallel/test-buffer-zero-fill.js +++ b/test/parallel/test-buffer-zero-fill.js @@ -3,6 +3,7 @@ require('../common'); const assert = require('assert'); +// Tests deprecated Buffer API on purpose const buf1 = Buffer(100); const buf2 = new Buffer(100); diff --git a/test/parallel/test-cli-node-options.js b/test/parallel/test-cli-node-options.js index c9f2d5eace022d..8eae27b1a2a3a2 100644 --- a/test/parallel/test-cli-node-options.js +++ b/test/parallel/test-cli-node-options.js @@ -25,6 +25,8 @@ expect('--throw-deprecation', 'B\n'); expect('--zero-fill-buffers', 'B\n'); expect('--v8-pool-size=10', 'B\n'); expect('--trace-event-categories node', 'B\n'); +// eslint-disable-next-line no-template-curly-in-string +expect('--trace-event-file-pattern {pid}-${rotation}.trace_events', 'B\n'); if (!common.isWindows) { expect('--perf-basic-prof', 'B\n'); diff --git a/test/parallel/test-crypto-binary-default.js b/test/parallel/test-crypto-binary-default.js index ad11b59f096dfd..ffc29e7ac8b640 100644 --- a/test/parallel/test-crypto-binary-default.js +++ b/test/parallel/test-crypto-binary-default.js @@ -63,353 +63,365 @@ assert.throws(function() { }, /^Error: not enough data$/); // Test HMAC -const hmacHash = crypto.createHmac('sha1', 'Node') - .update('some data') - .update('to hmac') - .digest('hex'); -assert.strictEqual(hmacHash, '19fd6e1ba73d9ed2224dd5094a71babe85d9a892'); +{ + const hmacHash = crypto.createHmac('sha1', 'Node') + .update('some data') + .update('to hmac') + .digest('hex'); + assert.strictEqual(hmacHash, '19fd6e1ba73d9ed2224dd5094a71babe85d9a892'); +} // Test HMAC-SHA-* (rfc 4231 Test Cases) -const rfc4231 = [ - { - key: Buffer.from('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', 'hex'), - data: Buffer.from('4869205468657265', 'hex'), // 'Hi There' - hmac: { - sha224: '896fb1128abbdf196832107cd49df33f47b4b1169912ba4f53684b22', - sha256: - 'b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c' + - '2e32cff7', - sha384: - 'afd03944d84895626b0825f4ab46907f15f9dadbe4101ec682aa034c' + - '7cebc59cfaea9ea9076ede7f4af152e8b2fa9cb6', - sha512: - '87aa7cdea5ef619d4ff0b4241a1d6cb02379f4e2ce4ec2787ad0b305' + - 
'45e17cdedaa833b7d6b8a702038b274eaea3f4e4be9d914eeb61f170' + - '2e696c203a126854' - } - }, - { - key: Buffer.from('4a656665', 'hex'), // 'Jefe' - data: Buffer.from('7768617420646f2079612077616e7420666f72206e6f74686' + - '96e673f', 'hex'), // 'what do ya want for nothing?' - hmac: { - sha224: 'a30e01098bc6dbbf45690f3a7e9e6d0f8bbea2a39e6148008fd05e44', - sha256: - '5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b9' + - '64ec3843', - sha384: - 'af45d2e376484031617f78d2b58a6b1b9c7ef464f5a01b47e42ec373' + - '6322445e8e2240ca5e69e2c78b3239ecfab21649', - sha512: - '164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7' + - 'ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b' + - '636e070a38bce737' - } - }, - { - key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'hex'), - data: Buffer.from('ddddddddddddddddddddddddddddddddddddddddddddddddd' + - 'ddddddddddddddddddddddddddddddddddddddddddddddddddd', - 'hex'), - hmac: { - sha224: '7fb3cb3588c6c1f6ffa9694d7d6ad2649365b0c1f65d69d1ec8333ea', - sha256: - '773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514' + - 'ced565fe', - sha384: - '88062608d3e6ad8a0aa2ace014c8a86f0aa635d947ac9febe83ef4e5' + - '5966144b2a5ab39dc13814b94e3ab6e101a34f27', - sha512: - 'fa73b0089d56a284efb0f0756c890be9b1b5dbdd8ee81a3655f83e33' + - 'b2279d39bf3e848279a722c806b485a47e67c807b946a337bee89426' + - '74278859e13292fb' - } - }, - { - key: Buffer.from('0102030405060708090a0b0c0d0e0f10111213141516171819', - 'hex'), - data: Buffer.from('cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdc' + - 'dcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd', - 'hex'), - hmac: { - sha224: '6c11506874013cac6a2abc1bb382627cec6a90d86efc012de7afec5a', - sha256: - '82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff4' + - '6729665b', - sha384: - '3e8a69b7783c25851933ab6290af6ca77a9981480850009cc5577c6e' + - '1f573b4e6801dd23c4a7d679ccf8a386c674cffb', - sha512: - 'b0ba465637458c6990e5a8c5f61d4af7e576d97ff94b872de76f8050' + - '361ee3dba91ca5c11aa25eb4d679275cc5788063a5f19741120c4f2d' + - 'e2adebeb10a298dd' - } - }, - - { - key: Buffer.from('0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c', 'hex'), - // 'Test With Truncation' - data: Buffer.from('546573742057697468205472756e636174696f6e', 'hex'), - hmac: { - sha224: '0e2aea68a90c8d37c988bcdb9fca6fa8', - sha256: 'a3b6167473100ee06e0c796c2955552b', - sha384: '3abf34c3503b2a23a46efc619baef897', - sha512: '415fad6271580a531d4179bc891d87a6' +{ + const rfc4231 = [ + { + key: Buffer.from('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', 'hex'), + data: Buffer.from('4869205468657265', 'hex'), // 'Hi There' + hmac: { + sha224: '896fb1128abbdf196832107cd49df33f47b4b1169912ba4f53684b22', + sha256: + 'b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c' + + '2e32cff7', + sha384: + 'afd03944d84895626b0825f4ab46907f15f9dadbe4101ec682aa034c' + + '7cebc59cfaea9ea9076ede7f4af152e8b2fa9cb6', + sha512: + '87aa7cdea5ef619d4ff0b4241a1d6cb02379f4e2ce4ec2787ad0b305' + + '45e17cdedaa833b7d6b8a702038b274eaea3f4e4be9d914eeb61f170' + + '2e696c203a126854' + } }, - truncate: true - }, - { - key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaa', 'hex'), - // 'Test Using Larger Than Block-Size Key - Hash Key First' - data: Buffer.from('54657374205573696e67204c6172676572205468616e20426' + - 
'c6f636b2d53697a65204b6579202d2048617368204b657920' + - '4669727374', 'hex'), - hmac: { - sha224: '95e9a0db962095adaebe9b2d6f0dbce2d499f112f2d2b7273fa6870e', - sha256: - '60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f' + - '0ee37f54', - sha384: - '4ece084485813e9088d2c63a041bc5b44f9ef1012a2b588f3cd11f05' + - '033ac4c60c2ef6ab4030fe8296248df163f44952', - sha512: - '80b24263c7c1a3ebb71493c1dd7be8b49b46d1f41b4aeec1121b0137' + - '83f8f3526b56d037e05f2598bd0fd2215d6a1e5295e64f73f63f0aec' + - '8b915a985d786598' - } - }, - { - key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaa', 'hex'), - // 'This is a test using a larger than block-size key and a larger ' + - // 'than block-size data. The key needs to be hashed before being ' + - // 'used by the HMAC algorithm.' - data: Buffer.from('5468697320697320612074657374207573696e672061206c6' + - '172676572207468616e20626c6f636b2d73697a65206b6579' + - '20616e642061206c6172676572207468616e20626c6f636b2' + - 'd73697a6520646174612e20546865206b6579206e65656473' + - '20746f20626520686173686564206265666f7265206265696' + - 'e6720757365642062792074686520484d414320616c676f72' + - '6974686d2e', 'hex'), - hmac: { - sha224: '3a854166ac5d9f023f54d517d0b39dbd946770db9c2b95c9f6f565d1', - sha256: - '9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f5153' + - '5c3a35e2', - sha384: - '6617178e941f020d351e2f254e8fd32c602420feb0b8fb9adccebb82' + - '461e99c5a678cc31e799176d3860e6110c46523e', - sha512: - 'e37b6a775dc87dbaa4dfa9f96e5e3ffddebd71f8867289865df5a32d' + - '20cdc944b6022cac3c4982b10d5eeb55c3e4de15134676fb6de04460' + - '65c97440fa8c6a58' + { + key: Buffer.from('4a656665', 'hex'), // 'Jefe' + data: Buffer.from('7768617420646f2079612077616e7420666f72206e6f74686' + + '96e673f', 'hex'), // 'what do ya want for nothing?' 
+ hmac: { + sha224: 'a30e01098bc6dbbf45690f3a7e9e6d0f8bbea2a39e6148008fd05e44', + sha256: + '5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b9' + + '64ec3843', + sha384: + 'af45d2e376484031617f78d2b58a6b1b9c7ef464f5a01b47e42ec373' + + '6322445e8e2240ca5e69e2c78b3239ecfab21649', + sha512: + '164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7' + + 'ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b' + + '636e070a38bce737' + } + }, + { + key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'hex'), + data: Buffer.from('ddddddddddddddddddddddddddddddddddddddddddddddddd' + + 'ddddddddddddddddddddddddddddddddddddddddddddddddddd', + 'hex'), + hmac: { + sha224: '7fb3cb3588c6c1f6ffa9694d7d6ad2649365b0c1f65d69d1ec8333ea', + sha256: + '773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514' + + 'ced565fe', + sha384: + '88062608d3e6ad8a0aa2ace014c8a86f0aa635d947ac9febe83ef4e5' + + '5966144b2a5ab39dc13814b94e3ab6e101a34f27', + sha512: + 'fa73b0089d56a284efb0f0756c890be9b1b5dbdd8ee81a3655f83e33' + + 'b2279d39bf3e848279a722c806b485a47e67c807b946a337bee89426' + + '74278859e13292fb' + } + }, + { + key: Buffer.from('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'hex'), + data: Buffer.from('cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdc' + + 'dcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd', + 'hex'), + hmac: { + sha224: '6c11506874013cac6a2abc1bb382627cec6a90d86efc012de7afec5a', + sha256: + '82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff4' + + '6729665b', + sha384: + '3e8a69b7783c25851933ab6290af6ca77a9981480850009cc5577c6e' + + '1f573b4e6801dd23c4a7d679ccf8a386c674cffb', + sha512: + 'b0ba465637458c6990e5a8c5f61d4af7e576d97ff94b872de76f8050' + + '361ee3dba91ca5c11aa25eb4d679275cc5788063a5f19741120c4f2d' + + 'e2adebeb10a298dd' + } + }, + { + key: Buffer.from('0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c', 'hex'), + // 'Test With Truncation' + data: Buffer.from('546573742057697468205472756e636174696f6e', 'hex'), + hmac: { + sha224: '0e2aea68a90c8d37c988bcdb9fca6fa8', + sha256: 'a3b6167473100ee06e0c796c2955552b', + sha384: '3abf34c3503b2a23a46efc619baef897', + sha512: '415fad6271580a531d4179bc891d87a6' + }, + truncate: true + }, + { + key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaa', 'hex'), + // 'Test Using Larger Than Block-Size Key - Hash Key First' + data: Buffer.from('54657374205573696e67204c6172676572205468616e20426' + + 'c6f636b2d53697a65204b6579202d2048617368204b657920' + + '4669727374', 'hex'), + hmac: { + sha224: '95e9a0db962095adaebe9b2d6f0dbce2d499f112f2d2b7273fa6870e', + sha256: + '60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f' + + '0ee37f54', + sha384: + '4ece084485813e9088d2c63a041bc5b44f9ef1012a2b588f3cd11f05' + + '033ac4c60c2ef6ab4030fe8296248df163f44952', + sha512: + '80b24263c7c1a3ebb71493c1dd7be8b49b46d1f41b4aeec1121b0137' + + '83f8f3526b56d037e05f2598bd0fd2215d6a1e5295e64f73f63f0aec' + + '8b915a985d786598' + } + }, + { + key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaa', 'hex'), + // 'This is a test using 
a larger than block-size key and a larger ' + + // 'than block-size data. The key needs to be hashed before being ' + + // 'used by the HMAC algorithm.' + data: Buffer.from('5468697320697320612074657374207573696e672061206c6' + + '172676572207468616e20626c6f636b2d73697a65206b6579' + + '20616e642061206c6172676572207468616e20626c6f636b2' + + 'd73697a6520646174612e20546865206b6579206e65656473' + + '20746f20626520686173686564206265666f7265206265696' + + 'e6720757365642062792074686520484d414320616c676f72' + + '6974686d2e', 'hex'), + hmac: { + sha224: '3a854166ac5d9f023f54d517d0b39dbd946770db9c2b95c9f6f565d1', + sha256: + '9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f5153' + + '5c3a35e2', + sha384: + '6617178e941f020d351e2f254e8fd32c602420feb0b8fb9adccebb82' + + '461e99c5a678cc31e799176d3860e6110c46523e', + sha512: + 'e37b6a775dc87dbaa4dfa9f96e5e3ffddebd71f8867289865df5a32d' + + '20cdc944b6022cac3c4982b10d5eeb55c3e4de15134676fb6de04460' + + '65c97440fa8c6a58' + } } - } -]; - -for (const testCase of rfc4231) { - for (const hash in testCase.hmac) { - let result = crypto.createHmac(hash, testCase.key) - .update(testCase.data) - .digest('hex'); - if (testCase.truncate) { - result = result.substr(0, 32); // first 128 bits == 32 hex chars + ]; + + for (const testCase of rfc4231) { + for (const hash in testCase.hmac) { + let result = crypto.createHmac(hash, testCase.key) + .update(testCase.data) + .digest('hex'); + if (testCase.truncate) { + result = result.substr(0, 32); // first 128 bits == 32 hex chars + } + assert.strictEqual( + testCase.hmac[hash], + result + ); } - assert.strictEqual( - testCase.hmac[hash], - result - ); } } // Test HMAC-MD5/SHA1 (rfc 2202 Test Cases) -const rfc2202_md5 = [ - { - key: Buffer.from('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', 'hex'), - data: 'Hi There', - hmac: '9294727a3638bb1c13f48ef8158bfc9d' - }, - { - key: 'Jefe', - data: 'what do ya want for nothing?', - hmac: '750c783e6ab0b503eaa86e310a5db738' - }, - { - key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'hex'), - data: Buffer.from('ddddddddddddddddddddddddddddddddddddddddddddddddd' + - 'ddddddddddddddddddddddddddddddddddddddddddddddddddd', - 'hex'), - hmac: '56be34521d144c88dbb8c733f0e8b3f6' - }, - { - key: Buffer.from('0102030405060708090a0b0c0d0e0f10111213141516171819', - 'hex'), - data: Buffer.from('cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdc' + - 'dcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd' + - 'cdcdcdcdcd', - 'hex'), - hmac: '697eaf0aca3a3aea3a75164746ffaa79' - }, - { - key: Buffer.from('0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c', 'hex'), - data: 'Test With Truncation', - hmac: '56461ef2342edc00f9bab995690efd4c' - }, - { - key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaa', - 'hex'), - data: 'Test Using Larger Than Block-Size Key - Hash Key First', - hmac: '6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd' - }, - { - key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaa', - 'hex'), - data: - 'Test Using Larger Than Block-Size Key and Larger Than One ' + - 'Block-Size Data', - hmac: '6f630fad67cda0ee1fb1f562db3aa53e' - } -]; -const rfc2202_sha1 = [ - { - key: Buffer.from('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', 'hex'), - data: 'Hi There', - hmac: 'b617318655057264e28bc0b6fb378c8ef146be00' - }, - { - key: 'Jefe', - data: 'what do 
ya want for nothing?', - hmac: 'effcdf6ae5eb2fa2d27416d5f184df9c259a7c79' - }, - { - key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'hex'), - data: Buffer.from('ddddddddddddddddddddddddddddddddddddddddddddd' + - 'ddddddddddddddddddddddddddddddddddddddddddddd' + - 'dddddddddd', - 'hex'), - hmac: '125d7342b9ac11cd91a39af48aa17b4f63f175d3' - }, - { - key: Buffer.from('0102030405060708090a0b0c0d0e0f10111213141516171819', - 'hex'), - data: Buffer.from('cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdc' + - 'dcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd' + - 'cdcdcdcdcd', - 'hex'), - hmac: '4c9007f4026250c6bc8414f9bf50c86c2d7235da' - }, - { - key: Buffer.from('0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c', 'hex'), - data: 'Test With Truncation', - hmac: '4c1a03424b55e07fe7f27be1d58bb9324a9a5a04' - }, - { - key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaa', - 'hex'), - data: 'Test Using Larger Than Block-Size Key - Hash Key First', - hmac: 'aa4ae5e15272d00e95705637ce8a3b55ed402112' - }, - { - key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + - 'aaaaaaaaaaaaaaaaaaaaaa', - 'hex'), - data: - 'Test Using Larger Than Block-Size Key and Larger Than One ' + - 'Block-Size Data', - hmac: 'e8e99d0f45237d786d6bbaa7965c7808bbff1a91' +{ + const rfc2202_md5 = [ + { + key: Buffer.from('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', 'hex'), + data: 'Hi There', + hmac: '9294727a3638bb1c13f48ef8158bfc9d' + }, + { + key: 'Jefe', + data: 'what do ya want for nothing?', + hmac: '750c783e6ab0b503eaa86e310a5db738' + }, + { + key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'hex'), + data: Buffer.from('ddddddddddddddddddddddddddddddddddddddddddddddddd' + + 'ddddddddddddddddddddddddddddddddddddddddddddddddddd', + 'hex'), + hmac: '56be34521d144c88dbb8c733f0e8b3f6' + }, + { + key: Buffer.from('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'hex'), + data: Buffer.from('cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdc' + + 'dcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd' + + 'cdcdcdcdcd', + 'hex'), + hmac: '697eaf0aca3a3aea3a75164746ffaa79' + }, + { + key: Buffer.from('0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c', 'hex'), + data: 'Test With Truncation', + hmac: '56461ef2342edc00f9bab995690efd4c' + }, + { + key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaa', + 'hex'), + data: 'Test Using Larger Than Block-Size Key - Hash Key First', + hmac: '6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd' + }, + { + key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaa', + 'hex'), + data: + 'Test Using Larger Than Block-Size Key and Larger Than One ' + + 'Block-Size Data', + hmac: '6f630fad67cda0ee1fb1f562db3aa53e' + } + ]; + const rfc2202_sha1 = [ + { + key: Buffer.from('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', 'hex'), + data: 'Hi There', + hmac: 'b617318655057264e28bc0b6fb378c8ef146be00' + }, + { + key: 'Jefe', + data: 'what do ya want for nothing?', + hmac: 'effcdf6ae5eb2fa2d27416d5f184df9c259a7c79' + }, + { + key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'hex'), + data: 
Buffer.from('ddddddddddddddddddddddddddddddddddddddddddddd' + + 'ddddddddddddddddddddddddddddddddddddddddddddd' + + 'dddddddddd', + 'hex'), + hmac: '125d7342b9ac11cd91a39af48aa17b4f63f175d3' + }, + { + key: Buffer.from('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'hex'), + data: Buffer.from('cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdc' + + 'dcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd' + + 'cdcdcdcdcd', + 'hex'), + hmac: '4c9007f4026250c6bc8414f9bf50c86c2d7235da' + }, + { + key: Buffer.from('0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c', 'hex'), + data: 'Test With Truncation', + hmac: '4c1a03424b55e07fe7f27be1d58bb9324a9a5a04' + }, + { + key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaa', + 'hex'), + data: 'Test Using Larger Than Block-Size Key - Hash Key First', + hmac: 'aa4ae5e15272d00e95705637ce8a3b55ed402112' + }, + { + key: Buffer.from('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + + 'aaaaaaaaaaaaaaaaaaaaaa', + 'hex'), + data: + 'Test Using Larger Than Block-Size Key and Larger Than One ' + + 'Block-Size Data', + hmac: 'e8e99d0f45237d786d6bbaa7965c7808bbff1a91' + } + ]; + + if (!common.hasFipsCrypto) { + for (const testCase of rfc2202_md5) { + assert.strictEqual( + testCase.hmac, + crypto.createHmac('md5', testCase.key) + .update(testCase.data) + .digest('hex') + ); + } } -]; - -if (!common.hasFipsCrypto) { - for (const testCase of rfc2202_md5) { + for (const testCase of rfc2202_sha1) { assert.strictEqual( testCase.hmac, - crypto.createHmac('md5', testCase.key) + crypto.createHmac('sha1', testCase.key) .update(testCase.data) .digest('hex') ); } } -for (const testCase of rfc2202_sha1) { - assert.strictEqual( - testCase.hmac, - crypto.createHmac('sha1', testCase.key) - .update(testCase.data) - .digest('hex') - ); -} // Test hashing -const a1 = crypto.createHash('sha1').update('Test123').digest('hex'); -const a2 = crypto.createHash('sha256').update('Test123').digest('base64'); -const a3 = crypto.createHash('sha512').update('Test123').digest(); // binary -const a4 = crypto.createHash('sha1').update('Test123').digest('buffer'); +{ + const a1 = crypto.createHash('sha1').update('Test123').digest('hex'); + const a2 = crypto.createHash('sha256').update('Test123').digest('base64'); + const a3 = crypto.createHash('sha512').update('Test123').digest(); // binary + const a4 = crypto.createHash('sha1').update('Test123').digest('buffer'); -if (!common.hasFipsCrypto) { - const a0 = crypto.createHash('md5').update('Test123').digest('latin1'); - assert.strictEqual( - a0, - 'h\u00ea\u00cb\u0097\u00d8o\fF!\u00fa+\u000e\u0017\u00ca\u00bd\u008c' - ); -} + if (!common.hasFipsCrypto) { + const a0 = crypto.createHash('md5').update('Test123').digest('latin1'); + assert.strictEqual( + a0, + 'h\u00ea\u00cb\u0097\u00d8o\fF!\u00fa+\u000e\u0017\u00ca\u00bd\u008c' + ); + } -assert.strictEqual(a1, '8308651804facb7b9af8ffc53a33a22d6a1c8ac2'); + assert.strictEqual(a1, '8308651804facb7b9af8ffc53a33a22d6a1c8ac2'); -assert.strictEqual(a2, '2bX1jws4GYKTlxhloUB09Z66PoJZW+y+hq5R8dnx9l4='); + assert.strictEqual(a2, '2bX1jws4GYKTlxhloUB09Z66PoJZW+y+hq5R8dnx9l4='); -assert.strictEqual( - a3, - '\u00c1(4\u00f1\u0003\u001fd\u0097!O\'\u00d4C/&Qz\u00d4' + - '\u0094\u0015l\u00b8\u008dQ+\u00db\u001d\u00c4\u00b5}\u00b2' + - 
'\u00d6\u0092\u00a3\u00df\u00a2i\u00a1\u009b\n\n*\u000f' + - '\u00d7\u00d6\u00a2\u00a8\u0085\u00e3<\u0083\u009c\u0093' + - '\u00c2\u0006\u00da0\u00a1\u00879(G\u00ed\'', - 'Test SHA512 as assumed latin1' -); + // Test SHA512 as assumed latin1 + assert.strictEqual( + a3, + '\u00c1(4\u00f1\u0003\u001fd\u0097!O\'\u00d4C/&Qz\u00d4' + + '\u0094\u0015l\u00b8\u008dQ+\u00db\u001d\u00c4\u00b5}\u00b2' + + '\u00d6\u0092\u00a3\u00df\u00a2i\u00a1\u009b\n\n*\u000f' + + '\u00d7\u00d6\u00a2\u00a8\u0085\u00e3<\u0083\u009c\u0093' + + '\u00c2\u0006\u00da0\u00a1\u00879(G\u00ed\'' + ); -assert.deepStrictEqual( - a4, - Buffer.from('8308651804facb7b9af8ffc53a33a22d6a1c8ac2', 'hex') -); + assert.deepStrictEqual( + a4, + Buffer.from('8308651804facb7b9af8ffc53a33a22d6a1c8ac2', 'hex') + ); +} // Test multiple updates to same hash -const h1 = crypto.createHash('sha1').update('Test123').digest('hex'); -const h2 = crypto.createHash('sha1').update('Test').update('123').digest('hex'); -assert.strictEqual(h1, h2); +{ + const h1 = crypto.createHash('sha1').update('Test123').digest('hex'); + const h2 = crypto.createHash('sha1').update('Test').update('123') + .digest('hex'); + assert.strictEqual(h1, h2); +} // Test hashing for binary files -const fn = fixtures.path('sample.png'); -const sha1Hash = crypto.createHash('sha1'); -const fileStream = fs.createReadStream(fn); -fileStream.on('data', function(data) { - sha1Hash.update(data); -}); -fileStream.on('close', common.mustCall(function() { - assert.strictEqual( - sha1Hash.digest('hex'), - '22723e553129a336ad96e10f6aecdf0f45e4149e' - ); -})); +{ + const fn = fixtures.path('sample.png'); + const sha1Hash = crypto.createHash('sha1'); + const fileStream = fs.createReadStream(fn); + fileStream.on('data', function(data) { + sha1Hash.update(data); + }); + fileStream.on('close', common.mustCall(function() { + assert.strictEqual( + sha1Hash.digest('hex'), + '22723e553129a336ad96e10f6aecdf0f45e4149e' + ); + })); +} // Unknown digest method should throw an error: // https://github.com/nodejs/node-v0.x-archive/issues/2227 @@ -418,32 +430,34 @@ assert.throws(function() { }, /^Error: Digest method not supported$/); // Test signing and verifying -const s1 = crypto.createSign('SHA1') - .update('Test123') - .sign(keyPem, 'base64'); -const s1Verified = crypto.createVerify('SHA1') - .update('Test') - .update('123') - .verify(certPem, s1, 'base64'); -assert.strictEqual(s1Verified, true); - -const s2 = crypto.createSign('SHA256') - .update('Test123') - .sign(keyPem); // binary -const s2Verified = crypto.createVerify('SHA256') - .update('Test') - .update('123') - .verify(certPem, s2); // binary -assert.strictEqual(s2Verified, true); - -const s3 = crypto.createSign('SHA1') - .update('Test123') - .sign(keyPem, 'buffer'); -const s3Verified = crypto.createVerify('SHA1') - .update('Test') - .update('123') - .verify(certPem, s3); -assert.strictEqual(s3Verified, true); +{ + const s1 = crypto.createSign('SHA1') + .update('Test123') + .sign(keyPem, 'base64'); + const s1Verified = crypto.createVerify('SHA1') + .update('Test') + .update('123') + .verify(certPem, s1, 'base64'); + assert.strictEqual(s1Verified, true); + + const s2 = crypto.createSign('SHA256') + .update('Test123') + .sign(keyPem); // binary + const s2Verified = crypto.createVerify('SHA256') + .update('Test') + .update('123') + .verify(certPem, s2); // binary + assert.strictEqual(s2Verified, true); + + const s3 = crypto.createSign('SHA1') + .update('Test123') + .sign(keyPem, 'buffer'); + const s3Verified = 
crypto.createVerify('SHA1') + .update('Test') + .update('123') + .verify(certPem, s3); + assert.strictEqual(s3Verified, true); +} function testCipher1(key) { @@ -569,60 +583,61 @@ common.expectsError( // Test Diffie-Hellman with two parties sharing a secret, // using various encodings as we go along -const dh1 = crypto.createDiffieHellman(common.hasFipsCrypto ? 1024 : 256); -const p1 = dh1.getPrime('buffer'); -const dh2 = crypto.createDiffieHellman(p1, 'base64'); -const key1 = dh1.generateKeys(); -const key2 = dh2.generateKeys('hex'); -const secret1 = dh1.computeSecret(key2, 'hex', 'base64'); -const secret2 = dh2.computeSecret(key1, 'latin1', 'buffer'); - -assert.strictEqual(secret1, secret2.toString('base64')); - -// Create "another dh1" using generated keys from dh1, -// and compute secret again -const dh3 = crypto.createDiffieHellman(p1, 'buffer'); -const privkey1 = dh1.getPrivateKey(); -dh3.setPublicKey(key1); -dh3.setPrivateKey(privkey1); - -assert.strictEqual(dh1.getPrime(), dh3.getPrime()); -assert.strictEqual(dh1.getGenerator(), dh3.getGenerator()); -assert.strictEqual(dh1.getPublicKey(), dh3.getPublicKey()); -assert.strictEqual(dh1.getPrivateKey(), dh3.getPrivateKey()); - -const secret3 = dh3.computeSecret(key2, 'hex', 'base64'); - -assert.strictEqual(secret1, secret3); - -// https://github.com/joyent/node/issues/2338 -const p = 'FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74' + - '020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F1437' + - '4FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED' + - 'EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF'; -const d = crypto.createDiffieHellman(p, 'hex'); -assert.strictEqual(d.verifyError, DH_NOT_SUITABLE_GENERATOR); - -// Test RSA key signing/verification -const rsaSign = crypto.createSign('SHA1'); -const rsaVerify = crypto.createVerify('SHA1'); -assert.ok(rsaSign instanceof crypto.Sign); -assert.ok(rsaVerify instanceof crypto.Verify); - -rsaSign.update(rsaPubPem); -const rsaSignature = rsaSign.sign(rsaKeyPem, 'hex'); -assert.strictEqual( - rsaSignature, - '5c50e3145c4e2497aadb0eabc83b342d0b0021ece0d4c4a064b7c' + - '8f020d7e2688b122bfb54c724ac9ee169f83f66d2fe90abeb95e8' + - 'e1290e7e177152a4de3d944cf7d4883114a20ed0f78e70e25ef0f' + - '60f06b858e6af42a2f276ede95bbc6bc9a9bbdda15bd663186a6f' + - '40819a7af19e577bb2efa5e579a1f5ce8a0d4ca8b8f6' -); - -rsaVerify.update(rsaPubPem); -assert.strictEqual(rsaVerify.verify(rsaPubPem, rsaSignature, 'hex'), true); +{ + const dh1 = crypto.createDiffieHellman(common.hasFipsCrypto ? 
1024 : 256); + const p1 = dh1.getPrime('buffer'); + const dh2 = crypto.createDiffieHellman(p1, 'base64'); + const key1 = dh1.generateKeys(); + const key2 = dh2.generateKeys('hex'); + const secret1 = dh1.computeSecret(key2, 'hex', 'base64'); + const secret2 = dh2.computeSecret(key1, 'latin1', 'buffer'); + + assert.strictEqual(secret1, secret2.toString('base64')); + + // Create "another dh1" using generated keys from dh1, + // and compute secret again + const dh3 = crypto.createDiffieHellman(p1, 'buffer'); + const privkey1 = dh1.getPrivateKey(); + dh3.setPublicKey(key1); + dh3.setPrivateKey(privkey1); + + assert.strictEqual(dh1.getPrime(), dh3.getPrime()); + assert.strictEqual(dh1.getGenerator(), dh3.getGenerator()); + assert.strictEqual(dh1.getPublicKey(), dh3.getPublicKey()); + assert.strictEqual(dh1.getPrivateKey(), dh3.getPrivateKey()); + + const secret3 = dh3.computeSecret(key2, 'hex', 'base64'); + + assert.strictEqual(secret1, secret3); + + // https://github.com/joyent/node/issues/2338 + const p = 'FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74' + + '020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F1437' + + '4FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED' + + 'EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF'; + const d = crypto.createDiffieHellman(p, 'hex'); + assert.strictEqual(d.verifyError, DH_NOT_SUITABLE_GENERATOR); + + // Test RSA key signing/verification + const rsaSign = crypto.createSign('SHA1'); + const rsaVerify = crypto.createVerify('SHA1'); + assert.ok(rsaSign instanceof crypto.Sign); + assert.ok(rsaVerify instanceof crypto.Verify); + + rsaSign.update(rsaPubPem); + const rsaSignature = rsaSign.sign(rsaKeyPem, 'hex'); + assert.strictEqual( + rsaSignature, + '5c50e3145c4e2497aadb0eabc83b342d0b0021ece0d4c4a064b7c' + + '8f020d7e2688b122bfb54c724ac9ee169f83f66d2fe90abeb95e8' + + 'e1290e7e177152a4de3d944cf7d4883114a20ed0f78e70e25ef0f' + + '60f06b858e6af42a2f276ede95bbc6bc9a9bbdda15bd663186a6f' + + '40819a7af19e577bb2efa5e579a1f5ce8a0d4ca8b8f6' + ); + rsaVerify.update(rsaPubPem); + assert.strictEqual(rsaVerify.verify(rsaPubPem, rsaSignature, 'hex'), true); +} // // Test RSA signing and verification diff --git a/test/parallel/test-crypto-hmac.js b/test/parallel/test-crypto-hmac.js index 92fa16f98cda44..0597891e0041a7 100644 --- a/test/parallel/test-crypto-hmac.js +++ b/test/parallel/test-crypto-hmac.js @@ -269,7 +269,12 @@ for (let i = 0, l = rfc4231.length; i < l; i++) { expected, `Test HMAC-${hash} rfc 4231 case ${i + 1}: ${actual} must be ${expected}` ); - assert.strictEqual(actual, strRes, 'Should get same result from stream'); + assert.strictEqual( + actual, + strRes, + `Should get same result from stream (hash: ${hash} and case: ${i + 1})` + + ` => ${actual} must be ${strRes}` + ); } } diff --git a/test/parallel/test-dns-multi-channel.js b/test/parallel/test-dns-multi-channel.js index 63e82c3ed791a4..bd88fe0b24fc75 100644 --- a/test/parallel/test-dns-multi-channel.js +++ b/test/parallel/test-dns-multi-channel.js @@ -31,7 +31,7 @@ for (const { socket, reply } of servers) { })); socket.bind(0, common.mustCall(() => { - if (0 === --waiting) ready(); + if (--waiting === 0) ready(); })); } diff --git a/test/parallel/test-events-uncaught-exception-stack.js b/test/parallel/test-events-uncaught-exception-stack.js new file mode 100644 index 00000000000000..c55322a5aa56c4 --- /dev/null +++ b/test/parallel/test-events-uncaught-exception-stack.js @@ -0,0 +1,16 @@ +'use strict'; +const common = 
require('../common'); +const assert = require('assert'); +const EventEmitter = require('events'); + +// Tests that the error stack where the exception was thrown is *not* appended. + +process.on('uncaughtException', common.mustCall((err) => { + const lines = err.stack.split('\n'); + assert.strictEqual(lines[0], 'Error'); + lines.slice(1).forEach((line) => { + assert(/^ at/.test(line), `${line} has an unexpected format`); + }); +})); + +new EventEmitter().emit('error', new Error()); diff --git a/test/parallel/test-fs-watch-recursive.js b/test/parallel/test-fs-watch-recursive.js index 82d87aa2ecbb93..4985ece0e0ec15 100644 --- a/test/parallel/test-fs-watch-recursive.js +++ b/test/parallel/test-fs-watch-recursive.js @@ -24,7 +24,7 @@ const watcher = fs.watch(testDir, { recursive: true }); let watcherClosed = false; watcher.on('change', function(event, filename) { - assert.ok('change' === event || 'rename' === event); + assert.ok(event === 'change' || event === 'rename'); // Ignore stale events generated by mkdir and other tests if (filename !== relativePathOne) diff --git a/test/parallel/test-http-client-spurious-aborted.js b/test/parallel/test-http-client-spurious-aborted.js new file mode 100644 index 00000000000000..58a2f92de94054 --- /dev/null +++ b/test/parallel/test-http-client-spurious-aborted.js @@ -0,0 +1,75 @@ +'use strict'; + +const common = require('../common'); +const http = require('http'); +const assert = require('assert'); +const { Writable } = require('stream'); +const Countdown = require('../common/countdown'); + +const N = 2; +let abortRequest = true; + +const server = http.Server(common.mustCall((req, res) => { + const headers = { 'Content-Type': 'text/plain' }; + headers['Content-Length'] = 50; + const socket = res.socket; + res.writeHead(200, headers); + res.write('aaaaaaaaaabbbbbbbbbbccccccccccdddddddddd'); + if (abortRequest) { + process.nextTick(() => socket.destroy()); + } else { + process.nextTick(() => res.end('eeeeeeeeee')); + } +}, N)); + +server.listen(0, common.mustCall(() => { + download(); +})); + +const finishCountdown = new Countdown(N, common.mustCall(() => { + server.close(); +})); +const reqCountdown = new Countdown(N, common.mustCall()); + +function download() { + const opts = { + port: server.address().port, + path: '/', + }; + const req = http.get(opts); + req.on('error', common.mustNotCall()); + req.on('response', (res) => { + assert.strictEqual(res.statusCode, 200); + assert.strictEqual(res.headers.connection, 'close'); + let aborted = false; + const writable = new Writable({ + write(chunk, encoding, callback) { + callback(); + } + }); + res.pipe(writable); + const _handle = res.socket._handle; + _handle._close = res.socket._handle.close; + _handle.close = function(callback) { + _handle._close(); + // set readable to true even though request is complete + if (res.complete) res.readable = true; + callback(); + }; + res.on('end', common.mustCall(() => { + reqCountdown.dec(); + })); + res.on('aborted', () => { + aborted = true; + }); + res.on('error', common.mustNotCall()); + writable.on('finish', () => { + assert.strictEqual(aborted, abortRequest); + finishCountdown.dec(); + if (finishCountdown.remaining === 0) return; + abortRequest = false; // next one should be a good response + download(); + }); + }); + req.end(); +} diff --git a/test/parallel/test-http-connect.js b/test/parallel/test-http-connect.js index 9499cd95275f8c..9b7432f03a7542 100644 --- a/test/parallel/test-http-connect.js +++ b/test/parallel/test-http-connect.js @@ -71,7 +71,8 @@ 
server.listen(0, common.mustCall(() => { // the stream.Duplex onend listener // allow 0 here, so that i can run the same test on streams1 impl - assert(socket.listeners('end').length <= 1); + assert(socket.listenerCount('end') <= 2, + `Found ${socket.listenerCount('end')} end listeners`); assert.strictEqual(socket.listeners('free').length, 0); assert.strictEqual(socket.listeners('close').length, 0); diff --git a/test/parallel/test-http2-client-onconnect-errors.js b/test/parallel/test-http2-client-onconnect-errors.js index 44fe6875602187..af67a0d0ae27db 100644 --- a/test/parallel/test-http2-client-onconnect-errors.js +++ b/test/parallel/test-http2-client-onconnect-errors.js @@ -88,9 +88,14 @@ function runTest(test) { req.on('error', errorMustCall); } else { client.on('error', errorMustCall); - req.on('error', common.expectsError({ - code: 'ERR_HTTP2_STREAM_CANCEL' - })); + req.on('error', (err) => { + common.expectsError({ + code: 'ERR_HTTP2_STREAM_CANCEL' + })(err); + common.expectsError({ + code: 'ERR_HTTP2_ERROR' + })(err.cause); + }); } req.on('end', common.mustCall()); diff --git a/test/parallel/test-http2-client-write-empty-string.js b/test/parallel/test-http2-client-write-empty-string.js new file mode 100644 index 00000000000000..8b8cb3d6404e1e --- /dev/null +++ b/test/parallel/test-http2-client-write-empty-string.js @@ -0,0 +1,55 @@ +'use strict'; + +const assert = require('assert'); + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +const http2 = require('http2'); + +for (const chunkSequence of [ + [ '' ], + [ '', '' ] +]) { + const server = http2.createServer(); + server.on('stream', common.mustCall((stream, headers, flags) => { + stream.respond({ 'content-type': 'text/html' }); + + let data = ''; + stream.on('data', common.mustNotCall((chunk) => { + data += chunk.toString(); + })); + stream.on('end', common.mustCall(() => { + stream.end(`"${data}"`); + })); + })); + + server.listen(0, common.mustCall(() => { + const port = server.address().port; + const client = http2.connect(`http://localhost:${port}`); + + const req = client.request({ + ':method': 'POST', + ':path': '/' + }); + + req.on('response', common.mustCall((headers) => { + assert.strictEqual(headers[':status'], 200); + assert.strictEqual(headers['content-type'], 'text/html'); + })); + + let data = ''; + req.setEncoding('utf8'); + req.on('data', common.mustCallAtLeast((d) => data += d)); + req.on('end', common.mustCall(() => { + assert.strictEqual(data, '""'); + server.close(); + client.close(); + })); + + for (const chunk of chunkSequence) + req.write(chunk); + req.end(); + })); +} diff --git a/test/parallel/test-http2-compat-short-stream-client-server.js b/test/parallel/test-http2-compat-short-stream-client-server.js new file mode 100644 index 00000000000000..f7ef9412106f59 --- /dev/null +++ b/test/parallel/test-http2-compat-short-stream-client-server.js @@ -0,0 +1,50 @@ +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); +const assert = require('assert'); +const http2 = require('http2'); +const { Readable } = require('stream'); + +const server = http2.createServer(common.mustCall((req, res) => { + res.setHeader('content-type', 'text/html'); + const input = new Readable({ + read() { + this.push('test'); + this.push(null); + } + }); + input.pipe(res); +})); + +server.listen(0, common.mustCall(() => { + const port = server.address().port; + const client = http2.connect(`http://localhost:${port}`); + + const req = 
client.request(); + + req.on('response', common.mustCall((headers) => { + assert.strictEqual(headers[':status'], 200); + assert.strictEqual(headers['content-type'], 'text/html'); + })); + + let data = ''; + + const notCallClose = common.mustNotCall(); + + setTimeout(() => { + req.setEncoding('utf8'); + req.removeListener('close', notCallClose); + req.on('close', common.mustCall(() => { + server.close(); + client.close(); + })); + req.on('data', common.mustCallAtLeast((d) => data += d)); + req.on('end', common.mustCall(() => { + assert.strictEqual(data, 'test'); + })); + }, common.platformTimeout(100)); + + req.on('close', notCallClose); +})); diff --git a/test/parallel/test-http2-https-fallback.js b/test/parallel/test-http2-https-fallback.js index 01b694e586dd49..a872d686d34f85 100644 --- a/test/parallel/test-http2-https-fallback.js +++ b/test/parallel/test-http2-https-fallback.js @@ -6,7 +6,7 @@ const fixtures = require('../common/fixtures'); if (!common.hasCrypto) common.skip('missing crypto'); -const { strictEqual } = require('assert'); +const { strictEqual, ok } = require('assert'); const { createSecureContext } = require('tls'); const { createSecureServer, connect } = require('http2'); const { get } = require('https'); @@ -31,7 +31,7 @@ function onRequest(request, response) { })); } -function onSession(session) { +function onSession(session, next) { const headers = { ':path': '/', ':method': 'GET', @@ -54,6 +54,10 @@ function onSession(session) { session.close(); this.cleanup(); + + if (typeof next === 'function') { + next(); + } })); request.end(); } @@ -126,15 +130,31 @@ function onSession(session) { connect( origin, clientOptions, - common.mustCall(onSession.bind({ cleanup, server })) + common.mustCall(function(session) { + onSession.call({ cleanup, server }, + session, + common.mustCall(testNoTls)); + }) ); - // HTTP/1.1 client - get(Object.assign(parse(origin), clientOptions), common.mustNotCall()) - .on('error', common.mustCall(cleanup)); - - // Incompatible ALPN TLS client - tls(Object.assign({ port, ALPNProtocols: ['fake'] }, clientOptions)) - .on('error', common.mustCall(cleanup)); + function testNoTls() { + // HTTP/1.1 client + get(Object.assign(parse(origin), clientOptions), common.mustNotCall) + .on('error', common.mustCall(cleanup)) + .on('error', common.mustCall(testWrongALPN)) + .end(); + } + + function testWrongALPN() { + // Incompatible ALPN TLS client + let text = ''; + tls(Object.assign({ port, ALPNProtocols: ['fake'] }, clientOptions)) + .setEncoding('utf8') + .on('data', (chunk) => text += chunk) + .on('end', common.mustCall(() => { + ok(/Unknown ALPN Protocol, expected `h2` to be available/.test(text)); + cleanup(); + })); + } })); } diff --git a/test/parallel/test-http2-short-stream-client-server.js b/test/parallel/test-http2-short-stream-client-server.js new file mode 100644 index 00000000000000..e632b8d96b9ea9 --- /dev/null +++ b/test/parallel/test-http2-short-stream-client-server.js @@ -0,0 +1,55 @@ +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); +const assert = require('assert'); +const http2 = require('http2'); +const { Readable } = require('stream'); + +const server = http2.createServer(); +server.on('stream', common.mustCall((stream) => { + stream.respond({ + ':status': 200, + 'content-type': 'text/html' + }); + const input = new Readable({ + read() { + this.push('test'); + this.push(null); + } + }); + input.pipe(stream); +})); + + +server.listen(0, common.mustCall(() => { + const port = 
server.address().port; + const client = http2.connect(`http://localhost:${port}`); + + const req = client.request(); + + req.on('response', common.mustCall((headers) => { + assert.strictEqual(headers[':status'], 200); + assert.strictEqual(headers['content-type'], 'text/html'); + })); + + let data = ''; + + const notCallClose = common.mustNotCall(); + + setTimeout(() => { + req.setEncoding('utf8'); + req.removeListener('close', notCallClose); + req.on('close', common.mustCall(() => { + server.close(); + client.close(); + })); + req.on('data', common.mustCallAtLeast((d) => data += d)); + req.on('end', common.mustCall(() => { + assert.strictEqual(data, 'test'); + })); + }, common.platformTimeout(100)); + + req.on('close', notCallClose); +})); diff --git a/test/parallel/test-http2-tls-disconnect.js b/test/parallel/test-http2-tls-disconnect.js new file mode 100644 index 00000000000000..2e635fe1376a51 --- /dev/null +++ b/test/parallel/test-http2-tls-disconnect.js @@ -0,0 +1,32 @@ +'use strict'; +const common = require('../common'); +const fixtures = require('../common/fixtures'); + +if (!common.hasCrypto) + common.skip('missing crypto'); + +const child_process = require('child_process'); +const http2 = require('http2'); +const fs = require('fs'); + +const key = fixtures.readKey('agent8-key.pem', 'binary'); +const cert = fixtures.readKey('agent8-cert.pem', 'binary'); + +const server = http2.createSecureServer({ key, cert }, (request, response) => { + fs.createReadStream(process.execPath).pipe(response); +}); + +// This should be doable with a reproduction purely written in Node; +// that just requires somebody to take the time and actually do it. +server.listen(0, () => { + const proc = child_process.spawn('h2load', [ + '-n', '1000', + `https://localhost:${server.address().port}/` + ]); + proc.on('error', (err) => { + if (err.code === 'ENOENT') + common.skip('no h2load'); + }); + proc.on('exit', () => server.close()); + setTimeout(() => proc.kill(2), 100); +}); diff --git a/test/parallel/test-http2-write-finishes-after-stream-destroy.js b/test/parallel/test-http2-write-finishes-after-stream-destroy.js new file mode 100644 index 00000000000000..3b2dd4bcd4e548 --- /dev/null +++ b/test/parallel/test-http2-write-finishes-after-stream-destroy.js @@ -0,0 +1,62 @@ +// Flags: --expose-gc +'use strict'; +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); +const assert = require('assert'); +const http2 = require('http2'); +const makeDuplexPair = require('../common/duplexpair'); + +// Make sure the Http2Stream destructor works, since we don't clean the +// stream up like we would otherwise do. +process.on('exit', global.gc); + +{ + const { clientSide, serverSide } = makeDuplexPair(); + + let serverSideHttp2Stream; + let serverSideHttp2StreamDestroyed = false; + const server = http2.createServer(); + server.on('stream', common.mustCall((stream, headers) => { + serverSideHttp2Stream = stream; + stream.respond({ + 'content-type': 'text/html', + ':status': 200 + }); + + const originalWrite = serverSide._write; + serverSide._write = (buf, enc, cb) => { + if (serverSideHttp2StreamDestroyed) { + serverSide.destroy(); + serverSide.write = () => {}; + } else { + setImmediate(() => { + originalWrite.call(serverSide, buf, enc, () => setImmediate(cb)); + }); + } + }; + + // Enough data to fit into a single *session* window, + // not enough data to fit into a single *stream* window. 
+ stream.write(Buffer.alloc(40000)); + })); + + server.emit('connection', serverSide); + + const client = http2.connect('http://localhost:80', { + createConnection: common.mustCall(() => clientSide) + }); + + const req = client.request({ ':path': '/' }); + + req.on('response', common.mustCall((headers) => { + assert.strictEqual(headers[':status'], 200); + })); + + req.on('data', common.mustCallAtLeast(() => { + if (!serverSideHttp2StreamDestroyed) { + serverSideHttp2Stream.destroy(); + serverSideHttp2StreamDestroyed = true; + } + })); +} diff --git a/test/parallel/test-inspector-esm.js b/test/parallel/test-inspector-esm.js index 696f2af9a77462..3171da58cf7a4c 100644 --- a/test/parallel/test-inspector-esm.js +++ b/test/parallel/test-inspector-esm.js @@ -5,6 +5,7 @@ const common = require('../common'); common.skipIfInspectorDisabled(); const assert = require('assert'); +const { resolve: UrlResolve } = require('url'); const fixtures = require('../common/fixtures'); const { NodeInstance } = require('../common/inspector-helper.js'); @@ -43,14 +44,15 @@ async function testBreakpointOnStart(session) { ]; await session.send(commands); - await session.waitForBreakOnLine(0, session.scriptURL()); + await session.waitForBreakOnLine( + 0, UrlResolve(session.scriptURL().toString(), 'message.mjs')); } async function testBreakpoint(session) { console.log('[test]', 'Setting a breakpoint and verifying it is hit'); const commands = [ { 'method': 'Debugger.setBreakpointByUrl', - 'params': { 'lineNumber': 5, + 'params': { 'lineNumber': 7, 'url': session.scriptURL(), 'columnNumber': 0, 'condition': '' @@ -66,7 +68,7 @@ async function testBreakpoint(session) { `Script source is wrong: ${scriptSource}`); await session.waitForConsoleOutput('log', ['A message', 5]); - const paused = await session.waitForBreakOnLine(5, session.scriptURL()); + const paused = await session.waitForBreakOnLine(7, session.scriptURL()); const scopeId = paused.params.callFrames[0].scopeChain[0].object.objectId; console.log('[test]', 'Verify we can read current application state'); @@ -79,7 +81,7 @@ async function testBreakpoint(session) { 'generatePreview': true } }); - assertScopeValues(response, { t: 1001, k: 1 }); + assertScopeValues(response, { t: 1001, k: 1, message: 'A message' }); let { result } = await session.send({ 'method': 'Debugger.evaluateOnCallFrame', 'params': { diff --git a/test/parallel/test-module-main-extension-lookup.js b/test/parallel/test-module-main-extension-lookup.js index 0a8cc47c77b2ed..6f7bc2eb1db6b2 100644 --- a/test/parallel/test-module-main-extension-lookup.js +++ b/test/parallel/test-module-main-extension-lookup.js @@ -5,3 +5,5 @@ const { execFileSync } = require('child_process'); const node = process.argv[0]; execFileSync(node, ['--experimental-modules', 'test/es-module/test-esm-ok']); +execFileSync(node, ['--experimental-modules', + 'test/fixtures/es-modules/noext']); diff --git a/test/parallel/test-module-symlinked-peer-modules.js b/test/parallel/test-module-symlinked-peer-modules.js index f93dea720f9a12..27e67b31d182e5 100644 --- a/test/parallel/test-module-symlinked-peer-modules.js +++ b/test/parallel/test-module-symlinked-peer-modules.js @@ -43,7 +43,7 @@ fs.mkdirSync(moduleB); // Attempt to make the symlink. If this fails due to lack of sufficient // permissions, the test will bail out and be skipped. 
try { - fs.symlinkSync(moduleA, moduleA_link); + fs.symlinkSync(moduleA, moduleA_link, 'dir'); } catch (err) { if (err.code !== 'EPERM') throw err; common.skip('insufficient privileges for symlinks'); diff --git a/test/parallel/test-net-pingpong.js b/test/parallel/test-net-pingpong.js index 9fc59db4e2ff2f..4ab36fc23957b5 100644 --- a/test/parallel/test-net-pingpong.js +++ b/test/parallel/test-net-pingpong.js @@ -53,7 +53,7 @@ function pingPongTest(port, host) { // Since we never queue data (we're always waiting for the PING // before sending a pong) the writeQueueSize should always be less // than one message. - assert.ok(0 <= socket.bufferSize && socket.bufferSize <= 4); + assert.ok(socket.bufferSize >= 0 && socket.bufferSize <= 4); assert.strictEqual(socket.writable, true); assert.strictEqual(socket.readable, true); diff --git a/test/parallel/test-net-server-max-connections.js b/test/parallel/test-net-server-max-connections.js index c73efb3c1ef406..a711e5295be8bf 100644 --- a/test/parallel/test-net-server-max-connections.js +++ b/test/parallel/test-net-server-max-connections.js @@ -88,7 +88,7 @@ function makeConnection(index) { c.on('data', function(b) { gotData = true; - assert.ok(0 < b.length); + assert.ok(b.length > 0); }); c.on('error', function(e) { diff --git a/test/parallel/test-net-socket-byteswritten.js b/test/parallel/test-net-socket-byteswritten.js index 6f3ce8a3c6c8d1..b7b7af89e215db 100644 --- a/test/parallel/test-net-socket-byteswritten.js +++ b/test/parallel/test-net-socket-byteswritten.js @@ -16,7 +16,7 @@ server.listen(0, common.mustCall(function() { socket.cork(); socket.write('one'); - socket.write(new Buffer('twø', 'utf8')); + socket.write(Buffer.from('twø', 'utf8')); socket.uncork(); diff --git a/test/parallel/test-pending-deprecation.js b/test/parallel/test-pending-deprecation.js index 9b1fd5addfa0c6..f8b4ec8e5b7b94 100644 --- a/test/parallel/test-pending-deprecation.js +++ b/test/parallel/test-pending-deprecation.js @@ -25,7 +25,8 @@ switch (process.argv[2]) { break; default: // Verify that the flag is off by default. - assert.strictEqual(config.pendingDeprecation, undefined); + const envvar = process.env.NODE_PENDING_DEPRECATION; + assert.strictEqual(config.pendingDeprecation, envvar && envvar[0] === '1'); // Test the --pending-deprecation command line switch. fork(__filename, ['switch'], { diff --git a/test/parallel/test-performance.js b/test/parallel/test-performance.js index ba15479050f9e1..d262e1f1f149d1 100644 --- a/test/parallel/test-performance.js +++ b/test/parallel/test-performance.js @@ -7,6 +7,12 @@ const { performance } = require('perf_hooks'); assert(performance); assert(performance.nodeTiming); assert.strictEqual(typeof performance.timeOrigin, 'number'); +// Use a fairly large epsilon value, since we can only guarantee that the node +// process started up in 20 seconds. 
+assert(Math.abs(performance.timeOrigin - Date.now()) < 20000); + +const inited = performance.now(); +assert(inited < 20000); { const entries = performance.getEntries(); @@ -104,23 +110,81 @@ assert.strictEqual(typeof performance.timeOrigin, 'number'); assert.strictEqual(performance.nodeTiming.name, 'node'); assert.strictEqual(performance.nodeTiming.entryType, 'node'); -[ - 'startTime', - 'duration', - 'nodeStart', - 'v8Start', - 'bootstrapComplete', - 'environment', - 'loopStart', - 'loopExit', - 'thirdPartyMainStart', - 'thirdPartyMainEnd', - 'clusterSetupStart', - 'clusterSetupEnd', - 'moduleLoadStart', - 'moduleLoadEnd', - 'preloadModuleLoadStart', - 'preloadModuleLoadEnd' -].forEach((i) => { - assert.strictEqual(typeof performance.nodeTiming[i], 'number'); +function checkNodeTiming(props) { + for (const prop of Object.keys(props)) { + if (props[prop].around !== undefined) { + assert.strictEqual(typeof performance.nodeTiming[prop], 'number'); + const delta = performance.nodeTiming[prop] - props[prop].around; + assert(Math.abs(delta) < 1000); + } else { + assert.strictEqual(performance.nodeTiming[prop], props[prop]); + } + } +} + +checkNodeTiming({ + name: 'node', + entryType: 'node', + startTime: 0, + duration: { around: performance.now() }, + nodeStart: { around: 0 }, + v8Start: { around: 0 }, + bootstrapComplete: -1, + environment: { around: 0 }, + loopStart: -1, + loopExit: -1, + thirdPartyMainStart: -1, + thirdPartyMainEnd: -1, + clusterSetupStart: -1, + clusterSetupEnd: -1, + moduleLoadStart: { around: inited }, + moduleLoadEnd: { around: inited }, + preloadModuleLoadStart: { around: inited }, + preloadModuleLoadEnd: { around: inited }, +}); + +setTimeout(() => { + checkNodeTiming({ + name: 'node', + entryType: 'node', + startTime: 0, + duration: { around: performance.now() }, + nodeStart: { around: 0 }, + v8Start: { around: 0 }, + bootstrapComplete: { around: inited }, + environment: { around: 0 }, + loopStart: { around: inited }, + loopExit: -1, + thirdPartyMainStart: -1, + thirdPartyMainEnd: -1, + clusterSetupStart: -1, + clusterSetupEnd: -1, + moduleLoadStart: { around: inited }, + moduleLoadEnd: { around: inited }, + preloadModuleLoadStart: { around: inited }, + preloadModuleLoadEnd: { around: inited }, + }); +}, 2000); + +process.on('exit', () => { + checkNodeTiming({ + name: 'node', + entryType: 'node', + startTime: 0, + duration: { around: performance.now() }, + nodeStart: { around: 0 }, + v8Start: { around: 0 }, + bootstrapComplete: { around: inited }, + environment: { around: 0 }, + loopStart: { around: inited }, + loopExit: { around: performance.now() }, + thirdPartyMainStart: -1, + thirdPartyMainEnd: -1, + clusterSetupStart: -1, + clusterSetupEnd: -1, + moduleLoadStart: { around: inited }, + moduleLoadEnd: { around: inited }, + preloadModuleLoadStart: { around: inited }, + preloadModuleLoadEnd: { around: inited }, + }); }); diff --git a/test/parallel/test-postmortem-metadata.js b/test/parallel/test-postmortem-metadata.js index 3ee7b5dc3cee1a..e657ec70e8f5b2 100644 --- a/test/parallel/test-postmortem-metadata.js +++ b/test/parallel/test-postmortem-metadata.js @@ -7,7 +7,13 @@ const common = require('../common'); const assert = require('assert'); const { spawnSync } = require('child_process'); -const args = [process.execPath]; +const { getSharedLibPath } = require('../common/shared-lib-util.js'); + +// For shared lib case, check shared lib instead +const args = [ + process.config.variables.node_shared ? 
+ getSharedLibPath() : process.execPath +]; if (common.isAIX) args.unshift('-Xany', '-B'); diff --git a/test/parallel/test-readdouble.js b/test/parallel/test-readdouble.js index 76b259ba498a6d..f635edba9038d3 100644 --- a/test/parallel/test-readdouble.js +++ b/test/parallel/test-readdouble.js @@ -29,102 +29,101 @@ const assert = require('assert'); /* * Test (64 bit) double */ -function test(clazz) { - const buffer = new clazz(8); - - buffer[0] = 0x55; - buffer[1] = 0x55; - buffer[2] = 0x55; - buffer[3] = 0x55; - buffer[4] = 0x55; - buffer[5] = 0x55; - buffer[6] = 0xd5; - buffer[7] = 0x3f; - assert.strictEqual(1.1945305291680097e+103, buffer.readDoubleBE(0)); - assert.strictEqual(0.3333333333333333, buffer.readDoubleLE(0)); - - buffer[0] = 1; - buffer[1] = 0; - buffer[2] = 0; - buffer[3] = 0; - buffer[4] = 0; - buffer[5] = 0; - buffer[6] = 0xf0; - buffer[7] = 0x3f; - assert.strictEqual(7.291122019655968e-304, buffer.readDoubleBE(0)); - assert.strictEqual(1.0000000000000002, buffer.readDoubleLE(0)); - - buffer[0] = 2; - assert.strictEqual(4.778309726801735e-299, buffer.readDoubleBE(0)); - assert.strictEqual(1.0000000000000004, buffer.readDoubleLE(0)); - - buffer[0] = 1; - buffer[6] = 0; - buffer[7] = 0; - assert.strictEqual(7.291122019556398e-304, buffer.readDoubleBE(0)); - assert.strictEqual(5e-324, buffer.readDoubleLE(0)); - - buffer[0] = 0xff; - buffer[1] = 0xff; - buffer[2] = 0xff; - buffer[3] = 0xff; - buffer[4] = 0xff; - buffer[5] = 0xff; - buffer[6] = 0x0f; - buffer[7] = 0x00; - assert.ok(Number.isNaN(buffer.readDoubleBE(0))); - assert.strictEqual(2.225073858507201e-308, buffer.readDoubleLE(0)); - - buffer[6] = 0xef; - buffer[7] = 0x7f; - assert.ok(Number.isNaN(buffer.readDoubleBE(0))); - assert.strictEqual(1.7976931348623157e+308, buffer.readDoubleLE(0)); - - buffer[0] = 0; - buffer[1] = 0; - buffer[2] = 0; - buffer[3] = 0; - buffer[4] = 0; - buffer[5] = 0; - buffer[6] = 0xf0; - buffer[7] = 0x3f; - assert.strictEqual(3.03865e-319, buffer.readDoubleBE(0)); - assert.strictEqual(1, buffer.readDoubleLE(0)); - - buffer[6] = 0; - buffer[7] = 0x40; - assert.strictEqual(3.16e-322, buffer.readDoubleBE(0)); - assert.strictEqual(2, buffer.readDoubleLE(0)); - - buffer[7] = 0xc0; - assert.strictEqual(9.5e-322, buffer.readDoubleBE(0)); - assert.strictEqual(-2, buffer.readDoubleLE(0)); - - buffer[6] = 0x10; - buffer[7] = 0; - assert.strictEqual(2.0237e-320, buffer.readDoubleBE(0)); - assert.strictEqual(2.2250738585072014e-308, buffer.readDoubleLE(0)); - - buffer[6] = 0; - assert.strictEqual(0, buffer.readDoubleBE(0)); - assert.strictEqual(0, buffer.readDoubleLE(0)); - assert.strictEqual(false, 1 / buffer.readDoubleLE(0) < 0); - - buffer[7] = 0x80; - assert.strictEqual(6.3e-322, buffer.readDoubleBE(0)); - assert.strictEqual(0, buffer.readDoubleLE(0)); - assert.strictEqual(true, 1 / buffer.readDoubleLE(0) < 0); - - buffer[6] = 0xf0; - buffer[7] = 0x7f; - assert.strictEqual(3.0418e-319, buffer.readDoubleBE(0)); - assert.strictEqual(Infinity, buffer.readDoubleLE(0)); - - buffer[6] = 0xf0; - buffer[7] = 0xff; - assert.strictEqual(3.04814e-319, buffer.readDoubleBE(0)); - assert.strictEqual(-Infinity, buffer.readDoubleLE(0)); -} - - -test(Buffer); +const buffer = Buffer.allocUnsafe(8); + +buffer[0] = 0x55; +buffer[1] = 0x55; +buffer[2] = 0x55; +buffer[3] = 0x55; +buffer[4] = 0x55; +buffer[5] = 0x55; +buffer[6] = 0xd5; +buffer[7] = 0x3f; +assert.strictEqual(1.1945305291680097e+103, buffer.readDoubleBE(0)); +assert.strictEqual(0.3333333333333333, buffer.readDoubleLE(0)); + +buffer[0] = 1; +buffer[1] = 0; 
+buffer[2] = 0; +buffer[3] = 0; +buffer[4] = 0; +buffer[5] = 0; +buffer[6] = 0xf0; +buffer[7] = 0x3f; +assert.strictEqual(7.291122019655968e-304, buffer.readDoubleBE(0)); +assert.strictEqual(1.0000000000000002, buffer.readDoubleLE(0)); + +buffer[0] = 2; +assert.strictEqual(4.778309726801735e-299, buffer.readDoubleBE(0)); +assert.strictEqual(1.0000000000000004, buffer.readDoubleLE(0)); + +buffer[0] = 1; +buffer[6] = 0; +buffer[7] = 0; +assert.strictEqual(7.291122019556398e-304, buffer.readDoubleBE(0)); +assert.strictEqual(5e-324, buffer.readDoubleLE(0)); + +buffer[0] = 0xff; +buffer[1] = 0xff; +buffer[2] = 0xff; +buffer[3] = 0xff; +buffer[4] = 0xff; +buffer[5] = 0xff; +buffer[6] = 0x0f; +buffer[7] = 0x00; +assert.ok(Number.isNaN(buffer.readDoubleBE(0))); +assert.strictEqual(2.225073858507201e-308, buffer.readDoubleLE(0)); + +buffer[6] = 0xef; +buffer[7] = 0x7f; +assert.ok(Number.isNaN(buffer.readDoubleBE(0))); +assert.strictEqual(1.7976931348623157e+308, buffer.readDoubleLE(0)); + +buffer[0] = 0; +buffer[1] = 0; +buffer[2] = 0; +buffer[3] = 0; +buffer[4] = 0; +buffer[5] = 0; +buffer[6] = 0xf0; +buffer[7] = 0x3f; +assert.strictEqual(3.03865e-319, buffer.readDoubleBE(0)); +assert.strictEqual(1, buffer.readDoubleLE(0)); + +buffer[6] = 0; +buffer[7] = 0x40; +assert.strictEqual(3.16e-322, buffer.readDoubleBE(0)); +assert.strictEqual(2, buffer.readDoubleLE(0)); + +buffer[7] = 0xc0; +assert.strictEqual(9.5e-322, buffer.readDoubleBE(0)); +assert.strictEqual(-2, buffer.readDoubleLE(0)); + +buffer[6] = 0x10; +buffer[7] = 0; +assert.strictEqual(2.0237e-320, buffer.readDoubleBE(0)); +assert.strictEqual(2.2250738585072014e-308, buffer.readDoubleLE(0)); + +buffer[6] = 0; +assert.strictEqual(0, buffer.readDoubleBE(0)); +assert.strictEqual(0, buffer.readDoubleLE(0)); +assert.strictEqual(false, 1 / buffer.readDoubleLE(0) < 0); + +buffer[7] = 0x80; +assert.strictEqual(6.3e-322, buffer.readDoubleBE(0)); +assert.strictEqual(-0, buffer.readDoubleLE(0)); +assert.strictEqual(true, 1 / buffer.readDoubleLE(0) < 0); + +buffer[6] = 0xf0; +buffer[7] = 0x7f; +assert.strictEqual(3.0418e-319, buffer.readDoubleBE(0)); +assert.strictEqual(Infinity, buffer.readDoubleLE(0)); + +buffer[7] = 0xff; +assert.strictEqual(3.04814e-319, buffer.readDoubleBE(0)); +assert.strictEqual(-Infinity, buffer.readDoubleLE(0)); + +buffer.writeDoubleBE(246800); +assert.strictEqual(buffer.readDoubleBE(), 246800); +assert.strictEqual(buffer.readDoubleBE(0.7), 246800); +assert.strictEqual(buffer.readDoubleBE(NaN), 246800); diff --git a/test/parallel/test-repl-tab-complete.js b/test/parallel/test-repl-tab-complete.js index c9048d887d5cab..6485f8bd17f58b 100644 --- a/test/parallel/test-repl-tab-complete.js +++ b/test/parallel/test-repl-tab-complete.js @@ -544,7 +544,8 @@ editor.completer('var log = console.l', common.mustCall((error, data) => { ['Let', 'Const', 'Klass'].forEach((type) => { const query = `lexical${type[0]}`; - const expected = hasInspector ? [[`lexical${type}`], query] : []; + const expected = hasInspector ? 
[[`lexical${type}`], query] : + [[], `lexical${type[0]}`]; testRepl.complete(query, common.mustCall((error, data) => { assert.deepStrictEqual(data, expected); })); diff --git a/test/parallel/test-repl-underscore.js b/test/parallel/test-repl-underscore.js index 91f32223e180b9..57929244ae4374 100644 --- a/test/parallel/test-repl-underscore.js +++ b/test/parallel/test-repl-underscore.js @@ -10,6 +10,7 @@ testStrictMode(); testResetContext(); testResetContextGlobal(); testMagicMode(); +testError(); function testSloppyMode() { const r = initRepl(repl.REPL_MODE_SLOPPY); @@ -153,6 +154,73 @@ function testResetContextGlobal() { delete global.require; } +function testError() { + const r = initRepl(repl.REPL_MODE_STRICT); + + r.write(`_error; // initial value undefined + throw new Error('foo'); // throws error + _error; // shows error + fs.readdirSync('/nonexistent?'); // throws error, sync + _error.code; // shows error code + _error.syscall; // shows error syscall + setImmediate(() => { throw new Error('baz'); }); undefined; + // throws error, async + `); + + setImmediate(() => { + const lines = r.output.accum.trim().split('\n'); + const expectedLines = [ + 'undefined', + + // The error, both from the original throw and the `_error` echo. + 'Error: foo', + 'Error: foo', + + // The sync error, with individual property echoes + /Error: ENOENT: no such file or directory, scandir '.*nonexistent.*'/, + /fs\.readdirSync/, + "'ENOENT'", + "'scandir'", + + // Dummy 'undefined' from the explicit silencer + one from the comment + 'undefined', + 'undefined', + + // The message from the original throw + 'Error: baz', + /setImmediate/, + /^ at/, + /^ at/, + /^ at/, + /^ at/, + ]; + for (const line of lines) { + const expected = expectedLines.shift(); + if (typeof expected === 'string') + assert.strictEqual(line, expected); + else + assert(expected.test(line), `${line} should match ${expected}`); + } + assert.strictEqual(expectedLines.length, 0); + + // Reset output, check that '_error' is the asynchronously caught error. 
+ r.output.accum = ''; + r.write(`_error.message // show the message + _error = 0; // disable auto-assignment + throw new Error('quux'); // new error + _error; // should not see the new error + `); + + assertOutput(r.output, [ + "'baz'", + 'Expression assignment to _error now disabled.', + '0', + 'Error: quux', + '0' + ]); + }); +} + function initRepl(mode, useGlobal) { const inputStream = new stream.PassThrough(); const outputStream = new stream.PassThrough(); diff --git a/test/parallel/test-tls-pause.js b/test/parallel/test-tls-pause.js index a6130cd4331f62..e246acff807a91 100644 --- a/test/parallel/test-tls-pause.js +++ b/test/parallel/test-tls-pause.js @@ -60,7 +60,7 @@ server.listen(0, common.mustCall(() => { console.error('sending'); const ret = client.write(Buffer.allocUnsafe(bufSize)); console.error(`write => ${ret}`); - if (false !== ret) { + if (ret !== false) { console.error('write again'); sent += bufSize; assert.ok(sent < 100 * 1024 * 1024); // max 100MB diff --git a/test/parallel/test-trace-events-file-pattern.js b/test/parallel/test-trace-events-file-pattern.js new file mode 100644 index 00000000000000..46059ad31d58b2 --- /dev/null +++ b/test/parallel/test-trace-events-file-pattern.js @@ -0,0 +1,30 @@ +'use strict'; +const common = require('../common'); +const tmpdir = require('../common/tmpdir'); +const assert = require('assert'); +const cp = require('child_process'); +const fs = require('fs'); + +tmpdir.refresh(); +process.chdir(tmpdir.path); + +const CODE = + 'setTimeout(() => { for (var i = 0; i < 100000; i++) { "test" + i } }, 1)'; + +const proc = cp.spawn(process.execPath, [ + '--trace-events-enabled', + '--trace-event-file-pattern', + // eslint-disable-next-line no-template-curly-in-string + '${pid}-${rotation}-${pid}-${rotation}.tracing.log', + '-e', CODE +]); + +proc.once('exit', common.mustCall(() => { + const expectedFilename = `${proc.pid}-1-${proc.pid}-1.tracing.log`; + + assert(common.fileExists(expectedFilename)); + fs.readFile(expectedFilename, common.mustCall((err, data) => { + const traces = JSON.parse(data.toString()).traceEvents; + assert(traces.length > 0); + })); +})); diff --git a/test/parallel/test-zlib-empty-buffer.js b/test/parallel/test-zlib-empty-buffer.js index 908c89cbbcef2e..8b299f8728282d 100644 --- a/test/parallel/test-zlib-empty-buffer.js +++ b/test/parallel/test-zlib-empty-buffer.js @@ -3,7 +3,7 @@ const common = require('../common'); const zlib = require('zlib'); const { inspect, promisify } = require('util'); const assert = require('assert'); -const emptyBuffer = new Buffer(0); +const emptyBuffer = Buffer.alloc(0); common.crashOnUnhandledRejection(); diff --git a/test/pummel/test-net-throttle.js b/test/pummel/test-net-throttle.js index ea48aa74d2f2b8..a08ed85ccd41bb 100644 --- a/test/pummel/test-net-throttle.js +++ b/test/pummel/test-net-throttle.js @@ -39,7 +39,7 @@ const server = net.createServer(function(connection) { connection.write(body.slice(part_N, 2 * part_N)); assert.strictEqual(false, connection.write(body.slice(2 * part_N, N))); console.log(`bufferSize: ${connection.bufferSize}`, 'expecting', N); - assert.ok(0 <= connection.bufferSize && + assert.ok(connection.bufferSize >= 0 && connection.writableLength <= N); connection.end(); }); diff --git a/test/pummel/test-next-tick-infinite-calls.js b/test/pummel/test-next-tick-infinite-calls.js index b72d18fa40c23e..5ee44076dcc2f3 100644 --- a/test/pummel/test-next-tick-infinite-calls.js +++ b/test/pummel/test-next-tick-infinite-calls.js @@ -28,7 +28,7 @@ let complete = 0; // FATAL 
ERROR: JS Allocation failed - process out of memory // if the depth counter doesn't clear the nextTickQueue properly. (function runner() { - if (1e8 > ++complete) + if (++complete < 1e8) process.nextTick(runner); }()); diff --git a/test/sequential/sequential.status b/test/sequential/sequential.status index 5c0b460154f84f..b95db2a111ea67 100644 --- a/test/sequential/sequential.status +++ b/test/sequential/sequential.status @@ -13,7 +13,6 @@ test-inspector-debug-end : PASS, FLAKY test-inspector-async-hook-setup-at-signal: PASS, FLAKY test-http2-ping-flood : PASS, FLAKY test-http2-settings-flood : PASS, FLAKY -test-inspector-stop-profile-after-done: PASS, FLAKY [$system==linux] diff --git a/test/sequential/test-async-wrap-getasyncid.js b/test/sequential/test-async-wrap-getasyncid.js index 4e01b5fbb24c43..1744831ca3be12 100644 --- a/test/sequential/test-async-wrap-getasyncid.js +++ b/test/sequential/test-async-wrap-getasyncid.js @@ -284,9 +284,11 @@ if (common.hasCrypto) { // eslint-disable-line crypto-check testInitialized(handle, 'UDP'); testUninitialized(req, 'SendWrap'); - handle.bind('0.0.0.0', common.PORT, undefined); + handle.bind('0.0.0.0', 0, undefined); + const addr = {}; + handle.getsockname(addr); req.address = '127.0.0.1'; - req.port = common.PORT; + req.port = addr.port; req.oncomplete = () => handle.close(); handle.send(req, [Buffer.alloc(1)], 1, req.port, req.address, true); testInitialized(req, 'SendWrap'); diff --git a/test/sequential/test-inspector-stop-profile-after-done.js b/test/sequential/test-inspector-stop-profile-after-done.js index 7069e490255ce5..15764d84860e8c 100644 --- a/test/sequential/test-inspector-stop-profile-after-done.js +++ b/test/sequential/test-inspector-stop-profile-after-done.js @@ -12,17 +12,18 @@ async function runTests() { console.log(new Object()); if (c++ === 10) clearInterval(interval); - }, 10);`); + }, ${common.platformTimeout(30)});`); const session = await child.connectInspectorSession(); session.send([ - { 'method': 'Profiler.setSamplingInterval', 'params': { 'interval': 100 } }, - { 'method': 'Profiler.enable' }, - { 'method': 'Runtime.runIfWaitingForDebugger' }, - { 'method': 'Profiler.start' }]); + { method: 'Profiler.setSamplingInterval', + params: { interval: common.platformTimeout(300) } }, + { method: 'Profiler.enable' }, + { method: 'Runtime.runIfWaitingForDebugger' }, + { method: 'Profiler.start' }]); while (await child.nextStderrString() !== 'Waiting for the debugger to disconnect...'); - await session.send({ 'method': 'Profiler.stop' }); + await session.send({ method: 'Profiler.stop' }); session.disconnect(); assert.strictEqual(0, (await child.expectShutdown()).exitCode); } diff --git a/tools/icu/iculslocs.cc b/tools/icu/iculslocs.cc index 3ceb8d2a4d81d0..a6931d3a9a62d6 100644 --- a/tools/icu/iculslocs.cc +++ b/tools/icu/iculslocs.cc @@ -55,6 +55,7 @@ Japanese, it doesn't *claim* to have Japanese. #include #include #include +#include const char* PROG = "iculslocs"; const char* NAME = U_ICUDATA_NAME; // assume ICU data